/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "intl.h"
#include "stringpool.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "attr-fnspec.h"
#include "value-query.h"
#include "tree-pretty-print.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

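/* Worked example (editorial note, not part of the original source): on a
   target whose PREFERRED_STACK_BOUNDARY is 128 bits, such as the x86-64
   SysV ABI, STACK_BYTES evaluates to 128 / 8 == 16, so the sizes computed
   in this file round outgoing argument blocks up to 16-byte multiples.  */
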
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into
     registers, copy in smaller-sized pieces into pseudos.  These are
     stored in a block pointed to by this field.  The next field says how
     many word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
			   unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper)
      && const_upper <= highest_outgoing_arg_in_use)
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
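
/* Usage sketch for the two routines above (editorial note, not part of
   the original source; assumes the usage map covers at least 16 bytes):

     mark_stack_region_used (0, 16);
     ... stack_region_maybe_used_p (8, 24, 0) ...   // now returns true

   Either bytes 8..15 were marked in stack_usage_map, or (if the map was
   too small) the watermark was lowered instead.  When an upper bound is
   not a compile-time constant, mark_stack_region_used conservatively
   lowers stack_usage_watermark rather than touching the map, and every
   later query reaching past the watermark reports the region as used.  */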

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* FUNEXP could be a SYMBOL_REF that represents a function pointer
	 of ptr_mode.  In this case, it should be converted into address
	 mode to be a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}
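
/* Illustrative note (editorial, not part of the original source): for a
   plain indirect call such as

     void (*fp) (void);
     fp ();

   FUNEXP is not a SYMBOL_REF, so the first branch above legitimizes the
   address for FUNCTION_MODE and, if register parameters were seen on a
   target with small register classes, forces it out of memory into a
   pseudo to avoid reload conflicts.  A direct call to a named function
   instead reaches the SYMBOL_REF arm, where, when optimizing and
   -fno-function-cse is not given, the address may be forced into a
   register so it can be reused.  */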

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
				 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     poly_int64 stack_size ATTRIBUTE_UNUSED,
	     poly_int64 rounded_stack_size,
	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations:
	 if an entry gets garbage collected in one compilation and a
	 different (but equivalent) entry is then added, the other
	 compilation doesn't run the garbage collector at the same spot
	 and keeps sharing the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg,
				   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     unchanging bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If a pop is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}
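
/* Worked example (editorial note, not part of the original source): on
   32-bit x86, a callee such as

     void __attribute__ ((stdcall)) f (int x);

   pops its own argument, so targetm.calls.return_pops_args reports 4 and
   N_POPPED is 4; the call is then emitted through the gen_call_pop
   pattern and ALREADY_POPPED is set, so the caller skips its own stack
   adjustment afterwards.  */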

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp),
   then set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might
   allocate space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
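
/* Examples (editorial note, not part of the original source): calls to
   "setjmp", "_setjmp" and "__setjmp" all receive ECF_RETURNS_TWICE here,
   because one or two leading underscores are stripped before the name
   comparison, while "alloca" and the ALLOCA_FUNCTION_CODE_P built-ins
   receive ECF_MAY_BE_ALLOCA.  */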

/* Return fnspec for DECL.  */

static attr_fnspec
decl_fnspec (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (type)
    {
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
	return TREE_VALUE (TREE_VALUE (attr));
    }
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return builtin_fnspec (fndecl);
  return "";
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  attr_fnspec fnspec = decl_fnspec (fndecl);

  unsigned int arg;
  if (fnspec.returns_arg (&arg))
    return ERF_RETURNS_ARG | arg;

  if (fnspec.returns_noalias_p ())
    return ERF_NOALIAS;
  return 0;
}

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return gimple_call_num_args (stmt) > 0;
      default:
	break;
      }

  return false;
}

/* Return true when EXP is a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}
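
/* For instance (editorial note, not part of the original source), both
   __builtin_alloca (n) and __builtin_alloca_with_align (n, a) satisfy
   alloca_call_p, since CASE_BUILT_IN_ALLOCA expands to every
   alloca-class built-in function code.  */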

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
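
/* Example (editorial note, not part of the original source): a declaration

     int f (int) __attribute__ ((const, nothrow));

   yields ECF_CONST | ECF_NOTHROW here; if the function is additionally
   declared noreturn, the TREE_THIS_VOLATILE handling above adds
   ECF_NORETURN and, since ECF_CONST is set, ECF_LOOPING_CONST_OR_PURE
   as well.  */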

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  arg.type = TREE_TYPE (first_field (type));
	  arg.mode = TYPE_MODE (arg.type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}
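
/* Examples (editorial note, not part of the original source): a C++ class
   with a user-provided copy constructor is TREE_ADDRESSABLE and is
   therefore always passed by invisible reference, as is a C99 variable
   length array, whose TYPE_SIZE is not a compile-time (poly_int)
   constant; everything else is left to the target's pass_by_reference
   hook.  */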

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	machine_mode old_mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));

	/* Some ABIs require scalar floating point modes to be returned
	   in a wider scalar integer mode.  We need to explicitly
	   reinterpret to an integer mode of the correct precision
	   before extending to the desired result.  */
	if (SCALAR_INT_MODE_P (args[i].mode)
	    && SCALAR_FLOAT_MODE_P (old_mode)
	    && known_gt (GET_MODE_SIZE (args[i].mode),
			 GET_MODE_SIZE (old_mode)))
	  args[i].value = convert_float_to_wider_int (args[i].mode, old_mode,
						      args[i].value);
	else if (args[i].mode != old_mode)
	  args[i].value = convert_modes (args[i].mode, old_mode,
					 args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
		|| targetm.precompute_tls_p (args[i].mode, args[i].value)))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit
   direct copying into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}

/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization.  */

void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
  gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
  if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
    return;

  error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   argument that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
				 struct arg_data *args,
				 struct args_size *args_size,
				 int n_named_args ATTRIBUTE_UNUSED,
				 tree exp, tree struct_value_addr_value,
				 tree fndecl, tree fntype,
				 cumulative_args_t args_so_far,
				 int reg_parm_stack_space,
				 rtx *old_stack_level,
				 poly_int64_pod *old_pending_adj,
				 int *must_preallocate, int *ecf_flags,
				 bool *may_tailcall, bool call_from_thunk_p)
{
d5cc9181 1291 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
db3927fb 1292 location_t loc = EXPR_LOCATION (exp);
d7cdf113
JL
1293
1294 /* Count arg position in order args appear. */
1295 int argpos;
1296
1297 int i;
f725a3ec 1298
d7cdf113
JL
1299 args_size->constant = 0;
1300 args_size->var = 0;
1301
1302 /* In this loop, we consider args in the order they are written.
3d9684ae 1303 We fill up ARGS from the back. */
d7cdf113 1304
3d9684ae 1305 i = num_actuals - 1;
078a18a4 1306 {
31db0fe0 1307 int j = i;
078a18a4
SL
1308 call_expr_arg_iterator iter;
1309 tree arg;
1310
1311 if (struct_value_addr_value)
1312 {
1313 args[j].tree_value = struct_value_addr_value;
3d9684ae 1314 j--;
078a18a4 1315 }
afc610db 1316 argpos = 0;
078a18a4
SL
1317 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1318 {
1319 tree argtype = TREE_TYPE (arg);
d5e254e1 1320
078a18a4
SL
1321 if (targetm.calls.split_complex_arg
1322 && argtype
1323 && TREE_CODE (argtype) == COMPLEX_TYPE
1324 && targetm.calls.split_complex_arg (argtype))
1325 {
1326 tree subtype = TREE_TYPE (argtype);
078a18a4 1327 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
3d9684ae 1328 j--;
078a18a4
SL
1329 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1330 }
1331 else
1332 args[j].tree_value = arg;
3d9684ae 1333 j--;
afc610db 1334 argpos++;
078a18a4
SL
1335 }
1336 }
1337
d7cdf113 1338 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
3d9684ae 1339 for (argpos = 0; argpos < num_actuals; i--, argpos++)
d7cdf113 1340 {
078a18a4 1341 tree type = TREE_TYPE (args[i].tree_value);
d7cdf113 1342 int unsignedp;
d7cdf113 1343
d7cdf113 1344 /* Replace erroneous argument with constant zero. */
d0f062fb 1345 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
d7cdf113
JL
1346 args[i].tree_value = integer_zero_node, type = integer_type_node;
1347
ebf0bf7f
JJ
1348 /* If TYPE is a transparent union or record, pass things the way
1349 we would pass the first field of the union or record. We have
1350 already verified that the modes are the same. */
920ea3b8 1351 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
ebf0bf7f 1352 type = TREE_TYPE (first_field (type));
d7cdf113
JL
1353
1354 /* Decide where to pass this arg.
1355
1356 args[i].reg is nonzero if all or part is passed in registers.
1357
1358 args[i].partial is nonzero if part but not all is passed in registers,
78a52f11 1359 and the exact value says how many bytes are passed in registers.
d7cdf113
JL
1360
1361 args[i].pass_on_stack is nonzero if the argument must at least be
1362 computed on the stack. It may then be loaded back into registers
1363 if args[i].reg is nonzero.
1364
1365 These decisions are driven by the FUNCTION_... macros and must agree
e53b6e56 1366 with those made by function.cc. */
d7cdf113
JL
1367
1368 /* See if this argument should be passed by invisible reference. */
cf0d189e
RS
1369 function_arg_info arg (type, argpos < n_named_args);
1370 if (pass_by_reference (args_so_far_pnt, arg))
d7cdf113 1371 {
defafb78
EB
1372 const bool callee_copies
1373 = reference_callee_copied (args_so_far_pnt, arg);
1374 tree base;
1375
1376 /* If we're compiling a thunk, pass directly the address of an object
1377 already in memory, instead of making a copy. Likewise if we want
1378 to make the copy in the callee instead of the caller. */
1379 if ((call_from_thunk_p || callee_copies)
316bdb2e
RB
1380 && TREE_CODE (args[i].tree_value) != WITH_SIZE_EXPR
1381 && ((base = get_base_address (args[i].tree_value)), true)
defafb78
EB
1382 && TREE_CODE (base) != SSA_NAME
1383 && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
d7cdf113 1384 {
006e317a
JH
1385 /* We may have turned the parameter value into an SSA name.
1386 Go back to the original parameter so we can take the
1387 address. */
1388 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1389 {
1390 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1391 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1392 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1393 }
fe8dd12e
JH
1394 /* Argument setup code may have copied the value to register. We
1395 revert that optimization now because the tail call code must
1396 use the original location. */
1397 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1398 && !MEM_P (DECL_RTL (args[i].tree_value))
1399 && DECL_INCOMING_RTL (args[i].tree_value)
1400 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1401 set_decl_rtl (args[i].tree_value,
1402 DECL_INCOMING_RTL (args[i].tree_value));
1403
c4b9a87e
ER
1404 mark_addressable (args[i].tree_value);
1405
9969aaf6
RH
1406 /* We can't use sibcalls if a callee-copied argument is
1407 stored in the current function's frame. */
1408 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
9a385c2d
DM
1409 {
1410 *may_tailcall = false;
1411 maybe_complain_about_tail_call (exp,
1412 "a callee-copied argument is"
cefc0906 1413 " stored in the current"
9a385c2d
DM
1414 " function's frame");
1415 }
9fd47435 1416
db3927fb
AH
1417 args[i].tree_value = build_fold_addr_expr_loc (loc,
1418 args[i].tree_value);
9969aaf6
RH
1419 type = TREE_TYPE (args[i].tree_value);
1420
becfd6e5
KZ
1421 if (*ecf_flags & ECF_CONST)
1422 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
f21add07 1423 }
d7cdf113
JL
1424 else
1425 {
1426 /* We make a copy of the object and pass the address to the
1427 function being called. */
1428 rtx copy;
1429
d0f062fb 1430 if (!COMPLETE_TYPE_P (type)
b38f3813
EB
1431 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       max_int_size_in_bytes
                                                       (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
              maybe_complain_about_tail_call (exp,
                                              "argument must be passed"
                                              " by copying");
            }
          arg.pass_by_reference = true;
        }

      unsignedp = TYPE_UNSIGNED (type);
      arg.type = type;
      arg.mode
        = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                 fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = arg.mode;

      targetm.calls.warn_parameter_passing_abi (args_so_far, type);

      args[i].reg = targetm.calls.function_arg (args_so_far, arg);

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, arg);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is
         simpler to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             reg_parm_stack_space,
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad
          = BLOCK_REG_PADDING (arg.mode, type,
                               int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      /* ??? Traditionally we've passed TYPE_MODE here, instead of the
         promoted_mode used for function_arg above.  However, the
         corresponding handling of incoming arguments in function.cc
         does pass the promoted mode.  */
      arg.mode = TYPE_MODE (type);
      targetm.calls.function_arg_advance (args_so_far, arg);
    }
}

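/* To make the copy-making path above concrete: for a call f (x) where
   X has a variable-sized type passed by invisible reference, the code
   saves the stack pointer once per call expansion, carves out space for
   the copy with allocate_dynamic_stack_space, stores X into it, and
   rewrites the argument to the address of the copy; the matching stack
   restore is emitted after the call.  (Illustrative walk-through only;
   the precise insns depend on the target.)  */
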
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static poly_int64
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  poly_int64 unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the
     frame will be already aligned.  Align to STACK_BOUNDARY in order to
     prevent backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (multiple_p (stack_pointer_delta,
                                  preferred_stack_boundary));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (aligned_upper_bound (args_size->constant
                                                  + stack_pointer_delta,
                                                  preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = upper_bound (args_size->constant,
                                         reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}

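/* Worked example of the rounding above (figures illustrative, not from
   any particular ABI): with a 16-byte preferred boundary, 20 bytes of
   constant argument size and a stack_pointer_delta of 4,

     aligned_upper_bound (20 + 4, 16) - 4 = 32 - 4 = 28

   so 28 bytes are allocated and the stack pointer ends up at delta
   4 + 28 = 32, a multiple of 16, when the call is made.  */
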
/* Precompute parameters as needed for a function call.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        {
          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
            }
        }
    }
}

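/* For instance, on a hypothetical target that promotes QImode argument
   values to SImode, a precomputed CALL_EXPR argument is kept as an
   SImode pseudo in VALUE, while INITIAL_VALUE becomes a QImode lowpart
   SUBREG of that pseudo with SUBREG_PROMOTED_VAR_P set, so later passes
   can still find the value in the argument's declared mode.  */
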
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      poly_int64 copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (maybe_ne (args_size->constant, 0)
          && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
        must_preallocate = 1;
    }
  return must_preallocate;
}

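/* A made-up instance of the "more than half" heuristic above: with 24
   bytes of stack arguments, one of which is a 16-byte BLKmode value
   produced by a nested CALL_EXPR, copy_to_evaluate_size * 2 = 32 >= 24,
   so the whole argument block is preallocated rather than pushed
   piecemeal.  */
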
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock,
                            int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i;
      poly_int64 arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        {
          arg_reg = XEXP (argblock, 0);
          arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
        }

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          poly_uint64 units_on_stack = 0;
          machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
            continue;

          addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
          addr = plus_constant (Pmode, addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
              partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, units_on_stack);
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          poly_int64 offset_val;
          if (args[i].locate.where_pad != PAD_DOWNWARD)
            align = boundary;
          else if (poly_int_rtx_p (offset, &offset_val))
            {
              align = least_bit_hwi (boundary);
              unsigned int offset_align
                = known_alignment (offset_val) * BITS_PER_UNIT;
              if (offset_align != 0)
                align = MIN (align, offset_align);
            }
          set_mem_align (args[i].stack, align);

          addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
          addr = plus_constant (Pmode, addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, units_on_stack);
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

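/* Example of the partial case above (sizes illustrative): if 16 bytes
   of a 24-byte argument travel in registers, locate.size.constant
   describes the remaining 8 bytes, so args[i].stack and
   args[i].stack_slot become 8-byte MEMs at ARGBLOCK plus the
   argument's offset, covering only the stack-resident tail.  */
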
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        TREE_USED (fndecl) = 1;

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return the static chain for this function, if any.  */

rtx
rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
{
  if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
    return NULL;

  return targetm.calls.static_chain (fndecl_or_type, incoming_p);
}

/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx_insn *scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  vec<rtx> cache;
} internal_arg_pointer_exp_state;

static rtx internal_arg_pointer_based_exp (const_rtx, bool);

/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with the first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   pointer it has.  */

static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;

  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
        {
          rtx val = NULL_RTX;
          unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
          /* Punt on pseudos set multiple times.  */
          if (idx < internal_arg_pointer_exp_state.cache.length ()
              && (internal_arg_pointer_exp_state.cache[idx]
                  != NULL_RTX))
            val = pc_rtx;
          else
            val = internal_arg_pointer_based_exp (SET_SRC (set), false);
          if (val != NULL_RTX)
            {
              if (idx >= internal_arg_pointer_exp_state.cache.length ())
                internal_arg_pointer_exp_state.cache
                  .safe_grow_cleared (idx + 1, true);
              internal_arg_pointer_exp_state.cache[idx] = val;
            }
        }
      if (NEXT_INSN (insn) == NULL_RTX)
        scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}

/* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
   it with fixed offset, or PC if this is with variable or unknown offset.
   TOPLEVEL is true if the function is invoked at the topmost level.  */

static rtx
internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
{
  if (CONSTANT_P (rtl))
    return NULL_RTX;

  if (rtl == crtl->args.internal_arg_pointer)
    return const0_rtx;

  if (REG_P (rtl) && HARD_REGISTER_P (rtl))
    return NULL_RTX;

  poly_int64 offset;
  if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
    {
      rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
      if (val == NULL_RTX || val == pc_rtx)
        return val;
      return plus_constant (Pmode, val, offset);
    }

  /* When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  */
  if (toplevel)
    internal_arg_pointer_based_exp_scan ();

  if (REG_P (rtl))
    {
      unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
      if (idx < internal_arg_pointer_exp_state.cache.length ())
        return internal_arg_pointer_exp_state.cache[idx];

      return NULL_RTX;
    }

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
        return pc_rtx;
      if (MEM_P (x))
        iter.skip_subrtxes ();
    }

  return NULL_RTX;
}

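/* Examples (illustrative RTL): crtl->args.internal_arg_pointer itself
   yields (const_int 0); (plus (reg ARGP) (const_int 16)), where ARGP
   is that pointer, yields (const_int 16); a hard register yields
   NULL_RTX; and a pseudo combining the pointer with a runtime-variable
   index, or set more than once, yields PC ("unknown offset").  */
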
/* Return true if SIZE bytes starting from address ADDR might overlap an
   already-clobbered argument area.  This function is used to determine
   if we should give up a sibcall.  */

static bool
mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
{
  poly_int64 i;
  unsigned HOST_WIDE_INT start, end;
  rtx val;

  if (bitmap_empty_p (stored_args_map)
      && stored_args_watermark == HOST_WIDE_INT_M1U)
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    return false;
  else if (!poly_int_rtx_p (val, &i))
    return true;

  if (known_eq (size, 0U))
    return false;

  if (STACK_GROWS_DOWNWARD)
    i -= crtl->args.pretend_args_size;
  else
    i += crtl->args.pretend_args_size;

  if (ARGS_GROW_DOWNWARD)
    i = -i - size;

  /* We can ignore any references to the function's pretend args,
     which at this point would manifest as negative values of I.  */
  if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
    return false;

  start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
  if (!(i + size).is_constant (&end))
    end = HOST_WIDE_INT_M1U;

  if (end > stored_args_watermark)
    return true;

  end = MIN (end, SBITMAP_SIZE (stored_args_map));
  for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
    if (bitmap_bit_p (stored_args_map, k))
      return true;

  return false;
}

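/* E.g. if a sibcall argument was already stored into bytes 8..11 of
   the outgoing area (bits 8..11 set in stored_args_map), a 4-byte load
   whose address resolves to the incoming argument pointer plus 8 is
   reported as overlapping, and the caller gives up on the sibcall.
   (Offsets illustrative; they ignore pretend args here.)  */
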
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          poly_int64 size = 0;
          HOST_WIDE_INT const_size = 0;
          rtx_insn *before_arg = get_last_insn ();
          tree tree_value = args[i].tree_value;
          tree type = TREE_TYPE (tree_value);
          if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
            type = TREE_TYPE (first_field (type));
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (type) == BLKmode)
            {
              /* Variable-sized parameters should be described by a
                 PARALLEL instead.  */
              const_size = int_size_in_bytes (type);
              gcc_assert (const_size >= 0);
              nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
              size = const_size;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb, e.g. a QImode value when we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (args[i].locate.where_pad
                  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
                {
                  gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
                  if (maybe_lt (size, UNITS_PER_WORD))
                    {
                      rtx x;
                      poly_int64 shift
                        = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                      /* Assigning REG here rather than a temp makes
                         CALL_FUSAGE report the whole reg as used.
                         Strictly speaking, the call only uses SIZE
                         bytes at the msb end, but it doesn't seem worth
                         generating rtl to say that.  */
                      reg = gen_rtx_REG (word_mode, REGNO (reg));
                      x = expand_shift (LSHIFT_EXPR, word_mode,
                                        reg, shift, reg, 1);
                      if (x != reg)
                        emit_move_insn (reg, x);
                    }
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          /* If we need a single register and the source is a constant
             VAR_DECL with a simple constructor, expand that constructor
             via a pseudo rather than read from (possibly misaligned)
             memory.  PR middle-end/95126.  */
          else if (nregs == 1
                   && partial == 0
                   && !args[i].pass_on_stack
                   && VAR_P (tree_value)
                   && TREE_READONLY (tree_value)
                   && !TREE_SIDE_EFFECTS (tree_value)
                   && immediate_const_ctor_p (DECL_INITIAL (tree_value)))
            {
              rtx target = gen_reg_rtx (word_mode);
              store_constructor (DECL_INITIAL (tree_value), target, 0,
                                 int_expr_size (DECL_INITIAL (tree_value)),
                                 false);
              reg = gen_rtx_REG (word_mode, REGNO (reg));
              emit_move_insn (reg, target);
            }
          else if (partial == 0 || args[i].pass_on_stack)
            {
              /* SIZE and CONST_SIZE are 0 for partial arguments and
                 the size of a BLKmode type otherwise.  */
              gcc_checking_assert (known_eq (size, const_size));
              rtx mem = validize_mem (copy_rtx (args[i].value));

              /* Check for overlap with already clobbered argument area,
                 providing that this has non-zero size.  */
              if (is_sibcall
                  && const_size != 0
                  && (mem_might_overlap_already_clobbered_arg_p
                      (XEXP (args[i].value, 0), const_size)))
                *sibcall_failure = 1;

              if (const_size % UNITS_PER_WORD == 0
                  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
              else
                {
                  if (nregs > 1)
                    move_block_to_reg (REGNO (reg), mem, nregs - 1,
                                       args[i].mode);
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
                  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
                  unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
                  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
                                             word_mode, word_mode, false,
                                             NULL);
                  if (BYTES_BIG_ENDIAN)
                    x = expand_shift (LSHIFT_EXPR, word_mode, x,
                                      BITS_PER_WORD - bitsize, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && const_size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == PAD_DOWNWARD
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
                  int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
                  enum tree_code dir = (BYTES_BIG_ENDIAN
                                        ? RSHIFT_EXPR : LSHIFT_EXPR);
                  rtx x;

                  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg_mode (call_fusage, reg, TYPE_MODE (type));
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}

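/* Worked instance of the shifting above, assuming a 64-bit big-endian
   target that pads small arguments upward: a 1-byte value loaded into
   a word register is shifted left by (UNITS_PER_WORD - 1)
   * BITS_PER_UNIT = 56 bits so that it lands at the most significant
   end of the register, where such a callee expects it.  */
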
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we try to compute an adjustment to the stack pointer
   for an amount that will leave the stack under-aligned by
   UNADJUSTED_ARGS_SIZE bytes.  Then, when the arguments are pushed the
   stack will be perfectly aligned.

   Return true if this optimization is possible, storing the adjustment
   in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
   bytes that should be popped after the call.  */

static bool
combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
                                           poly_int64 unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  poly_int64 adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
                           preferred_unit_stack_boundary,
                           &unadjusted_alignment))
    return false;

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unsigned HOST_WIDE_INT tmp_misalignment;
  if (!known_misalignment (pending_stack_adjust,
                           preferred_unit_stack_boundary,
                           &tmp_misalignment))
    return false;
  unadjusted_alignment -= tmp_misalignment;
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
    adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;

  /* We need to know whether the adjusted argument size
     (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
     or a deallocation.  */
  if (!ordered_p (adjustment, unadjusted_args_size))
    return false;

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  *adjustment_out = adjustment;
  return true;
}

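/* Worked example (numbers illustrative): with a 16-byte boundary,
   PENDING_STACK_ADJUST = 32, UNADJUSTED_ARGS_SIZE = 20 and an aligned
   stack_pointer_delta, the arguments would leave a misalignment of 4,
   so ADJUSTMENT becomes 32 - (16 - 4) = 20; popping 20 bytes now and
   pushing the 20 bytes of arguments restores 16-byte alignment, and
   ARGS_SIZE->CONSTANT becomes 32 - 20 + 20 = 32 bytes to pop after
   the call.  */
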
/* Scan expression X for dereferences of argument slots that were already
   clobbered by tail call arguments (as noted in the stored_args_map
   bitmap).
   Return nonzero if X dereferences such an argument slot,
   zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  /* We need not check the operands of the CALL expression itself.  */
  if (code == CALL)
    return 0;

  if (code == MEM)
    return (mem_might_overlap_already_clobbered_arg_p
            (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

/* Scan the sequence of insns after INSN for dereferences of argument slots
   that were already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add the stack slots
   for ARG to the stored_args_map bitmap afterwards (when ARG is a register
   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
   INSN dereferences such argument slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
                                int mark_stored_args_map)
{
  poly_uint64 low, high;
  unsigned HOST_WIDE_INT const_low, const_high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
      if (ARGS_GROW_DOWNWARD)
        low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
      else
        low = arg->locate.slot_offset.constant;
      high = low + arg->locate.size.constant;

      const_low = constant_lower_bound (low);
      if (high.is_constant (&const_high))
        for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
          bitmap_set_bit (stored_args_map, i);
      else
        stored_args_watermark = MIN (stored_args_watermark, const_low);
    }
  return insn != NULL_RTX;
}

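/* E.g. once a sibcall argument has been stored into slot bytes 0..3,
   MARK_STORED_ARGS_MAP sets bits 0..3 of stored_args_map; if the insns
   computing a later argument load from those bytes, the scan above
   stops early and the nonzero result makes the caller record a
   sibcall failure.  */
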
/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (machine_mode mode, bool left_p, rtx value)
{
  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  machine_mode value_mode = GET_MODE (value);
  poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);

  if (known_eq (shift, 0))
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
                           value, gen_int_shift_amount (value_mode, shift),
                           value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}

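/* For instance, on a hypothetical 64-bit target that returns SImode
   values in the most significant half of a DImode register,
   shift_return_value (SImode, false, value) emits an arithmetic right
   shift by 64 - 32 = 32 bits to bring the value down to the least
   significant end.  */
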
/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}

/* Helper function for expand_call.
   Return false if EXP is not implementable as a sibling call.  */

static bool
can_implement_as_sibling_call_p (tree exp,
                                 rtx structure_value_addr,
                                 tree funtype,
                                 tree fndecl,
                                 int flags,
                                 tree addr,
                                 const args_size &args_size)
{
  if (!targetm.have_sibcall_epilogue ())
    {
      maybe_complain_about_tail_call
        (exp,
         "machine description does not have"
         " a sibcall_epilogue instruction pattern");
      return false;
    }

  /* Doing sibling call optimization needs some work, since
     structure_value_addr can be allocated on the stack.
     It does not seem worth the effort since few optimizable
     sibling calls will return a structure.  */
  if (structure_value_addr != NULL_RTX)
    {
      maybe_complain_about_tail_call (exp, "callee returns a structure");
      return false;
    }

  /* Check whether the target is able to optimize the call
     into a sibcall.  */
  if (!targetm.function_ok_for_sibcall (fndecl, exp))
    {
      maybe_complain_about_tail_call (exp,
                                      "target is not able to optimize the"
                                      " call into a sibling call");
      return false;
    }

  /* Functions that do not return exactly once may not be sibcall
     optimized.  */
  if (flags & ECF_RETURNS_TWICE)
    {
      maybe_complain_about_tail_call (exp, "callee returns twice");
      return false;
    }
  if (flags & ECF_NORETURN)
    {
      maybe_complain_about_tail_call (exp, "callee does not return");
      return false;
    }

  if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
    {
      maybe_complain_about_tail_call (exp, "volatile function type");
      return false;
    }

  /* If the called function is nested in the current one, it might access
     some of the caller's arguments, but could clobber them beforehand if
     the argument areas are shared.  */
  if (fndecl && decl_function_context (fndecl) == current_function_decl)
    {
      maybe_complain_about_tail_call (exp, "nested function");
      return false;
    }

  /* If this function requires more stack slots than the current
     function, we cannot change it into a sibling call.
     crtl->args.pretend_args_size is not part of the
     stack allocated by our caller.  */
  if (maybe_gt (args_size.constant,
                crtl->args.size - crtl->args.pretend_args_size))
    {
      maybe_complain_about_tail_call (exp,
                                      "callee required more stack slots"
                                      " than the caller");
      return false;
    }

  /* If the callee pops its own arguments, then it must pop exactly
     the same number of arguments as the current function.  */
  if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
                                                args_size.constant),
                targetm.calls.return_pops_args (current_function_decl,
                                                TREE_TYPE
                                                (current_function_decl),
                                                crtl->args.size)))
    {
      maybe_complain_about_tail_call (exp,
                                      "inconsistent number of"
                                      " popped arguments");
      return false;
    }

  if (!lang_hooks.decls.ok_for_sibcall (fndecl))
    {
      maybe_complain_about_tail_call (exp, "frontend does not support"
                                      " sibling call");
      return false;
    }

  /* All checks passed.  */
  return true;
}

/* Update the stack alignment when a parameter is passed on the stack,
   since an outgoing parameter may require extra alignment on the
   calling function's side.  */

static void
update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
{
  if (crtl->stack_alignment_needed < locate->boundary)
    crtl->stack_alignment_needed = locate->boundary;
  if (crtl->preferred_stack_boundary < locate->boundary)
    crtl->preferred_stack_boundary = locate->boundary;
}

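/* E.g. passing a 32-byte-aligned vector on the stack raises
   locate->boundary to 256 bits, so a caller that otherwise needed only
   128-bit stack alignment has both fields bumped to 256 here.  */
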
5039610b 2604/* Generate all the code for a CALL_EXPR exp
51bbfa0c
RS
2605 and return an rtx for its value.
2606 Store the value in TARGET (specified as an rtx) if convenient.
2607 If the value is stored in TARGET then TARGET is returned.
2608 If IGNORE is nonzero, then we ignore the value of the function call. */
2609
2610rtx
d329e058 2611expand_call (tree exp, rtx target, int ignore)
51bbfa0c 2612{
0a1c58a2
JL
2613 /* Nonzero if we are currently expanding a call. */
2614 static int currently_expanding_call = 0;
2615
51bbfa0c
RS
2616 /* RTX for the function to be called. */
2617 rtx funexp;
0a1c58a2 2618 /* Sequence of insns to perform a normal "call". */
48810515 2619 rtx_insn *normal_call_insns = NULL;
6de9cd9a 2620 /* Sequence of insns to perform a tail "call". */
48810515 2621 rtx_insn *tail_call_insns = NULL;
51bbfa0c
RS
2622 /* Data type of the function. */
2623 tree funtype;
ded9bf77 2624 tree type_arg_types;
28ed065e 2625 tree rettype;
51bbfa0c
RS
2626 /* Declaration of the function being called,
2627 or 0 if the function is computed (not known by name). */
2628 tree fndecl = 0;
57782ad8
MM
2629 /* The type of the function being called. */
2630 tree fntype;
6de9cd9a 2631 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
9a385c2d 2632 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
0a1c58a2 2633 int pass;
51bbfa0c
RS
2634
2635 /* Register in which non-BLKmode value will be returned,
2636 or 0 if no value or if value is BLKmode. */
2637 rtx valreg;
2638 /* Address where we should return a BLKmode value;
2639 0 if value not BLKmode. */
2640 rtx structure_value_addr = 0;
2641 /* Nonzero if that address is being passed by treating it as
2642 an extra, implicit first parameter. Otherwise,
2643 it is passed by being copied directly into struct_value_rtx. */
2644 int structure_value_addr_parm = 0;
078a18a4
SL
2645 /* Holds the value of implicit argument for the struct value. */
2646 tree structure_value_addr_value = NULL_TREE;
51bbfa0c
RS
2647 /* Size of aggregate value wanted, or zero if none wanted
2648 or if we are using the non-reentrant PCC calling convention
2649 or expecting the value in registers. */
5c8e61cf 2650 poly_int64 struct_value_size = 0;
51bbfa0c
RS
2651 /* Nonzero if called function returns an aggregate in memory PCC style,
2652 by returning the address of where to find it. */
2653 int pcc_struct_value = 0;
61f71b34 2654 rtx struct_value = 0;
51bbfa0c
RS
2655
2656 /* Number of actual parameters in this call, including struct value addr. */
2657 int num_actuals;
2658 /* Number of named args. Args after this are anonymous ones
2659 and they must all go on the stack. */
2660 int n_named_args;
078a18a4
SL
2661 /* Number of complex actual arguments that need to be split. */
2662 int num_complex_actuals = 0;
51bbfa0c
RS
2663
2664 /* Vector of information about each argument.
2665 Arguments are numbered in the order they will be pushed,
2666 not the order they are written. */
2667 struct arg_data *args;
2668
2669 /* Total size in bytes of all the stack-parms scanned so far. */
2670 struct args_size args_size;
099e9712 2671 struct args_size adjusted_args_size;
51bbfa0c 2672 /* Size of arguments before any adjustments (such as rounding). */
a20c5714 2673 poly_int64 unadjusted_args_size;
51bbfa0c 2674 /* Data on reg parms scanned so far. */
d5cc9181
JR
2675 CUMULATIVE_ARGS args_so_far_v;
2676 cumulative_args_t args_so_far;
51bbfa0c
RS
2677 /* Nonzero if a reg parm has been scanned. */
2678 int reg_parm_seen;
efd65a8b 2679 /* Nonzero if this is an indirect function call. */
51bbfa0c 2680
f725a3ec 2681 /* Nonzero if we must avoid push-insns in the args for this call.
51bbfa0c
RS
2682 If stack space is allocated for register parameters, but not by the
2683 caller, then it is preallocated in the fixed part of the stack frame.
2684 So the entire argument block must then be preallocated (i.e., we
2685 ignore PUSH_ROUNDING in that case). */
2686
967b4653 2687 int must_preallocate = !targetm.calls.push_argument (0);
51bbfa0c 2688
f72aed24 2689 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
2690 int reg_parm_stack_space = 0;
2691
51bbfa0c
RS
2692 /* Address of space preallocated for stack parms
2693 (on machines that lack push insns), or 0 if space not preallocated. */
2694 rtx argblock = 0;
2695
e384e6b5 2696 /* Mask of ECF_ and ERF_ flags. */
f2d33f13 2697 int flags = 0;
e384e6b5 2698 int return_flags = 0;
f73ad30e 2699#ifdef REG_PARM_STACK_SPACE
51bbfa0c 2700 /* Define the boundary of the register parm stack space that needs to be
b820d2b8
AM
2701 saved, if any. */
2702 int low_to_save, high_to_save;
51bbfa0c
RS
2703 rtx save_area = 0; /* Place that it is saved */
2704#endif
2705
a20c5714 2706 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
51bbfa0c 2707 char *initial_stack_usage_map = stack_usage_map;
a20c5714 2708 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
d9725c41 2709 char *stack_usage_map_buf = NULL;
51bbfa0c 2710
a20c5714 2711 poly_int64 old_stack_allocated;
38afb23f
OH
2712
2713 /* State variables to track stack modifications. */
51bbfa0c 2714 rtx old_stack_level = 0;
38afb23f 2715 int old_stack_arg_under_construction = 0;
a20c5714 2716 poly_int64 old_pending_adj = 0;
51bbfa0c 2717 int old_inhibit_defer_pop = inhibit_defer_pop;
38afb23f
OH
2718
2719 /* Some stack pointer alterations we make are performed via
2720 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2721 which we then also need to save/restore along the way. */
a20c5714 2722 poly_int64 old_stack_pointer_delta = 0;
38afb23f 2723
0a1c58a2 2724 rtx call_fusage;
5039610b 2725 tree addr = CALL_EXPR_FN (exp);
b3694847 2726 int i;
739fb049 2727 /* The alignment of the stack, in bits. */
95899b34 2728 unsigned HOST_WIDE_INT preferred_stack_boundary;
739fb049 2729 /* The alignment of the stack, in bytes. */
95899b34 2730 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
6de9cd9a
DN
2731 /* The static chain value to use for this call. */
2732 rtx static_chain_value;
f2d33f13
JH
2733 /* See if this is "nothrow" function call. */
2734 if (TREE_NOTHROW (exp))
2735 flags |= ECF_NOTHROW;
2736
6de9cd9a
DN
2737 /* See if we can find a DECL-node for the actual function, and get the
2738 function attributes (flags) from the function decl or type node. */
39b0dce7
JM
2739 fndecl = get_callee_fndecl (exp);
2740 if (fndecl)
51bbfa0c 2741 {
57782ad8 2742 fntype = TREE_TYPE (fndecl);
39b0dce7 2743 flags |= flags_from_decl_or_type (fndecl);
e384e6b5 2744 return_flags |= decl_return_flags (fndecl);
51bbfa0c 2745 }
39b0dce7 2746 else
72954a4f 2747 {
28ed065e 2748 fntype = TREE_TYPE (TREE_TYPE (addr));
57782ad8 2749 flags |= flags_from_decl_or_type (fntype);
4c640e26
EB
2750 if (CALL_EXPR_BY_DESCRIPTOR (exp))
2751 flags |= ECF_BY_DESCRIPTOR;
72954a4f 2752 }
28ed065e 2753 rettype = TREE_TYPE (exp);
7393c642 2754
57782ad8 2755 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
61f71b34 2756
8c6a8269
RS
2757 /* Warn if this value is an aggregate type,
2758 regardless of which calling convention we are using for it. */
28ed065e 2759 if (AGGREGATE_TYPE_P (rettype))
ccf08a6e 2760 warning (OPT_Waggregate_return, "function call has aggregate value");
8c6a8269 2761
becfd6e5
KZ
2762 /* If the result of a non looping pure or const function call is
2763 ignored (or void), and none of its arguments are volatile, we can
2764 avoid expanding the call and just evaluate the arguments for
2765 side-effects. */
8c6a8269 2766 if ((flags & (ECF_CONST | ECF_PURE))
becfd6e5 2767 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
8ebf6b99 2768 && (flags & ECF_NOTHROW)
8c6a8269 2769 && (ignore || target == const0_rtx
28ed065e 2770 || TYPE_MODE (rettype) == VOIDmode))
8c6a8269
RS
2771 {
2772 bool volatilep = false;
2773 tree arg;
078a18a4 2774 call_expr_arg_iterator iter;
8c6a8269 2775
078a18a4
SL
2776 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2777 if (TREE_THIS_VOLATILE (arg))
8c6a8269
RS
2778 {
2779 volatilep = true;
2780 break;
2781 }
2782
2783 if (! volatilep)
2784 {
078a18a4
SL
2785 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2786 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8c6a8269
RS
2787 return const0_rtx;
2788 }
2789 }
2790
6f90e075 2791#ifdef REG_PARM_STACK_SPACE
5d059ed9 2792 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
6f90e075 2793#endif
6f90e075 2794
5d059ed9 2795 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
967b4653 2796 && reg_parm_stack_space > 0 && targetm.calls.push_argument (0))
e5e809f4 2797 must_preallocate = 1;
e5e809f4 2798
51bbfa0c
RS
2799 /* Set up a place to return a structure. */
2800
2801 /* Cater to broken compilers. */
d47d0a8d 2802 if (aggregate_value_p (exp, fntype))
51bbfa0c
RS
2803 {
2804 /* This call returns a big structure. */
84b8030f 2805 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
51bbfa0c
RS
2806
2807#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
2808 {
2809 pcc_struct_value = 1;
9e7b1d0a
RS
2810 }
2811#else /* not PCC_STATIC_STRUCT_RETURN */
2812 {
5c8e61cf
RS
2813 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
2814 struct_value_size = -1;
51bbfa0c 2815
391756ad
EB
2816 /* Even if it is semantically safe to use the target as the return
2817 slot, it may be not sufficiently aligned for the return type. */
2818 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
2819 && target
2820 && MEM_P (target)
ffc8b52f
JJ
2821 /* If rettype is addressable, we may not create a temporary.
2822 If target is properly aligned at runtime and the compiler
2823 just doesn't know about it, it will work fine, otherwise it
2824 will be UB. */
2825 && (TREE_ADDRESSABLE (rettype)
2826 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
2827 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
2828 MEM_ALIGN (target)))))
9e7b1d0a
RS
2829 structure_value_addr = XEXP (target, 0);
2830 else
2831 {
9e7b1d0a
RS
2832 /* For variable-sized objects, we must be called with a target
2833 specified. If we were to allocate space on the stack here,
2834 we would have no way of knowing when to free it. */
9474e8ab 2835 rtx d = assign_temp (rettype, 1, 1);
4361b41d 2836 structure_value_addr = XEXP (d, 0);
9e7b1d0a
RS
2837 target = 0;
2838 }
2839 }
2840#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
2841 }
2842
099e9712 2843 /* Figure out the amount to which the stack should be aligned. */
099e9712 2844 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
b255a036
JH
2845 if (fndecl)
2846 {
3dafb85c 2847 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
17b29c0a
L
2848 /* Without automatic stack alignment, we can't increase preferred
2849 stack boundary. With automatic stack alignment, it is
2850 unnecessary since unless we can guarantee that all callers will
2851 align the outgoing stack properly, callee has to align its
2852 stack anyway. */
2853 if (i
2854 && i->preferred_incoming_stack_boundary
2855 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
b255a036
JH
2856 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2857 }
099e9712
JH
2858
2859 /* Operand 0 is a pointer-to-function; get the type of the function. */
09e2bf48 2860 funtype = TREE_TYPE (addr);
366de0ce 2861 gcc_assert (POINTER_TYPE_P (funtype));
099e9712
JH
2862 funtype = TREE_TYPE (funtype);
2863
078a18a4
SL
2864 /* Count whether there are actual complex arguments that need to be split
2865 into their real and imaginary parts. Munge the type_arg_types
2866 appropriately here as well. */
42ba5130 2867 if (targetm.calls.split_complex_arg)
ded9bf77 2868 {
078a18a4
SL
2869 call_expr_arg_iterator iter;
2870 tree arg;
2871 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2872 {
2873 tree type = TREE_TYPE (arg);
2874 if (type && TREE_CODE (type) == COMPLEX_TYPE
2875 && targetm.calls.split_complex_arg (type))
2876 num_complex_actuals++;
2877 }
ded9bf77 2878 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
ded9bf77
AH
2879 }
2880 else
2881 type_arg_types = TYPE_ARG_TYPES (funtype);
2882
099e9712 2883 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 2884 cfun->calls_alloca = 1;
099e9712
JH
2885
2886 /* If struct_value_rtx is 0, it means pass the address
078a18a4
SL
2887 as if it were an extra parameter. Put the argument expression
2888 in structure_value_addr_value. */
61f71b34 2889 if (structure_value_addr && struct_value == 0)
099e9712
JH
2890 {
2891 /* If structure_value_addr is a REG other than
2892 virtual_outgoing_args_rtx, we can use always use it. If it
2893 is not a REG, we must always copy it into a register.
2894 If it is virtual_outgoing_args_rtx, we must copy it to another
2895 register in some cases. */
f8cfc6aa 2896 rtx temp = (!REG_P (structure_value_addr)
099e9712
JH
2897 || (ACCUMULATE_OUTGOING_ARGS
2898 && stack_arg_under_construction
2899 && structure_value_addr == virtual_outgoing_args_rtx)
7ae4ad28 2900 ? copy_addr_to_reg (convert_memory_address
57782ad8 2901 (Pmode, structure_value_addr))
099e9712
JH
2902 : structure_value_addr);
2903
078a18a4
SL
2904 structure_value_addr_value =
2905 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
31db0fe0 2906 structure_value_addr_parm = 1;
099e9712
JH
2907 }
2908
2909 /* Count the arguments and set NUM_ACTUALS. */
078a18a4
SL
2910 num_actuals =
2911 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
099e9712
JH
2912
2913 /* Compute number of named args.
3a4d587b
AM
2914 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2915
2916 if (type_arg_types != 0)
2917 n_named_args
2918 = (list_length (type_arg_types)
2919 /* Count the struct value address, if it is passed as a parm. */
2920 + structure_value_addr_parm);
2921 else
2922 /* If we know nothing, treat all args as named. */
2923 n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming () returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named () returns
     nonzero, and targetm.calls.strict_argument_naming () returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named () returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (args_so_far))
    ;
  else if (type_arg_types != 0
           && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = XCNEWVEC (struct arg_data, num_actuals);

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
                                   n_named_args, exp,
                                   structure_value_addr_value, fndecl, fntype,
                                   args_so_far, reg_parm_stack_space,
                                   &old_stack_level, &old_pending_adj,
                                   &must_preallocate, &flags,
                                   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    must_preallocate = 1;

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
                                                num_actuals, args,
                                                &args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
          || reg_mentioned_p (virtual_outgoing_args_rtx,
                              structure_value_addr))
      && (args_size.var
          || (!ACCUMULATE_OUTGOING_ARGS
              && maybe_ne (args_size.constant, 0))))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.  */
  if (currently_expanding_call++ != 0
      || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
      || args_size.var
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;
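
  /* Illustrative only: in

         extern int g (int);
         int f (int x) { return g (x + 1); }

     the call to g is a sibling-call candidate, whereas in

         int f (int x) { return g (g (x)); }

     the inner call is expanded while currently_expanding_call is
     nonzero (it is itself an argument), so it is skipped here.  */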

  /* Work around buggy C/C++ wrappers around Fortran routines with
     character(len=constant) arguments if the hidden string length arguments
     are passed on the stack; if the callers forget to pass those arguments,
     attempting to tail call in such routines leads to stack corruption.
     Avoid tail calls in functions where at least one such hidden string
     length argument is passed (partially or fully) on the stack in the
     caller and the callee needs to pass any arguments on the stack.
     See PR90329.  */
  if (try_tail_call && maybe_ne (args_size.constant, 0))
    for (tree arg = DECL_ARGUMENTS (current_function_decl);
         arg; arg = DECL_CHAIN (arg))
      if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
        {
          subrtx_iterator::array_type array;
          FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
            if (MEM_P (*iter))
              {
                try_tail_call = 0;
                break;
              }
        }
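
  /* Sketch of the PR90329 situation guarded against above: gfortran
     passes a hidden length argument (typically a size_t, appended after
     the visible arguments) for character(len=N) dummies, so the correct
     C binding of such a routine is roughly

         extern void sub_ (char *s, size_t s_len);

     A buggy wrapper declared as

         extern void sub_ (char *s);

     omits that trailing argument; if the hidden length lives on the
     stack and the routine then tail calls with stack arguments, the
     missing slot corrupts the caller's frame.  */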

  /* If the user has marked the function as requiring tail-call
     optimization, attempt it.  */
  if (must_tail_call)
    try_tail_call = 1;

  /* Check the remaining reasons tail call optimization could fail.  */
  if (try_tail_call)
    try_tail_call = can_implement_as_sibling_call_p (exp,
                                                     structure_value_addr,
                                                     funtype,
                                                     fndecl,
                                                     flags, addr, args_size);

  /* Check if caller and callee disagree in promotion of function
     return value.  */
  if (try_tail_call)
    {
      machine_mode caller_mode, caller_promoted_mode;
      machine_mode callee_mode, callee_promoted_mode;
      int caller_unsignedp, callee_unsignedp;
      tree caller_res = DECL_RESULT (current_function_decl);

      caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
      caller_mode = DECL_MODE (caller_res);
      callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
      callee_mode = TYPE_MODE (TREE_TYPE (funtype));
      caller_promoted_mode
        = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
                                 &caller_unsignedp,
                                 TREE_TYPE (current_function_decl), 1);
      callee_promoted_mode
        = promote_function_mode (TREE_TYPE (funtype), callee_mode,
                                 &callee_unsignedp,
                                 funtype, 1);
      if (caller_mode != VOIDmode
          && (caller_promoted_mode != callee_promoted_mode
              || ((caller_mode != caller_promoted_mode
                   || callee_mode != callee_promoted_mode)
                  && (caller_unsignedp != callee_unsignedp
                      || partial_subreg_p (caller_mode, callee_mode)))))
        {
          try_tail_call = 0;
          maybe_complain_about_tail_call (exp,
                                          "caller and callee disagree in"
                                          " promotion of function"
                                          " return value");
        }
    }
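
  /* Illustrative only, on a hypothetical target that promotes return
     values to word mode: for

         extern short callee (void);
         short caller (void) { return callee (); }

     the sibcall is safe only if caller and callee extend the short
     result to word mode the same way (same promoted mode and
     signedness); otherwise the check above falls back to a normal
     call.  */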

  /* Ensure current function's preferred stack boundary is at least
     what we need.  Stack alignment may also increase preferred stack
     boundary.  */
  for (i = 0; i < num_actuals; i++)
    if (reg_parm_stack_space > 0
        || args[i].reg == 0
        || args[i].partial != 0
        || args[i].pass_on_stack)
      update_stack_alignment_for_call (&args[i].locate);
  if (crtl->preferred_stack_boundary < preferred_stack_boundary)
    crtl->preferred_stack_boundary = preferred_stack_boundary;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  if (flag_callgraph_info)
    record_final_call (fndecl, EXPR_LOCATION (exp));

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      bool normal_failure = false;
      /* We want to emit any pending stack adjustments before the tail
         recursion "call".  That way we know any adjustment after the tail
         recursion call can be ignored if we indeed use the tail
         call expansion.  */
      saved_pending_stack_adjust save;
      rtx_insn *insns, *before_call, *after_args;
      rtx next_arg_reg;

      if (pass == 0)
        {
          /* State variables we need to save and restore between
             iterations.  */
          save_pending_stack_adjust (&save);
        }
      if (pass)
        flags &= ~ECF_SIBCALL;
      else
        flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
         through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

         From this point on, if the sibling call fails, we want to set
         sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
         Also, do all pending adjustments now if there is any chance
         this might be a call to alloca or if we are expanding a sibling
         call sequence.
         Also do the adjustments before a throwing call, otherwise
         exception handling can fail; PR 19225.  */
      if (maybe_ge (pending_stack_adjust, 32)
          || (maybe_ne (pending_stack_adjust, 0)
              && (flags & ECF_MAY_BE_ALLOCA))
          || (maybe_ne (pending_stack_adjust, 0)
              && flag_exceptions && !(flags & ECF_NOTHROW))
          || pass == 0)
        do_pending_stack_adjust ();

      /* Precompute any arguments as needed.  */
      if (pass)
        precompute_arguments (num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
         if a libcall is deleted.  */
      if (pass && (flags & ECF_MALLOC))
        start_sequence ();

      if (pass == 0
          && crtl->stack_protect_guard
          && targetm.stack_protect_runtime_enabled_p ())
        stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
         and constant sizes must be combined, the size may have to be rounded,
         and there may be a minimum required size.  When generating a sibcall
         pattern, do not round up, since we'll be re-using whatever space our
         caller provided.  */
      unadjusted_args_size
        = compute_argument_block_size (reg_parm_stack_space,
                                       &adjusted_args_size,
                                       fndecl, fntype,
                                       (pass == 0 ? 0
                                        : preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
         incoming argument block.  */
      if (pass == 0)
        {
          argblock = crtl->args.internal_arg_pointer;
          if (STACK_GROWS_DOWNWARD)
            argblock
              = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
          else
            argblock
              = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);

          HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
          stored_args_map = sbitmap_alloc (map_size);
          bitmap_clear (stored_args_map);
          stored_args_watermark = HOST_WIDE_INT_M1U;
        }

      /* If we have no actual push instructions, or shouldn't use them,
         make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
        {
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack arg is
                 being constructed at the old stack level.  Pushing the stack
                 gets a clean outgoing argument block.  */
              old_stack_arg_under_construction = stack_arg_under_construction;
              stack_arg_under_construction = 0;
            }
          argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
          if (flag_stack_usage_info)
            current_function_has_unbounded_dynamic_stack_size = 1;
        }
      else
        {
          /* Note that we must go through the motions of allocating an argument
             block even if the size is zero because we may be storing args
             in the area reserved for register arguments, which may be part of
             the stack frame.  */

          poly_int64 needed = adjusted_args_size.constant;

          /* Store the maximum argument space used.  It will be pushed by
             the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
             checking).  */

          crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
                                                  needed);

          if (must_preallocate)
            {
              if (ACCUMULATE_OUTGOING_ARGS)
                {
                  /* Since the stack pointer will never be pushed, it is
                     possible for the evaluation of a parm to clobber
                     something we have already written to the stack.
                     Since most function calls on RISC machines do not use
                     the stack, this is uncommon, but must work correctly.

                     Therefore, we save any area of the stack that was already
                     written and that we are using.  Here we set up to do this
                     by making a new stack usage map from the old one.  The
                     actual save will be done by store_one_arg.

                     Another approach might be to try to reorder the argument
                     evaluations to avoid this conflicting stack usage.  */

                  /* Since we will be writing into the entire argument area,
                     the map must be allocated for its entire size, not just
                     the part that is the responsibility of the caller.  */
                  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
                    needed += reg_parm_stack_space;

                  poly_int64 limit = needed;
                  if (ARGS_GROW_DOWNWARD)
                    limit += 1;

                  /* For polynomial sizes, this is the maximum possible
                     size needed for arguments with a constant size
                     and offset.  */
                  HOST_WIDE_INT const_limit = constant_lower_bound (limit);
                  highest_outgoing_arg_in_use
                    = MAX (initial_highest_arg_in_use, const_limit);

                  free (stack_usage_map_buf);
                  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;

                  if (initial_highest_arg_in_use)
                    memcpy (stack_usage_map, initial_stack_usage_map,
                            initial_highest_arg_in_use);

                  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
                    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                            (highest_outgoing_arg_in_use
                             - initial_highest_arg_in_use));
                  needed = 0;

                  /* The address of the outgoing argument list must not be
                     copied to a register here, because argblock would be left
                     pointing to the wrong place after the call to
                     allocate_dynamic_stack_space below.  */

                  argblock = virtual_outgoing_args_rtx;
                }
              else
                {
                  /* Try to reuse some or all of the pending_stack_adjust
                     to get this space.  */
                  if (inhibit_defer_pop == 0
                      && (combine_pending_stack_adjustment_and_call
                          (&needed,
                           unadjusted_args_size,
                           &adjusted_args_size,
                           preferred_unit_stack_boundary)))
                    {
                      /* combine_pending_stack_adjustment_and_call computes
                         an adjustment before the arguments are allocated.
                         Account for them and see whether or not the stack
                         needs to go up or down.  */
                      needed = unadjusted_args_size - needed;

                      /* Checked by
                         combine_pending_stack_adjustment_and_call.  */
                      gcc_checking_assert (ordered_p (needed, 0));
                      if (maybe_lt (needed, 0))
                        {
                          /* We're releasing stack space.  */
                          /* ??? We can avoid any adjustment at all if we're
                             already aligned.  FIXME.  */
                          pending_stack_adjust = -needed;
                          do_pending_stack_adjust ();
                          needed = 0;
                        }
                      else
                        /* We need to allocate space.  We'll do that in
                           push_block below.  */
                        pending_stack_adjust = 0;
                    }

                  /* Special case this because overhead of `push_block' in
                     this case is non-trivial.  */
                  if (known_eq (needed, 0))
                    argblock = virtual_outgoing_args_rtx;
                  else
                    {
                      rtx needed_rtx = gen_int_mode (needed, Pmode);
                      argblock = push_block (needed_rtx, 0, 0);
                      if (ARGS_GROW_DOWNWARD)
                        argblock = plus_constant (Pmode, argblock, needed);
                    }

                  /* We only really need to call `copy_to_reg' in the case
                     where push insns are going to be used to pass ARGBLOCK
                     to a function call in ARGS.  In that case, the stack
                     pointer changes value from the allocation point to the
                     call point, and hence the value of
                     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
                     as well always do it.  */
                  argblock = copy_to_reg (argblock);
                }
            }
        }

      if (ACCUMULATE_OUTGOING_ARGS)
        {
          /* The save/restore code in store_one_arg handles all
             cases except one: a constructor call (including a C
             function returning a BLKmode struct) to initialize
             an argument.  */
          if (stack_arg_under_construction)
            {
              rtx push_size
                = (gen_int_mode
                   (adjusted_args_size.constant
                    + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
                                                      : TREE_TYPE (fndecl))
                       ? 0 : reg_parm_stack_space), Pmode));
              if (old_stack_level == 0)
                {
                  emit_stack_save (SAVE_BLOCK, &old_stack_level);
                  old_stack_pointer_delta = stack_pointer_delta;
                  old_pending_adj = pending_stack_adjust;
                  pending_stack_adjust = 0;
                  /* stack_arg_under_construction says whether a stack
                     arg is being constructed at the old stack level.
                     Pushing the stack gets a clean outgoing argument
                     block.  */
                  old_stack_arg_under_construction
                    = stack_arg_under_construction;
                  stack_arg_under_construction = 0;
                  /* Make a new map for the new argument list.  */
                  free (stack_usage_map_buf);
                  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;
                  highest_outgoing_arg_in_use = 0;
                  stack_usage_watermark = HOST_WIDE_INT_M1U;
                }
              /* We can pass TRUE as the 4th argument because we just
                 saved the stack pointer and will restore it right after
                 the call.  */
              allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
                                            -1, true);
            }

          /* If argument evaluation might modify the stack pointer,
             copy the address of the argument list to a register.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].pass_on_stack)
              {
                argblock = copy_addr_to_reg (argblock);
                break;
              }
        }
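
      /* Sketch of the constructor-call case handled above:

             struct big { int a[16]; };
             extern struct big make_big (void);
             extern void use_big (struct big);

             use_big (make_big ());

         MAKE_BIG's BLKmode result may be constructed directly in
         USE_BIG's outgoing argument area, so the stack pointer is
         saved before the space is allocated dynamically.  */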

      compute_argument_addresses (args, argblock, num_actuals);

      /* Stack is properly aligned, pops can't safely be deferred during
         the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Precompute all register parameters.  It isn't safe to compute
         anything once we have started filling any specific hard regs.
         TLS symbols sometimes need a call to resolve.  Precompute
         register parameters before any stack pointer manipulation
         to avoid unaligned stack in the called function.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      OK_DEFER_POP;

      /* Perform stack alignment before the first push (the last arg).  */
      if (argblock == 0
          && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
          && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
        {
          /* When the stack adjustment is pending, we get better code
             by combining the adjustments.  */
          if (maybe_ne (pending_stack_adjust, 0)
              && ! inhibit_defer_pop
              && (combine_pending_stack_adjustment_and_call
                  (&pending_stack_adjust,
                   unadjusted_args_size,
                   &adjusted_args_size,
                   preferred_unit_stack_boundary)))
            do_pending_stack_adjust ();
          else if (argblock == 0)
            anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
                                             - unadjusted_args_size,
                                             Pmode));
        }
      /* Now that the stack is properly aligned, pops can't safely
         be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Record the maximum pushed stack space size.  We need to delay
         doing it this far to take into account the optimization done
         by combine_pending_stack_adjustment_and_call.  */
      if (flag_stack_usage_info
          && !ACCUMULATE_OUTGOING_ARGS
          && pass
          && adjusted_args_size.var == 0)
        {
          poly_int64 pushed = (adjusted_args_size.constant
                               + pending_stack_adjust);
          current_function_pushed_stack_size
            = upper_bound (current_function_pushed_stack_size, pushed);
        }

      funexp = rtx_for_function_call (fndecl, addr);

      if (CALL_EXPR_STATIC_CHAIN (exp))
        static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
      else
        static_chain_value = 0;
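
      /* CALL_EXPR_STATIC_CHAIN is set, for instance, for calls to GNU C
         nested functions:

             int f (int x)
             {
               int g (int y) { return x + y; }
               return g (1);
             }

         Here the call to g carries a static chain pointing into f's
         frame, expanded just above into static_chain_value.  */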

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
         is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
        save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                              &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
         These come before register parms, since they can require block-moves,
         which could clobber the registers used for register parms.
         Parms which have partial registers are not stored here,
         but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
        {
          if (args[i].reg == 0 || args[i].pass_on_stack)
            {
              rtx_insn *before_arg = get_last_insn ();

              /* We don't allow passing huge (> 2^30 B) arguments
                 by value.  It would cause an overflow later on.  */
              if (constant_lower_bound (adjusted_args_size.constant)
                  >= (1 << (HOST_BITS_PER_INT - 2)))
                {
                  sorry ("passing too large argument on stack");
                  /* Don't worry about stack clean-up.  */
                  if (pass == 0)
                    sibcall_failure = 1;
                  else
                    normal_failure = true;
                  continue;
                }

              if (store_one_arg (&args[i], argblock, flags,
                                 adjusted_args_size.var != 0,
                                 reg_parm_stack_space)
                  || (pass == 0
                      && check_sibcall_argument_overlap (before_arg,
                                                         &args[i], 1)))
                sibcall_failure = 1;
            }

          if (args[i].stack)
            call_fusage
              = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                                   gen_rtx_USE (VOIDmode, args[i].stack),
                                   call_fusage);
        }

      /* If we have a parm that is passed in registers but not in memory
         and whose alignment does not permit a direct copy into registers,
         make a group of pseudos that correspond to each register that we
         will later fill.  */
      if (STRICT_ALIGNMENT)
        store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
         This is the last place a block-move can happen.  */
      if (reg_parm_seen)
        for (i = 0; i < num_actuals; i++)
          if (args[i].partial != 0 && ! args[i].pass_on_stack)
            {
              rtx_insn *before_arg = get_last_insn ();

              /* On targets with weird calling conventions (e.g. PA) it's
                 hard to ensure that all cases of argument overlap between
                 stack and registers work.  Play it safe and bail out.  */
              if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
                {
                  sibcall_failure = 1;
                  break;
                }

              if (store_one_arg (&args[i], argblock, flags,
                                 adjusted_args_size.var != 0,
                                 reg_parm_stack_space)
                  || (pass == 0
                      && check_sibcall_argument_overlap (before_arg,
                                                         &args[i], 1)))
                sibcall_failure = 1;
            }

      bool any_regs = false;
      for (i = 0; i < num_actuals; i++)
        if (args[i].reg != NULL_RTX)
          {
            any_regs = true;
            targetm.calls.call_args (args[i].reg, funtype);
          }
      if (!any_regs)
        targetm.calls.call_args (pc_rtx, funtype);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (rettype) != VOIDmode
          && ! structure_value_addr)
        {
          if (pcc_struct_value)
            valreg = hard_function_value (build_pointer_type (rettype),
                                          fndecl, NULL, (pass == 0));
          else
            valreg = hard_function_value (rettype, fndecl, fntype,
                                          (pass == 0));

          /* If VALREG is a PARALLEL whose first member has a zero
             offset, use that.  This is for targets such as m68k that
             return the same value in multiple places.  */
          if (GET_CODE (valreg) == PARALLEL)
            {
              rtx elem = XVECEXP (valreg, 0, 0);
              rtx where = XEXP (elem, 0);
              rtx offset = XEXP (elem, 1);
              if (offset == const0_rtx
                  && GET_MODE (where) == GET_MODE (valreg))
                valreg = where;
            }
        }

      /* If register arguments require space on the stack and stack space
         was not preallocated, allocate stack space here for arguments
         passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
          && !ACCUMULATE_OUTGOING_ARGS
          && must_preallocate == 0 && reg_parm_stack_space > 0)
        anti_adjust_stack (GEN_INT (reg_parm_stack_space));

      /* Pass the function the address in which to return a
         structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
        {
          structure_value_addr
            = convert_memory_address (Pmode, structure_value_addr);
          emit_move_insn (struct_value,
                          force_reg (Pmode,
                                     force_operand (structure_value_addr,
                                                    NULL_RTX)));

          if (REG_P (struct_value))
            use_reg (&call_fusage, struct_value);
        }

      after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
                                     static_chain_value, &call_fusage,
                                     reg_parm_seen, flags);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
                                pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
         later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
         with register windows this should be the incoming register.  */
      if (pass == 0)
        next_arg_reg = targetm.calls.function_incoming_arg
          (args_so_far, function_arg_info::end_marker ());
      else
        next_arg_reg = targetm.calls.function_arg
          (args_so_far, function_arg_info::end_marker ());

      if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
        {
          int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
          arg_nr = num_actuals - arg_nr - 1;
          if (arg_nr >= 0
              && arg_nr < num_actuals
              && args[arg_nr].reg
              && valreg
              && REG_P (valreg)
              && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
            call_fusage
              = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
                                   gen_rtx_SET (valreg, args[arg_nr].reg),
                                   call_fusage);
        }
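
      /* ERF_RETURNS_ARG describes functions that return one of their
         own arguments; for example

             void *p = memcpy (dst, src, n);

         yields p == dst.  The EXPR_LIST built above records that the
         value register is a copy of that argument's register, which
         the RTL optimizers can exploit.  */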
      /* All arguments and registers used for the call must be set up by
         now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
                  || multiple_p (stack_pointer_delta,
                                 preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
                   adjusted_args_size.constant, struct_value_size,
                   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
                   flags, args_so_far);

      if (flag_ipa_ra)
        {
          rtx_call_insn *last;
          rtx datum = NULL_RTX;
          if (fndecl != NULL_TREE)
            {
              datum = XEXP (DECL_RTL (fndecl), 0);
              gcc_assert (datum != NULL_RTX
                          && GET_CODE (datum) == SYMBOL_REF);
            }
          last = last_call_insn ();
          add_reg_note (last, REG_CALL_DECL, datum);
        }

      /* If the call setup or the call itself overlaps with anything
         of the argument setup we probably clobbered our call address.
         In that case we can't do sibcalls.  */
      if (pass == 0
          && check_sibcall_argument_overlap (after_args, 0, 0))
        sibcall_failure = 1;

      /* If a non-BLKmode value is returned at the most significant end
         of a register, shift the register right by the appropriate amount
         and update VALREG accordingly.  BLKmode values are handled by the
         group load/store machinery below.  */
      if (!structure_value_addr
          && !pcc_struct_value
          && TYPE_MODE (rettype) != VOIDmode
          && TYPE_MODE (rettype) != BLKmode
          && REG_P (valreg)
          && targetm.calls.return_in_msb (rettype))
        {
          if (shift_return_value (TYPE_MODE (rettype), false, valreg))
            sibcall_failure = 1;
          valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
        }

      if (pass && (flags & ECF_MALLOC))
        {
          rtx temp = gen_reg_rtx (GET_MODE (valreg));
          rtx_insn *last, *insns;

          /* The return value from a malloc-like function is a pointer.  */
          if (TREE_CODE (rettype) == POINTER_TYPE)
            mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);

          emit_move_insn (temp, valreg);

          /* The return value from a malloc-like function cannot alias
             anything else.  */
          last = get_last_insn ();
          add_reg_note (last, REG_NOALIAS, temp);

          /* Write out the sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          valreg = temp;
        }
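
      /* ECF_MALLOC typically comes from declarations such as

             extern void *xmalloc (size_t) __attribute__ ((malloc));

         The REG_NOALIAS note attached above tells alias analysis that
         the returned pointer is distinct from every other object.  */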

      /* For calls to `setjmp', etc., inform
         function.cc:setjmp_warnings that it should complain if
         nonvolatile values are live.  For functions that cannot
         return, inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
        {
          /* The barrier must be emitted
             immediately after the CALL_INSN.  Some ports emit more
             than just a CALL_INSN above, so we must search for it here.  */

          rtx_insn *last = get_last_insn ();
          while (!CALL_P (last))
            {
              last = PREV_INSN (last);
              /* There was no CALL_INSN?  */
              gcc_assert (last != before_call);
            }

          emit_barrier_after (last);

          /* Stack adjustments after a noreturn call are dead code.
             However when NO_DEFER_POP is in effect, we must preserve
             stack_pointer_delta.  */
          if (inhibit_defer_pop == 0)
            {
              stack_pointer_delta = old_stack_allocated;
              pending_stack_adjust = 0;
            }
        }
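
      /* For example, after

             extern void fatal (const char *) __attribute__ ((noreturn));
             fatal ("unrecoverable");

         the barrier emitted above marks everything that follows as
         unreachable, so any pending stack adjustment would be dead
         code.  */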

      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (rettype) == VOIDmode
          || ignore)
        target = const0_rtx;
      else if (structure_value_addr)
        {
          if (target == 0 || !MEM_P (target))
            {
              target
                = gen_rtx_MEM (TYPE_MODE (rettype),
                               memory_address (TYPE_MODE (rettype),
                                               structure_value_addr));
              set_mem_attributes (target, rettype, 1);
            }
        }
      else if (pcc_struct_value)
        {
          /* This is the special C++ case where we need to
             know what the true target was.  We take care to
             never use this value more than once in one expression.  */
          target = gen_rtx_MEM (TYPE_MODE (rettype),
                                copy_to_reg (valreg));
          set_mem_attributes (target, rettype, 1);
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (target == 0)
            target = emit_group_move_into_temps (valreg);
          else if (rtx_equal_p (target, valreg))
            ;
          else if (GET_CODE (target) == PARALLEL)
            /* Handle the result of an emit_group_move_into_temps
               call in the previous pass.  */
            emit_group_move (target, valreg);
          else
            emit_group_store (target, valreg, rettype,
                              int_size_in_bytes (rettype));
        }
      else if (target
               && GET_MODE (target) == TYPE_MODE (rettype)
               && GET_MODE (target) == GET_MODE (valreg))
        {
          bool may_overlap = false;

          /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
             reg to a plain register.  */
          if (!REG_P (target) || HARD_REGISTER_P (target))
            valreg = avoid_likely_spilled_reg (valreg);

          /* If TARGET is a MEM in the argument area, and we have
             saved part of the argument area, then we can't store
             directly into TARGET as it may get overwritten when we
             restore the argument save area below.  Don't work too
             hard though and simply force TARGET to a register if it
             is a MEM; the optimizer is quite likely to sort it out.  */
          if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
            for (i = 0; i < num_actuals; i++)
              if (args[i].save_area)
                {
                  may_overlap = true;
                  break;
                }

          if (may_overlap)
            target = copy_to_reg (valreg);
          else
            {
              /* TARGET and VALREG cannot be equal at this point
                 because the latter would not have
                 REG_FUNCTION_VALUE_P true, while the former would if
                 it were referring to the same register.

                 If they refer to the same register, this move will be
                 a no-op, except when function inlining is being
                 done.  */
              emit_move_insn (target, valreg);

              /* If we are setting a MEM, this code must be executed.
                 Since it is emitted after the call insn, sibcall
                 optimization cannot be performed in that case.  */
              if (MEM_P (target))
                sibcall_failure = 1;
            }
        }
      else
        target = copy_to_reg (avoid_likely_spilled_reg (valreg));

      /* If we promoted this return value, make the proper SUBREG.
         TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
          && TYPE_MODE (rettype) != BLKmode
          && GET_MODE (target) != TYPE_MODE (rettype))
        {
          tree type = rettype;
          int unsignedp = TYPE_UNSIGNED (type);
          machine_mode ret_mode = TYPE_MODE (type);
          machine_mode pmode;

          /* Ensure we promote as expected, and get the new unsignedness.  */
          pmode = promote_function_mode (type, ret_mode, &unsignedp,
                                         funtype, 1);
          gcc_assert (GET_MODE (target) == pmode);

          if (SCALAR_INT_MODE_P (pmode)
              && SCALAR_FLOAT_MODE_P (ret_mode)
              && known_gt (GET_MODE_SIZE (pmode), GET_MODE_SIZE (ret_mode)))
            target = convert_wider_int_to_float (ret_mode, pmode, target);
          else
            {
              target = gen_lowpart_SUBREG (ret_mode, target);
              SUBREG_PROMOTED_VAR_P (target) = 1;
              SUBREG_PROMOTED_SET (target, unsignedp);
            }
        }

      /* If size of args is variable or this was a constructor call for a stack
         argument, restore saved stack-pointer value.  */

      if (old_stack_level)
        {
          rtx_insn *prev = get_last_insn ();

          emit_stack_restore (SAVE_BLOCK, old_stack_level);
          stack_pointer_delta = old_stack_pointer_delta;

          fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);

          pending_stack_adjust = old_pending_adj;
          old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
          stack_arg_under_construction = old_stack_arg_under_construction;
          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
          stack_usage_watermark = initial_stack_usage_watermark;
          sibcall_failure = 1;
        }
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
        {
#ifdef REG_PARM_STACK_SPACE
          if (save_area)
            restore_fixed_argument_area (save_area, argblock,
                                         high_to_save, low_to_save);
#endif

          /* If we saved any argument areas, restore them.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].save_area)
              {
                machine_mode save_mode = GET_MODE (args[i].save_area);
                rtx stack_area
                  = gen_rtx_MEM (save_mode,
                                 memory_address (save_mode,
                                                 XEXP (args[i].stack_slot, 0)));

                if (save_mode != BLKmode)
                  emit_move_insn (stack_area, args[i].save_area);
                else
                  emit_block_move (stack_area, args[i].save_area,
                                   (gen_int_mode
                                    (args[i].locate.size.constant, Pmode)),
                                   BLOCK_OP_CALL_PARM);
              }

          highest_outgoing_arg_in_use = initial_highest_arg_in_use;
          stack_usage_map = initial_stack_usage_map;
          stack_usage_watermark = initial_stack_usage_watermark;
        }

      /* If this was alloca, record the new stack level.  */
      if (flags & ECF_MAY_BE_ALLOCA)
        record_new_stack_level ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
        free (args[i].aligned_regs);

      targetm.calls.end_call_args ();

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
        {
          tail_call_insns = insns;

          /* Restore the pending stack adjustment now that we have
             finished generating the sibling call sequence.  */

          restore_pending_stack_adjust (&save);

          /* Prepare arg structure for next iteration.  */
          for (i = 0; i < num_actuals; i++)
            {
              args[i].value = 0;
              args[i].aligned_regs = 0;
              args[i].stack = 0;
            }

          sbitmap_free (stored_args_map);
          internal_arg_pointer_exp_state.scan_start = NULL;
          internal_arg_pointer_exp_state.cache.release ();
        }
      else
        {
          normal_call_insns = insns;

          /* Verify that we've deallocated all the stack we used.  */
          gcc_assert ((flags & ECF_NORETURN)
                      || normal_failure
                      || known_eq (old_stack_allocated,
                                   stack_pointer_delta
                                   - pending_stack_adjust));
          if (normal_failure)
            normal_call_insns = NULL;
        }

      /* If something prevents making this a sibling call,
         zero out the sequence.  */
      if (sibcall_failure)
        tail_call_insns = NULL;
      else
        break;
    }

  /* If tail call production succeeded, we need to remove REG_EQUIV notes on
     arguments too, as argument area is now clobbered by the call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      crtl->tail_call_emit = true;
    }
  else
    {
      emit_insn (normal_call_insns);
      if (try_tail_call)
        /* Ideally we'd emit a message for all of the ways that it could
           have failed.  */
        maybe_complain_about_tail_call (exp, "tail call production failed");
    }

  currently_expanding_call--;

  free (stack_usage_map_buf);
  free (args);
  return target;
}

/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx_insn *insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      /* There are never REG_EQUIV notes for the incoming arguments
         after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
          && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
        break;

      note = find_reg_note (insn, REG_EQUIV, 0);
      if (note)
        remove_note (insn, note);
      note = find_reg_note (insn, REG_EQUIV, 0);
      gcc_assert (!note);
    }
}

/* Traverse a list of TYPES and expand all complex types into their
   components.  */
static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (complex_type))
        {
          tree next, imag;

          /* Rewrite complex type with component type.  */
          TREE_VALUE (p) = TREE_TYPE (complex_type);
          next = TREE_CHAIN (p);

          /* Add another component type for the imaginary part.  */
          imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
          TREE_CHAIN (p) = imag;
          TREE_CHAIN (imag) = next;

          /* Skip the newly created node.  */
          p = TREE_CHAIN (p);
        }
    }

  return types;
}
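
/* Illustrative only: on a target whose split_complex_arg hook returns
   true for _Complex double, the parameter list of

       void f (_Complex double z);

   is rewritten by the function above as if it were

       void f (double z_re, double z_im);

   i.e. each complex entry is replaced by two entries of the component
   type.  */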

/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as ARGS.
   Store the return value if RETVAL is nonzero: store it in VALUE if
   VALUE is nonnull, otherwise pick a convenient location.  In either
   case return the location of the stored value.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
   other types of library calls.  */
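
/* A typical use, as a sketch only (the convenience wrappers live in
   rtl.h as emit_library_call and emit_library_call_value): expanding a
   DImode division into a call to a libgcc helper might do

       rtx quot = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                           DImode, op0, DImode, op1, DImode);

   which reaches this function with nargs == 2 and ARGS holding the two
   (value, mode) pairs; LIBFUNC, OP0 and OP1 are placeholder names.  */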

rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
                           enum libcall_type fn_type,
                           machine_mode outmode, int nargs, rtx_mode_t *args)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  /* Todo, choose the correct decl type of orgfun.  Sadly this information
     isn't present here, so we default to native calling abi here.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ?  */
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ?  */
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct arg
  {
    rtx value;
    machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  poly_int64 struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  poly_int64 needed;
  rtx_insn *before_call;
  bool have_push_fusage;
  tree tfom;			/* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags &= ~ECF_NOTHROW;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          rtx pointer_reg
            = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
          mem_value = gen_rtx_MEM (outmode, pointer_reg);
          pcc_struct_value = 1;
          if (value == 0)
            value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
          struct_value_size = GET_MODE_SIZE (outmode);
          if (value != 0 && MEM_P (value))
            mem_value = value;
          else
            mem_value = assign_temp (tfom, 1, 1);
#endif
          /* This call returns a big structure.  */
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
        }
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
          && !(CONSTANT_P (addr)
               && targetm.legitimate_constant_p (Pmode, addr)))
        addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      function_arg_info ptr_arg (Pmode, /*named=*/true);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
      gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           reg_parm_stack_space, 0,
                           NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, ptr_arg);

      count++;
    }

  for (unsigned int i = 0; count < nargs; i++, count++)
    {
      rtx val = args[i].first;
      function_arg_info arg (args[i].second, /*named=*/true);
      int unsigned_p = 0;

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      gcc_assert (arg.mode != BLKmode
                  && (GET_MODE (val) == arg.mode
                      || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
          && !(CONSTANT_P (val)
               && targetm.legitimate_constant_p (arg.mode, val)))
        val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far_v, arg))
        {
          rtx slot;
          int must_copy = !reference_callee_copied (&args_so_far_v, arg);

          /* If this was a CONST function, it is now PURE since it now
             reads memory.  */
          if (flags & ECF_CONST)
            {
              flags &= ~ECF_CONST;
              flags |= ECF_PURE;
            }

          if (MEM_P (val) && !must_copy)
            {
              tree val_expr = MEM_EXPR (val);
              if (val_expr)
                mark_addressable (val_expr);
              slot = val;
            }
          else
            {
              slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
                                  1, 1);
              emit_move_insn (slot, val);
            }

          call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_USE (VOIDmode, slot),
                                           call_fusage);
          if (must_copy)
            call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              slot),
                                             call_fusage);

          arg.mode = Pmode;
          arg.pass_by_reference = true;
          val = force_operand (XEXP (slot, 0), NULL_RTX);
        }

      arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
                                        NULL_TREE, 0);
      argvec[count].mode = arg.mode;
      argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
                                           unsigned_p);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);

      argvec[count].partial
        = targetm.calls.arg_partial_bytes (args_so_far, arg);

      if (argvec[count].reg == 0
          || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        {
          locate_and_pad_parm (arg.mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                               1,
#else
                               argvec[count].reg != 0,
#endif
                               reg_parm_stack_space, argvec[count].partial,
                               NULL_TREE, &args_size, &argvec[count].locate);
          args_size.constant += argvec[count].locate.size.constant;
          gcc_assert (!argvec[count].locate.size.var);
        }
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        argvec[count].locate.where_pad =
          BLOCK_REG_PADDING (arg.mode, NULL_TREE,
                             known_le (GET_MODE_SIZE (arg.mode),
                                       UNITS_PER_WORD));
#endif

      targetm.calls.function_arg_advance (args_so_far, arg);
    }

  for (int i = 0; i < nargs; i++)
    if (reg_parm_stack_space > 0
        || argvec[i].reg == 0
        || argvec[i].partial != 0)
      update_stack_alignment_for_call (&argvec[i].locate);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (aligned_upper_bound (args_size.constant
                                             + stack_pointer_delta,
                                             STACK_BYTES)
                        - stack_pointer_delta);
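
  /* Numeric sketch of the rounding above: with STACK_BYTES == 16,
     stack_pointer_delta == 8 and 20 bytes of argument data,

         aligned_upper_bound (20 + 8, 16) - 8 == 32 - 8 == 24

     so 24 bytes are reserved and the stack pointer ends up 16-byte
     aligned again after the pushes.  */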
3c0fca12 4378
a20c5714
RS
4379 args_size.constant = upper_bound (args_size.constant,
4380 reg_parm_stack_space);
3c0fca12 4381
5d059ed9 4382 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 4383 args_size.constant -= reg_parm_stack_space;
3c0fca12 4384
a20c5714
RS
4385 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4386 args_size.constant);
3c0fca12 4387
a11e0df4 4388 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
d3c12306 4389 {
a20c5714
RS
4390 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4391 current_function_pushed_stack_size
4392 = upper_bound (current_function_pushed_stack_size, pushed);
d3c12306
EB
4393 }
4394
f73ad30e
JH
4395 if (ACCUMULATE_OUTGOING_ARGS)
4396 {
4397 /* Since the stack pointer will never be pushed, it is possible for
4398 the evaluation of a parm to clobber something we have already
4399 written to the stack. Since most function calls on RISC machines
4400 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 4401
f73ad30e
JH
4402 Therefore, we save any area of the stack that was already written
4403 and that we are using. Here we set up to do this by making a new
4404 stack usage map from the old one.
3c0fca12 4405
f73ad30e
JH
4406 Another approach might be to try to reorder the argument
4407 evaluations to avoid this conflicting stack usage. */
3c0fca12 4408
f73ad30e 4409 needed = args_size.constant;
3c0fca12 4410
f73ad30e
JH
4411 /* Since we will be writing into the entire argument area, the
4412 map must be allocated for its entire size, not just the part that
4413 is the responsibility of the caller. */
5d059ed9 4414 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 4415 needed += reg_parm_stack_space;
3c0fca12 4416
a20c5714 4417 poly_int64 limit = needed;
6dad9361 4418 if (ARGS_GROW_DOWNWARD)
a20c5714
RS
4419 limit += 1;
4420
4421 /* For polynomial sizes, this is the maximum possible size needed
4422 for arguments with a constant size and offset. */
4423 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4424 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4425 const_limit);
6dad9361 4426
5ed6ace5 4427 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 4428 stack_usage_map = stack_usage_map_buf;
3c0fca12 4429
f73ad30e 4430 if (initial_highest_arg_in_use)
2e09e75a
JM
4431 memcpy (stack_usage_map, initial_stack_usage_map,
4432 initial_highest_arg_in_use);
3c0fca12 4433
f73ad30e 4434 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 4435 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
4436 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4437 needed = 0;
3c0fca12 4438
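/* The map just built is one byte per byte of outgoing argument space.
   A minimal standalone sketch of the protocol it supports (a toy: the
   real helpers, mark_stack_region_used and stack_region_maybe_used_p
   used below, also cope with polynomial offsets and sizes):  */
#if 0
static char toy_usage_map[64];

static void
toy_mark_region (int lo, int hi)
{
  for (int i = lo; i < hi; i++)
    toy_usage_map[i] = 1;	/* byte I now holds a stored argument */
}

static bool
toy_region_used_p (int lo, int hi)
{
  for (int i = lo; i < hi; i++)
    if (toy_usage_map[i])
      return true;		/* writing here would clobber an argument */
  return false;
}
#endif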
c39ada04 4439 /* We must be careful to use virtual regs before they're instantiated,
c22cacf3 4440 and real regs afterwards. Loop optimization, for example, can create
c39ada04
DD
4441 new libcalls after we've instantiated the virtual regs, and if we
4442 use virtuals anyway, they won't match the rtl patterns. */
3c0fca12 4443
c39ada04 4444 if (virtuals_instantiated)
0a81f074
RS
4445 argblock = plus_constant (Pmode, stack_pointer_rtx,
4446 STACK_POINTER_OFFSET);
c39ada04
DD
4447 else
4448 argblock = virtual_outgoing_args_rtx;
f73ad30e
JH
4449 }
4450 else
4451 {
967b4653 4452 if (!targetm.calls.push_argument (0))
a20c5714 4453 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
f73ad30e 4454 }
3c0fca12 4455
3d9684ae 4456 /* We push args individually in reverse order, performing stack
3c0fca12 4457 alignment before the first push (the last arg). */
3d9684ae 4458 if (argblock == 0)
a20c5714
RS
4459 anti_adjust_stack (gen_int_mode (args_size.constant
4460 - original_args_size.constant,
4461 Pmode));
3c0fca12 4462
3d9684ae 4463 argnum = nargs - 1;
3c0fca12 4464
f73ad30e
JH
4465#ifdef REG_PARM_STACK_SPACE
4466 if (ACCUMULATE_OUTGOING_ARGS)
4467 {
4468 /* The argument list is the property of the called routine and it
4469 may clobber it. If the fixed area has been used for previous
b820d2b8
AM
4470 parameters, we must save and restore it. */
4471 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4472 &low_to_save, &high_to_save);
3c0fca12
RH
4473 }
4474#endif
f725a3ec 4475
2f21e1ba
BS
4476 /* When expanding a normal call, args are stored in push order,
4477 which is the reverse of what we have here. */
4478 bool any_regs = false;
4479 for (int i = nargs; i-- > 0; )
4480 if (argvec[i].reg != NULL_RTX)
4481 {
4482 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4483 any_regs = true;
4484 }
4485 if (!any_regs)
4486 targetm.calls.call_args (pc_rtx, NULL_TREE);
4487
3c0fca12
RH
4488 /* Push the args that need to be pushed. */
4489
0ed4bf92
BS
4490 have_push_fusage = false;
4491
3c0fca12
RH
4492 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4493 are to be pushed. */
3d9684ae 4494 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 4495 {
ef4bddc2 4496 machine_mode mode = argvec[argnum].mode;
b3694847 4497 rtx val = argvec[argnum].value;
3c0fca12
RH
4498 rtx reg = argvec[argnum].reg;
4499 int partial = argvec[argnum].partial;
6bdf8c2e 4500 unsigned int parm_align = argvec[argnum].locate.boundary;
a20c5714 4501 poly_int64 lower_bound = 0, upper_bound = 0;
3c0fca12
RH
4502
4503 if (! (reg != 0 && partial == 0))
4504 {
2b1c5433
JJ
4505 rtx use;
4506
f73ad30e
JH
4507 if (ACCUMULATE_OUTGOING_ARGS)
4508 {
f8a097cd
JH
4509 /* If this is being stored into a pre-allocated, fixed-size,
4510 stack area, save any previous data at that location. */
3c0fca12 4511
6dad9361
TS
4512 if (ARGS_GROW_DOWNWARD)
4513 {
4514 /* stack_slot is negative, but we want to index stack_usage_map
4515 with positive values. */
4516 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4517 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4518 }
4519 else
4520 {
4521 lower_bound = argvec[argnum].locate.slot_offset.constant;
4522 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4523 }
3c0fca12 4524
a20c5714
RS
4525 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4526 reg_parm_stack_space))
f73ad30e 4527 {
e7949876 4528 /* We need to make a save area. */
a20c5714 4529 poly_uint64 size
e7949876 4530 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
ef4bddc2 4531 machine_mode save_mode
f4b31647 4532 = int_mode_for_size (size, 1).else_blk ();
e7949876 4533 rtx adr
0a81f074 4534 = plus_constant (Pmode, argblock,
e7949876 4535 argvec[argnum].locate.offset.constant);
f73ad30e 4536 rtx stack_area
e7949876 4537 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
f73ad30e 4538
9778f2f8
JH
4539 if (save_mode == BLKmode)
4540 {
4541 argvec[argnum].save_area
4542 = assign_stack_temp (BLKmode,
9474e8ab
MM
4543 argvec[argnum].locate.size.constant
4544 );
9778f2f8 4545
1a8cb155
RS
4546 emit_block_move (validize_mem
4547 (copy_rtx (argvec[argnum].save_area)),
c22cacf3 4548 stack_area,
a20c5714
RS
4549 (gen_int_mode
4550 (argvec[argnum].locate.size.constant,
4551 Pmode)),
9778f2f8
JH
4552 BLOCK_OP_CALL_PARM);
4553 }
4554 else
4555 {
4556 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4557
4558 emit_move_insn (argvec[argnum].save_area, stack_area);
4559 }
f73ad30e 4560 }
3c0fca12 4561 }
19caa751 4562
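/* The save-area code above picks its strategy from the mode: a region
   whose size fits some integer mode is saved into a pseudo with one
   move, anything larger falls back to BLKmode and a block copy into a
   stack temporary.  A minimal sketch (sizes hypothetical; SImode
   assumes a target with a 32-bit integer mode):  */
#if 0
machine_mode small_mode = int_mode_for_size (32, 1).else_blk ();   /* SImode: emit_move_insn */
machine_mode large_mode = int_mode_for_size (1024, 1).else_blk (); /* BLKmode: emit_block_move */
#endif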
6bdf8c2e 4563 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
44bb111a 4564 partial, reg, 0, argblock,
a20c5714
RS
4565 (gen_int_mode
4566 (argvec[argnum].locate.offset.constant, Pmode)),
e7949876 4567 reg_parm_stack_space,
99206968 4568 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
3c0fca12 4569
3c0fca12 4570 /* Now mark the segment we just used. */
f73ad30e 4571 if (ACCUMULATE_OUTGOING_ARGS)
a20c5714 4572 mark_stack_region_used (lower_bound, upper_bound);
3c0fca12
RH
4573
4574 NO_DEFER_POP;
475a3eef 4575
e53b6e56 4576 /* Indicate argument access so that alias.cc knows that these
2b1c5433
JJ
4577 values are live. */
4578 if (argblock)
0a81f074 4579 use = plus_constant (Pmode, argblock,
2b1c5433 4580 argvec[argnum].locate.offset.constant);
0ed4bf92
BS
4581 else if (have_push_fusage)
4582 continue;
2b1c5433 4583 else
0ed4bf92 4584 {
e53b6e56 4585 /* When arguments are pushed, trying to tell alias.cc where
0ed4bf92
BS
4586 exactly this argument is won't work, because the
4587 auto-increment causes confusion. So we merely indicate
4588 that we access something with a known mode somewhere on
4589 the stack. */
4590 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4591 gen_rtx_SCRATCH (Pmode));
4592 have_push_fusage = true;
4593 }
2b1c5433
JJ
4594 use = gen_rtx_MEM (argvec[argnum].mode, use);
4595 use = gen_rtx_USE (VOIDmode, use);
4596 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3c0fca12
RH
4597 }
4598 }
4599
3d9684ae 4600 argnum = nargs - 1;
3c0fca12 4601
531ca746 4602 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3c0fca12
RH
4603
4604 /* Now load any reg parms into their regs. */
4605
4606 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4607 are to be pushed. */
3d9684ae 4608 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 4609 {
ef4bddc2 4610 machine_mode mode = argvec[argnum].mode;
b3694847 4611 rtx val = argvec[argnum].value;
3c0fca12
RH
4612 rtx reg = argvec[argnum].reg;
4613 int partial = argvec[argnum].partial;
460b171d 4614
3c0fca12
RH
4615 /* Handle calls that pass values in multiple non-contiguous
4616 locations. The PA64 has examples of this for library calls. */
4617 if (reg != 0 && GET_CODE (reg) == PARALLEL)
ff15c351 4618 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3c0fca12 4619 else if (reg != 0 && partial == 0)
460b171d
JB
4620 {
4621 emit_move_insn (reg, val);
4622#ifdef BLOCK_REG_PADDING
cf098191 4623 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
460b171d
JB
4624
4625 /* Copied from load_register_parameters. */
4626
4627 /* Handle the case where we have a value that needs shifting
4628 up to the msb, e.g. a QImode value that we're padding
4629 upward on a BYTES_BIG_ENDIAN machine. */
cf098191 4630 if (known_lt (size, UNITS_PER_WORD)
460b171d 4631 && (argvec[argnum].locate.where_pad
76b0cbf8 4632 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
460b171d
JB
4633 {
4634 rtx x;
cf098191 4635 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
460b171d
JB
4636
4637 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4638 report the whole reg as used. Strictly speaking, the
4639 call only uses SIZE bytes at the msb end, but it doesn't
4640 seem worth generating rtl to say that. */
4641 reg = gen_rtx_REG (word_mode, REGNO (reg));
4642 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4643 if (x != reg)
4644 emit_move_insn (reg, x);
4645 }
4646#endif
4647 }
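/* A numeric sketch of the BLOCK_REG_PADDING shift above, assuming a
   hypothetical big-endian target with UNITS_PER_WORD == 4: a QImode
   argument 0xAB belongs in the most significant byte of the word-sized
   register, so it is shifted left by (4 - 1) * BITS_PER_UNIT == 24:  */
#if 0
static unsigned int
toy_pad_to_msb (unsigned char val)	/* hypothetical helper */
{
  return (unsigned int) val << 24;	/* 0xAB -> 0xAB000000 */
}
#endif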
3c0fca12
RH
4648
4649 NO_DEFER_POP;
4650 }
4651
3c0fca12
RH
4652 /* Any regs containing parms remain in use through the call. */
4653 for (count = 0; count < nargs; count++)
4654 {
4655 rtx reg = argvec[count].reg;
4656 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4657 use_group_regs (&call_fusage, reg);
4658 else if (reg != 0)
3b1bf459
BS
4659 {
4660 int partial = argvec[count].partial;
4661 if (partial)
4662 {
4663 int nregs;
4664 gcc_assert (partial % UNITS_PER_WORD == 0);
4665 nregs = partial / UNITS_PER_WORD;
4666 use_regs (&call_fusage, REGNO (reg), nregs);
4667 }
4668 else
4669 use_reg (&call_fusage, reg);
4670 }
3c0fca12
RH
4671 }
4672
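/* Worked example for the partial case above, assuming UNITS_PER_WORD == 8:
   an argument with partial == 16 has its first two words in registers,
   so nregs == 16 / 8 == 2 and use_regs records REGNO (reg) and
   REGNO (reg) + 1 in CALL_FUSAGE, keeping both hard registers live
   until the call.  */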
4673 /* Pass the function the address in which to return a structure value. */
61f71b34 4674 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3c0fca12 4675 {
61f71b34 4676 emit_move_insn (struct_value,
3c0fca12
RH
4677 force_reg (Pmode,
4678 force_operand (XEXP (mem_value, 0),
4679 NULL_RTX)));
f8cfc6aa 4680 if (REG_P (struct_value))
61f71b34 4681 use_reg (&call_fusage, struct_value);
3c0fca12
RH
4682 }
4683
4684 /* Don't allow popping to be deferred, since then
4685 cse'ing of library calls could delete a call and leave the pop. */
4686 NO_DEFER_POP;
5591ee6f 4687 valreg = (mem_value == 0 && outmode != VOIDmode
390b17c2 4688 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3c0fca12 4689
ce48579b 4690 /* Stack must be properly aligned now. */
a20c5714
RS
4691 gcc_assert (multiple_p (stack_pointer_delta,
4692 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
ebcd0b57 4693
695ee791
RH
4694 before_call = get_last_insn ();
4695
3cf3da88
EB
4696 if (flag_callgraph_info)
4697 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
4698
3c0fca12
RH
4699 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4700 will set inhibit_defer_pop to that value. */
de76b467
JH
4701 /* The return type is needed to decide how many bytes the function pops.
4702 Signedness plays no role in that, so for simplicity, we pretend it's
4703 always signed. We also assume that the list of arguments passed has
4704 no impact, so we pretend it is unknown. */
3c0fca12 4705
6de9cd9a 4706 emit_call_1 (fun, NULL,
f725a3ec 4707 get_identifier (XSTR (orgfun, 0)),
b0c48229 4708 build_function_type (tfom, NULL_TREE),
f725a3ec 4709 original_args_size.constant, args_size.constant,
3c0fca12 4710 struct_value_size,
d5cc9181 4711 targetm.calls.function_arg (args_so_far,
6783fdb7 4712 function_arg_info::end_marker ()),
5591ee6f 4713 valreg,
d5cc9181 4714 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
3c0fca12 4715
1e288103 4716 if (flag_ipa_ra)
4f660b15 4717 {
e67d1102 4718 rtx datum = orgfun;
4f660b15 4719 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
e67d1102 4720 rtx_call_insn *last = last_call_insn ();
4f660b15
RO
4721 add_reg_note (last, REG_CALL_DECL, datum);
4722 }
4723
460b171d
JB
4724 /* Right-shift returned value if necessary. */
4725 if (!pcc_struct_value
4726 && TYPE_MODE (tfom) != BLKmode
4727 && targetm.calls.return_in_msb (tfom))
4728 {
4729 shift_return_value (TYPE_MODE (tfom), false, valreg);
4730 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4731 }
4732
2f21e1ba
BS
4733 targetm.calls.end_call_args ();
4734
e53b6e56 4735 /* For calls to `setjmp', etc., inform function.cc:setjmp_warnings
6fb5fa3c
DB
4736 that it should complain if nonvolatile values are live. For
4737 functions that cannot return, inform flow that control does not
4738 fall through. */
6e14af16 4739 if (flags & ECF_NORETURN)
695ee791 4740 {
570a98eb 4741 /* The barrier must be emitted
695ee791
RH
4742 immediately after the CALL_INSN. Some ports emit more than
4743 just a CALL_INSN above, so we must search for it here. */
48810515 4744 rtx_insn *last = get_last_insn ();
4b4bf941 4745 while (!CALL_P (last))
695ee791
RH
4746 {
4747 last = PREV_INSN (last);
4748 /* There was no CALL_INSN? */
366de0ce 4749 gcc_assert (last != before_call);
695ee791
RH
4750 }
4751
570a98eb 4752 emit_barrier_after (last);
695ee791
RH
4753 }
4754
85da11a6
EB
4755 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4756 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4757 if (flags & ECF_NOTHROW)
4758 {
48810515 4759 rtx_insn *last = get_last_insn ();
85da11a6
EB
4760 while (!CALL_P (last))
4761 {
4762 last = PREV_INSN (last);
4763 /* There was no CALL_INSN? */
4764 gcc_assert (last != before_call);
4765 }
4766
4767 make_reg_eh_region_note_nothrow_nononlocal (last);
4768 }
4769
3c0fca12
RH
4770 /* Now restore inhibit_defer_pop to its actual original value. */
4771 OK_DEFER_POP;
4772
4773 pop_temp_slots ();
4774
4775 /* Copy the value to the right place. */
de76b467 4776 if (outmode != VOIDmode && retval)
3c0fca12
RH
4777 {
4778 if (mem_value)
4779 {
4780 if (value == 0)
4781 value = mem_value;
4782 if (value != mem_value)
4783 emit_move_insn (value, mem_value);
4784 }
c3297561
AO
4785 else if (GET_CODE (valreg) == PARALLEL)
4786 {
4787 if (value == 0)
4788 value = gen_reg_rtx (outmode);
643642eb 4789 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
c3297561 4790 }
3c0fca12 4791 else
7ab0aca2 4792 {
cde0f3fd 4793 /* Convert to the proper mode if a promotion has been active. */
7ab0aca2
RH
4794 if (GET_MODE (valreg) != outmode)
4795 {
4796 int unsignedp = TYPE_UNSIGNED (tfom);
4797
cde0f3fd
PB
4798 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4799 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
7ab0aca2 4800 == GET_MODE (valreg));
7ab0aca2
RH
4801 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4802 }
4803
4804 if (value != 0)
4805 emit_move_insn (value, valreg);
4806 else
4807 value = valreg;
4808 }
3c0fca12
RH
4809 }
4810
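/* A minimal sketch of the promotion branch above: on a hypothetical
   target whose ABI widens every integral return value to a full word,
   a libcall with outmode == QImode actually hands its result back in
   SImode, and convert_modes narrows it again:  */
#if 0
if (GET_MODE (valreg) == SImode && outmode == QImode)
  valreg = convert_modes (QImode, SImode, valreg, 0 /* signed */);
#endif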
f73ad30e 4811 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 4812 {
f73ad30e
JH
4813#ifdef REG_PARM_STACK_SPACE
4814 if (save_area)
b820d2b8
AM
4815 restore_fixed_argument_area (save_area, argblock,
4816 high_to_save, low_to_save);
3c0fca12 4817#endif
f725a3ec 4818
f73ad30e
JH
4819 /* If we saved any argument areas, restore them. */
4820 for (count = 0; count < nargs; count++)
4821 if (argvec[count].save_area)
4822 {
ef4bddc2 4823 machine_mode save_mode = GET_MODE (argvec[count].save_area);
0a81f074 4824 rtx adr = plus_constant (Pmode, argblock,
e7949876
AM
4825 argvec[count].locate.offset.constant);
4826 rtx stack_area = gen_rtx_MEM (save_mode,
4827 memory_address (save_mode, adr));
f73ad30e 4828
9778f2f8
JH
4829 if (save_mode == BLKmode)
4830 emit_block_move (stack_area,
1a8cb155
RS
4831 validize_mem
4832 (copy_rtx (argvec[count].save_area)),
a20c5714
RS
4833 (gen_int_mode
4834 (argvec[count].locate.size.constant, Pmode)),
9778f2f8
JH
4835 BLOCK_OP_CALL_PARM);
4836 else
4837 emit_move_insn (stack_area, argvec[count].save_area);
f73ad30e 4838 }
3c0fca12 4839
f73ad30e
JH
4840 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4841 stack_usage_map = initial_stack_usage_map;
a20c5714 4842 stack_usage_watermark = initial_stack_usage_watermark;
f73ad30e 4843 }
43bc5f13 4844
04695783 4845 free (stack_usage_map_buf);
d9725c41 4846
de76b467
JH
4847 return value;
4848
4849}
4850\f
d5e254e1 4851
51bbfa0c
RS
4852/* Store a single argument for a function call
4853 into the register or memory area where it must be passed.
4854 *ARG describes the argument value and where to pass it.
4855
4856 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 4857 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
4858
4859 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
f725a3ec 4860 so we must be careful about how the stack is used.
51bbfa0c
RS
4861
4862 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4863 argument stack. This is used with ACCUMULATE_OUTGOING_ARGS to indicate
4864 that we need not worry about saving and restoring the stack.
4865
4c6b3b2a 4866 FNDECL is the declaration of the function we are calling.
f725a3ec 4867
da7d8304 4868 Return nonzero if this arg should cause sibcall failure,
4c6b3b2a 4869 zero otherwise. */
51bbfa0c 4870
4c6b3b2a 4871static int
d329e058
AJ
4872store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4873 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
51bbfa0c 4874{
b3694847 4875 tree pval = arg->tree_value;
51bbfa0c
RS
4876 rtx reg = 0;
4877 int partial = 0;
a20c5714
RS
4878 poly_int64 used = 0;
4879 poly_int64 lower_bound = 0, upper_bound = 0;
4c6b3b2a 4880 int sibcall_failure = 0;
51bbfa0c
RS
4881
4882 if (TREE_CODE (pval) == ERROR_MARK)
4c6b3b2a 4883 return 1;
51bbfa0c 4884
cc79451b
RK
4885 /* Push a new temporary level for any temporaries we make for
4886 this argument. */
4887 push_temp_slots ();
4888
f8a097cd 4889 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
51bbfa0c 4890 {
f73ad30e
JH
4891 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4892 save any previous data at that location. */
4893 if (argblock && ! variable_size && arg->stack)
4894 {
6dad9361
TS
4895 if (ARGS_GROW_DOWNWARD)
4896 {
4897 /* stack_slot is negative, but we want to index stack_usage_map
4898 with positive values. */
4899 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
4900 {
4901 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
4902 upper_bound = -rtx_to_poly_int64 (offset) + 1;
4903 }
6dad9361
TS
4904 else
4905 upper_bound = 0;
51bbfa0c 4906
6dad9361
TS
4907 lower_bound = upper_bound - arg->locate.size.constant;
4908 }
f73ad30e 4909 else
6dad9361
TS
4910 {
4911 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
4912 {
4913 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
4914 lower_bound = rtx_to_poly_int64 (offset);
4915 }
6dad9361
TS
4916 else
4917 lower_bound = 0;
51bbfa0c 4918
6dad9361
TS
4919 upper_bound = lower_bound + arg->locate.size.constant;
4920 }
51bbfa0c 4921
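/* Worked example of the downward-growing case above, for a hypothetical
   slot with slot_offset == -12 and size == 4: upper_bound is
   -(-12) + 1 == 13 and lower_bound is 13 - 4 == 9, so the slot occupies
   bytes [9, 13) of stack_usage_map, which is indexed with positive
   values only.  */
#if 0
poly_int64 toy_upper = -(-12) + 1;	/* == 13 */
poly_int64 toy_lower = toy_upper - 4;	/* == 9 */
#endif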
a20c5714
RS
4922 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4923 reg_parm_stack_space))
51bbfa0c 4924 {
e7949876 4925 /* We need to make a save area. */
a20c5714 4926 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
f4b31647
RS
4927 machine_mode save_mode
4928 = int_mode_for_size (size, 1).else_blk ();
e7949876
AM
4929 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4930 rtx stack_area = gen_rtx_MEM (save_mode, adr);
f73ad30e
JH
4931
4932 if (save_mode == BLKmode)
4933 {
9ee5337d
EB
4934 arg->save_area
4935 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
f73ad30e 4936 preserve_temp_slots (arg->save_area);
1a8cb155
RS
4937 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
4938 stack_area,
a20c5714
RS
4939 (gen_int_mode
4940 (arg->locate.size.constant, Pmode)),
44bb111a 4941 BLOCK_OP_CALL_PARM);
f73ad30e
JH
4942 }
4943 else
4944 {
4945 arg->save_area = gen_reg_rtx (save_mode);
4946 emit_move_insn (arg->save_area, stack_area);
4947 }
51bbfa0c
RS
4948 }
4949 }
4950 }
b564df06 4951
51bbfa0c
RS
4952 /* If this isn't going to be placed on both the stack and in registers,
4953 set up the register and number of words. */
4954 if (! arg->pass_on_stack)
aa7634dd
DM
4955 {
4956 if (flags & ECF_SIBCALL)
4957 reg = arg->tail_call_reg;
4958 else
4959 reg = arg->reg;
4960 partial = arg->partial;
4961 }
51bbfa0c 4962
366de0ce
NS
4963 /* Being passed entirely in a register. We shouldn't be called in
4964 this case. */
4965 gcc_assert (reg == 0 || partial != 0);
c22cacf3 4966
4ab56118
RK
4967 /* If this arg needs special alignment, don't load the registers
4968 here. */
4969 if (arg->n_aligned_regs != 0)
4970 reg = 0;
f725a3ec 4971
4ab56118 4972 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
4973 it directly into its stack slot. Otherwise, we can. */
4974 if (arg->value == 0)
d64f5a78 4975 {
d64f5a78
RS
4976 /* stack_arg_under_construction is nonzero if a function argument is
4977 being evaluated directly into the outgoing argument list and
4978 expand_call must take special action to preserve the argument list
4979 if it is called recursively.
4980
4981 For scalar function arguments stack_usage_map is sufficient to
4982 determine which stack slots must be saved and restored. Scalar
4983 arguments in general have pass_on_stack == 0.
4984
4985 If this argument is initialized by a function which takes the
4986 address of the argument (a C++ constructor or a C function
4987 returning a BLKmode structure), then stack_usage_map is
4988 insufficient and expand_call must push the stack around the
4989 function call. Such arguments have pass_on_stack == 1.
4990
4991 Note that it is always safe to set stack_arg_under_construction,
4992 but this generates suboptimal code if set when not needed. */
4993
4994 if (arg->pass_on_stack)
4995 stack_arg_under_construction++;
f73ad30e 4996
3a08477a
RK
4997 arg->value = expand_expr (pval,
4998 (partial
4999 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5000 ? NULL_RTX : arg->stack,
8403445a 5001 VOIDmode, EXPAND_STACK_PARM);
1efe6448
RK
5002
5003 /* If we are promoting object (or for any other reason) the mode
5004 doesn't agree, convert the mode. */
5005
7373d92d
RK
5006 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5007 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5008 arg->value, arg->unsignedp);
1efe6448 5009
d64f5a78
RS
5010 if (arg->pass_on_stack)
5011 stack_arg_under_construction--;
d64f5a78 5012 }
51bbfa0c 5013
0dc42b03 5014 /* Check for overlap with already clobbered argument area. */
07eef816
KH
5015 if ((flags & ECF_SIBCALL)
5016 && MEM_P (arg->value)
a20c5714
RS
5017 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5018 arg->locate.size.constant))
07eef816 5019 sibcall_failure = 1;
0dc42b03 5020
51bbfa0c
RS
5021 /* Don't allow anything left on stack from computation
5022 of argument to alloca. */
f8a097cd 5023 if (flags & ECF_MAY_BE_ALLOCA)
51bbfa0c
RS
5024 do_pending_stack_adjust ();
5025
5026 if (arg->value == arg->stack)
37a08a29
RK
5027 /* If the value is already in the stack slot, we are done. */
5028 ;
1efe6448 5029 else if (arg->mode != BLKmode)
51bbfa0c 5030 {
46bd2bee 5031 unsigned int parm_align;
51bbfa0c
RS
5032
5033 /* Argument is a scalar, not entirely passed in registers.
5034 (If part is passed in registers, arg->partial says how much
5035 and emit_push_insn will take care of putting it there.)
f725a3ec 5036
51bbfa0c
RS
5037 Push it, and if its size is less than the
5038 amount of space allocated to it,
5039 also bump the stack pointer by the additional space.
5040 Note that in C the default argument promotions
5041 will prevent such mismatches. */
5042
7b4df2bf
RS
5043 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5044 ? 0 : GET_MODE_SIZE (arg->mode));
974aedcc 5045
51bbfa0c
RS
5046 /* Compute how much space the push instruction will push.
5047 On many machines, pushing a byte will advance the stack
5048 pointer by a halfword. */
5049#ifdef PUSH_ROUNDING
5050 size = PUSH_ROUNDING (size);
5051#endif
5052 used = size;
5053
5054 /* Compute how much space the argument should get:
5055 round up to a multiple of the alignment for arguments. */
76b0cbf8
RS
5056 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5057 != PAD_NONE)
7b4df2bf
RS
5058 /* At the moment we don't (need to) support ABIs for which the
5059 padding isn't known at compile time. In principle it should
5060 be easy to add though. */
5061 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c 5062
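/* A worked example of the two roundings above, on a hypothetical target
   where PUSH_ROUNDING rounds to 2-byte units and PARM_BOUNDARY == 32:
   a QImode argument starts with size == 1; PUSH_ROUNDING (1) == 2 is
   what the push insn really advances the stack pointer by; and
   force_align_up (2, 32 / 8) == 4 is the space the argument is entitled
   to, so emit_push_insn below receives used - size == 2 bytes of extra
   adjustment.  */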
46bd2bee
JM
5063 /* Compute the alignment of the pushed argument. */
5064 parm_align = arg->locate.boundary;
76b0cbf8
RS
5065 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5066 == PAD_DOWNWARD)
46bd2bee 5067 {
a20c5714
RS
5068 poly_int64 pad = used - size;
5069 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5070 if (pad_align != 0)
5071 parm_align = MIN (parm_align, pad_align);
46bd2bee
JM
5072 }
5073
51bbfa0c
RS
5074 /* This isn't already where we want it on the stack, so put it there.
5075 This can either be done with push or copy insns. */
a20c5714 5076 if (maybe_ne (used, 0)
974aedcc
MP
5077 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5078 NULL_RTX, parm_align, partial, reg, used - size,
5079 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5080 reg_parm_stack_space,
5081 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
99206968 5082 sibcall_failure = 1;
841404cd
AO
5083
5084 /* Unless this is a partially-in-register argument, the argument is now
5085 in the stack. */
5086 if (partial == 0)
5087 arg->value = arg->stack;
51bbfa0c
RS
5088 }
5089 else
5090 {
5091 /* BLKmode, at least partly to be pushed. */
5092
1b1f20ca 5093 unsigned int parm_align;
a20c5714 5094 poly_int64 excess;
51bbfa0c
RS
5095 rtx size_rtx;
5096
5097 /* Pushing a nonscalar.
5098 If part is passed in registers, PARTIAL says how much
5099 and emit_push_insn will take care of putting it there. */
5100
5101 /* Round its size up to a multiple
5102 of the allocation unit for arguments. */
5103
e7949876 5104 if (arg->locate.size.var != 0)
51bbfa0c
RS
5105 {
5106 excess = 0;
e7949876 5107 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
51bbfa0c
RS
5108 }
5109 else
5110 {
78a52f11
RH
5111 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5112 for BLKmode is careful to avoid it. */
5113 excess = (arg->locate.size.constant
974aedcc 5114 - arg_int_size_in_bytes (TREE_TYPE (pval))
78a52f11 5115 + partial);
974aedcc 5116 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
bbbbb16a
ILT
5117 NULL_RTX, TYPE_MODE (sizetype),
5118 EXPAND_NORMAL);
51bbfa0c
RS
5119 }
5120
bfc45551 5121 parm_align = arg->locate.boundary;
1b1f20ca
RH
5122
5123 /* When an argument is padded down, the block is aligned to
5124 PARM_BOUNDARY, but the actual argument isn't. */
76b0cbf8
RS
5125 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5126 == PAD_DOWNWARD)
1b1f20ca 5127 {
e7949876 5128 if (arg->locate.size.var)
1b1f20ca 5129 parm_align = BITS_PER_UNIT;
a20c5714 5130 else
1b1f20ca 5131 {
a20c5714
RS
5132 unsigned int excess_align
5133 = known_alignment (excess) * BITS_PER_UNIT;
5134 if (excess_align != 0)
5135 parm_align = MIN (parm_align, excess_align);
1b1f20ca
RH
5136 }
5137 }
5138
3c0cb5de 5139 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4c6b3b2a
JJ
5140 {
5141 /* emit_push_insn might not work properly if arg->value and
e7949876 5142 argblock + arg->locate.offset areas overlap. */
4c6b3b2a 5143 rtx x = arg->value;
a20c5714 5144 poly_int64 i = 0;
4c6b3b2a 5145
5284e559
RS
5146 if (strip_offset (XEXP (x, 0), &i)
5147 == crtl->args.internal_arg_pointer)
4c6b3b2a 5148 {
b3877860
KT
5149 /* arg.locate doesn't contain the pretend_args_size offset,
5150 it's part of argblock. Ensure we don't count it in I. */
5151 if (STACK_GROWS_DOWNWARD)
5152 i -= crtl->args.pretend_args_size;
5153 else
5154 i += crtl->args.pretend_args_size;
5155
e0a21ab9 5156 /* expand_call should ensure this. */
366de0ce 5157 gcc_assert (!arg->locate.offset.var
a20c5714
RS
5158 && arg->locate.size.var == 0);
5159 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
4c6b3b2a 5160
a20c5714 5161 if (known_eq (arg->locate.offset.constant, i))
d6c2c77c
JC
5162 {
5163 /* Even though they appear to be at the same location,
5164 if part of the outgoing argument is in registers,
5165 they aren't really at the same location. Check for
5166 this by making sure that the incoming size is the
5167 same as the outgoing size. */
a20c5714 5168 if (maybe_ne (arg->locate.size.constant, size_val))
4c6b3b2a
JJ
5169 sibcall_failure = 1;
5170 }
a20c5714
RS
5171 else if (maybe_in_range_p (arg->locate.offset.constant,
5172 i, size_val))
5173 sibcall_failure = 1;
5174 /* Use arg->locate.size.constant instead of size_rtx
5175 because we only care about the part of the argument
5176 on the stack. */
5177 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5178 arg->locate.size.constant))
5179 sibcall_failure = 1;
4c6b3b2a
JJ
5180 }
5181 }
5182
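/* A numeric sketch of the overlap tests above, for a hypothetical
   layout: the argument currently sits at incoming offset i == 8 with
   size_val == 16 bytes to push, and its outgoing slot starts at offset
   16.  The regions [8, 24) and [16, 32) intersect, so:  */
#if 0
if (maybe_in_range_p (16, 8, 16))	/* offset 16 inside [8, 24): yes */
  sibcall_failure = 1;			/* copying would clobber the source */
#endif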
974aedcc
MP
5183 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5184 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5185 parm_align, partial, reg, excess, argblock,
5186 ARGS_SIZE_RTX (arg->locate.offset),
5187 reg_parm_stack_space,
5188 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
e6e6e0a9
JJ
5189 /* If we bypass emit_push_insn because it is a zero-sized argument,
5190 we still might need to adjust the stack if such an argument requires
5191 extra alignment. See PR104558. */
5192 else if ((arg->locate.alignment_pad.var
5193 || maybe_ne (arg->locate.alignment_pad.constant, 0))
5194 && !argblock)
5195 anti_adjust_stack (ARGS_SIZE_RTX (arg->locate.alignment_pad));
51bbfa0c 5196
841404cd
AO
5197 /* Unless this is a partially-in-register argument, the argument is now
5198 in the stack.
51bbfa0c 5199
841404cd
AO
5200 ??? Unlike the case above, in which we want the actual
5201 address of the data, so that we can load it directly into a
5202 register, here we want the address of the stack slot, so that
5203 it's properly aligned for word-by-word copying or something
5204 like that. It's not clear that this is always correct. */
5205 if (partial == 0)
5206 arg->value = arg->stack_slot;
5207 }
8df3dbb7
RH
5208
5209 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5210 {
5211 tree type = TREE_TYPE (arg->tree_value);
5212 arg->parallel_value
5213 = emit_group_load_into_temps (arg->reg, arg->value, type,
5214 int_size_in_bytes (type));
5215 }
51bbfa0c 5216
8403445a
AM
5217 /* Mark all slots this store used. */
5218 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5219 && argblock && ! variable_size && arg->stack)
a20c5714 5220 mark_stack_region_used (lower_bound, upper_bound);
8403445a 5221
51bbfa0c
RS
5222 /* Once we have pushed something, pops can't safely
5223 be deferred during the rest of the arguments. */
5224 NO_DEFER_POP;
5225
9474e8ab 5226 /* Free any temporary slots made in processing this argument. */
cc79451b 5227 pop_temp_slots ();
4c6b3b2a
JJ
5228
5229 return sibcall_failure;
51bbfa0c 5230}
a4b1b92a 5231
0ffef200 5232/* Nonzero if we do not know how to pass ARG solely in registers. */
a4b1b92a 5233
fe984136 5234bool
0ffef200 5235must_pass_in_stack_var_size (const function_arg_info &arg)
fe984136 5236{
0ffef200 5237 if (!arg.type)
fe984136
RH
5238 return false;
5239
5240 /* If the type has variable size... */
c600df9a 5241 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
fe984136 5242 return true;
a4b1b92a 5243
fe984136
RH
5244 /* If the type is marked as addressable (it is required
5245 to be constructed into the stack)... */
0ffef200 5246 if (TREE_ADDRESSABLE (arg.type))
fe984136
RH
5247 return true;
5248
5249 return false;
5250}
a4b1b92a 5251
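/* A minimal usage sketch (trees hypothetical): a type whose TYPE_SIZE
   is not a compile-time poly_int, e.g. a C variable-length array type,
   makes the predicate true, while a plain int does not.  */
#if 0
function_arg_info toy_vla (vla_type, /*named=*/true);	/* variable-sized */
function_arg_info toy_int (integer_type_node, /*named=*/true);
gcc_checking_assert (must_pass_in_stack_var_size (toy_vla));
gcc_checking_assert (!must_pass_in_stack_var_size (toy_int));
#endif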
7ae4ad28 5252/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
fe984136
RH
5253 takes trailing padding of a structure into account. */
5254/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
a4b1b92a
RH
5255
5256bool
0ffef200 5257must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
a4b1b92a 5258{
0ffef200 5259 if (!arg.type)
40cdfd5a 5260 return false;
a4b1b92a
RH
5261
5262 /* If the type has variable size... */
0ffef200 5263 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
a4b1b92a
RH
5264 return true;
5265
5266 /* If the type is marked as addressable (it is required
5267 to be constructed into the stack)... */
0ffef200 5268 if (TREE_ADDRESSABLE (arg.type))
a4b1b92a
RH
5269 return true;
5270
0ffef200 5271 if (TYPE_EMPTY_P (arg.type))
974aedcc
MP
5272 return false;
5273
a4b1b92a
RH
5274 /* If the padding and mode of the type are such that a copy into
5275 a register would put it into the wrong part of the register. */
0ffef200
RS
5276 if (arg.mode == BLKmode
5277 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5278 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
76b0cbf8 5279 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
a4b1b92a
RH
5280 return true;
5281
5282 return false;
5283}
6bf29a7e 5284
4f53599c
RS
5285/* Return true if TYPE must be passed on the stack when passed to
5286 the "..." arguments of a function. */
5287
5288bool
5289must_pass_va_arg_in_stack (tree type)
5290{
0ffef200
RS
5291 function_arg_info arg (type, /*named=*/false);
5292 return targetm.calls.must_pass_in_stack (arg);
4f53599c
RS
5293}
5294
3bce7904
RS
5295/* Return true if FIELD is the C++17 empty base field that should
5296 be ignored for ABI calling convention decisions in order to
5297 maintain ABI compatibility between C++14 and earlier, which doesn't
5298 add this FIELD to classes with empty bases, and C++17 and later
5299 which does. */
5300
5301bool
5302cxx17_empty_base_field_p (const_tree field)
5303{
5304 return (DECL_FIELD_ABI_IGNORED (field)
5305 && DECL_ARTIFICIAL (field)
5306 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
5307 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
5308}