/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "stringpool.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
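
/* For illustration (an editorial sketch, not part of the original
   sources): the overlap guarded against above can arise in code such as

     struct big { char c[4096]; };
     struct big make_big (void);
     void consume (struct big);

     consume (make_big ());

   where make_big's BLKmode return value is constructed while the
   argument list for consume is being laid out on the stack.  */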

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}

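/* A minimal usage sketch of the two helpers above (hypothetical, for
   illustration only; LB and UB are made-up names):

     poly_uint64 lb = 16, ub = lb + GET_MODE_SIZE (word_mode);
     if (!stack_region_maybe_used_p (lb, ub, reg_parm_stack_space))
       mark_stack_region_used (lb, ub);

   When UB is not a compile-time constant, the mark falls back to
   lowering stack_usage_watermark, after which any query whose bounds
   might reach that index conservatively reports the region as used.  */
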
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
         runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
        {
          const int bit_val = targetm.calls.custom_function_descriptors;
          rtx call_lab = gen_label_rtx ();

          gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
          fndecl_or_type
            = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
                          fndecl_or_type);
          DECL_STATIC_CHAIN (fndecl_or_type) = 1;
          rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

          if (GET_MODE (funexp) != Pmode)
            funexp = convert_memory_address (Pmode, funexp);

          /* Avoid long live ranges around function calls.  */
          funexp = copy_to_mode_reg (Pmode, funexp);

          if (REG_P (chain))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

          /* Emit the runtime identification pattern.  */
          rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
          emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
                                   call_lab);

          /* Statically predict the branch to very likely taken.  */
          rtx_insn *insn = get_last_insn ();
          if (JUMP_P (insn))
            predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

          /* Load the descriptor.  */
          rtx mem = gen_rtx_MEM (ptr_mode,
                                 plus_constant (Pmode, funexp, - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (chain, mem);

          mem = gen_rtx_MEM (ptr_mode,
                             plus_constant (Pmode, funexp,
                                            POINTER_SIZE / BITS_PER_UNIT
                                            - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (funexp, mem);

          emit_label (call_lab);

          if (REG_P (chain))
            {
              use_reg (call_fusage, chain);
              STATIC_CHAIN_REG_P (chain) = 1;
            }

          /* Make sure we're not going to be overwritten below.  */
          gcc_assert (!static_chain_value);
        }

      /* If we are using registers for parameters, force the
         function address into a register now.  */
      funexp = ((reg_parm_seen
                 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
                ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
                : memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* FUNEXP could be a SYMBOL_REF that represents a function pointer
         of ptr_mode.  In this case, it should be converted into address
         mode to be a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
        funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
        {
          if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
            funexp = force_reg (Pmode, funexp);
        }
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        {
          use_reg (call_fusage, chain);
          STATIC_CHAIN_REG_P (chain) = 1;
        }
    }

  return funexp;
}
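
/* For illustration (hypothetical, not from the sources): with
   -fno-trampolines the address of a nested function is a descriptor
   whose low bit(s) hold targetm.calls.custom_function_descriptors:

     void
     outer (void)
     {
       int x = 0;
       void inner (void) { x++; }   // needs outer's frame (static chain)
       void (*fp) (void) = inner;   // fp points to a descriptor
       fp ();                       // the runtime test emitted above
     }                              // strips the tag and loads the
                                    // static chain and entry point.  */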

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
                                 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}
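
/* For illustration (hypothetical): on 32-bit x86,

     __attribute__ ((stdcall)) int f (int a, int b);

   is a callee that pops its own 8 bytes of arguments, so
   targetm.calls.return_pops_args returns 8 and emit_call_1 selects a
   call_pop/call_value_pop pattern instead of emitting a separate stack
   adjustment after the call.  */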

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */

static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
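
/* For illustration (informal; the "fn spec" encoding is internal and
   this gloss is an editorial aid, not from the sources): the string's
   first character describes the return value, so a built-in whose type
   carries fn spec "1 " is decoded above as returning its first
   argument (ERF_RETURNS_ARG | 0), the way memcpy returns its
   destination, while "m " marks a malloc-like function whose result
   aliases nothing else (ERF_NOALIAS).  */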

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when EXP is a call to a built-in alloca function.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
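
/* For illustration (hypothetical declaration): given

     int square (int) __attribute__ ((const, nothrow, leaf));

   flags_from_decl_or_type returns ECF_CONST | ECF_NOTHROW | ECF_LEAF;
   marking the function noreturn would additionally set ECF_NORETURN
   via the TREE_THIS_VOLATILE check above.  */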

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          arg.type = TREE_TYPE (first_field (type));
          arg.mode = TYPE_MODE (arg.type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}

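/* For illustration (hypothetical): a C++ class type with a non-trivial
   copy constructor is TREE_ADDRESSABLE, so in

     struct S { S (const S &); int i; };
     void f (S s);

   the middle end may not create a new copy of the argument and S is
   passed by invisible reference; likewise any variable-sized type,
   such as a C99 VLA parameter type, whose TYPE_SIZE is not a
   compile-time poly_int.  */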

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false);
          }
      }
}

/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  HOST_WIDE_INT limit = warn_alloc_size_limit;
  if (limit == HOST_WIDE_INT_MAX)
    limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

  alloc_object_size_limit = build_int_cst (size_type_node, limit);

  return alloc_object_size_limit;
}

/* Return true when EXP's range can be determined, and set RANGE[] to it
   after adjusting it if necessary so that it represents a valid size
   of an object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.  When ALLOW_ZERO is true, allow
   returning a range of [0, 0] for a size in an anti-range [1, N] where
   N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
   allocation functions like malloc but it is a valid argument to
   functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (integral)
    range_type = determine_value_range (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
        {
          /* Use the full range of the type of the expression when
             no value range information is available.  */
          range[0] = TYPE_MIN_VALUE (exptype);
          range[1] = TYPE_MAX_VALUE (exptype);
          return true;
        }

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
        {
          if (wi::les_p (max, 0))
            {
              /* EXP is not in a strictly negative range.  That means
                 it must be in some (not necessarily strictly) positive
                 range which includes zero.  Since in signed to unsigned
                 conversions negative values end up converted to large
                 positive values, and otherwise they are not valid sizes,
                 the resulting range is in both cases [0, TYPE_MAX].  */
              min = wi::zero (expprec);
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else if (wi::les_p (min - 1, 0))
            {
              /* EXP is not in a negative-positive range.  That means EXP
                 is either negative, or greater than max.  Since negative
                 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else
            {
              max = min - 1;
              min = wi::zero (expprec);
            }
        }
      else if (wi::eq_p (0, min - 1))
        {
          /* EXP is unsigned and not in the range [1, MAX].  That means
             it's either zero or greater than MAX.  Even though 0 would
             normally be detected by -Walloc-zero, unless ALLOW_ZERO
             is true, set the range to [MAX, TYPE_MAX] so that when MAX
             is greater than the limit the whole range is diagnosed.  */
          if (allow_zero)
            min = max = wi::zero (expprec);
          else
            {
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
        }
      else
        {
          max = min - 1;
          min = wi::zero (expprec);
        }
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
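
/* A worked example for the anti-range case above (editorial, not from
   the sources): if a signed int EXP is known to be in ~[-3, 7], i.e.
   EXP < -3 or EXP > 7, then negative values cannot be valid sizes and
   get_size_range returns [8, INT_MAX], the "[MAX + 1, TYPE_MAX]" case
   in the comments above.  */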

/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  FN is null when EXP is a call via a function pointer.
   When ARGS[0] is null the function does nothing.  ARGS[1] may be null
   for functions like malloc, and non-null for those like calloc that
   are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
        {
          argrange[i][0] = args[i];
          argrange[i][1] = args[i];

          if (tree_int_cst_lt (args[i], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE is negative",
                                   exp, idx[i] + 1, args[i]);
            }
          else if (integer_zerop (args[i]))
            {
              /* Avoid issuing -Walloc-zero for allocation functions other
                 than __builtin_alloca that are declared with attribute
                 returns_nonnull because there's no portability risk.  This
                 avoids warning for such calls to libiberty's xmalloc and
                 friends.
                 Also avoid issuing the warning for calls to a function named
                 "alloca".  */
              if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
                  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
                  : !lookup_attribute ("returns_nonnull",
                                       TYPE_ATTRIBUTES (fntype)))
                warned = warning_at (loc, OPT_Walloc_zero,
                                     "%Kargument %i value is zero",
                                     exp, idx[i] + 1);
            }
          else if (tree_int_cst_lt (maxobjsize, args[i]))
            {
              /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
                 mode and with -fno-exceptions as a way to indicate array
                 size overflow.  There's no good way to detect C++98 here
                 so avoid diagnosing these calls for all C++ modes.  */
              if (i == 0
                  && fn
                  && !args[1]
                  && lang_GNU_CXX ()
                  && DECL_IS_OPERATOR_NEW_P (fn)
                  && integer_all_onesp (args[i]))
                continue;

              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1, args[i], maxobjsize);
            }
        }
      else if (TREE_CODE (args[i]) == SSA_NAME
               && get_size_range (args[i], argrange[i]))
        {
          /* Verify that the argument's range is not negative (including
             upper bound of zero).  */
          if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
              && tree_int_cst_le (argrange[i][1], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] is negative",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1]);
            }
          else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1],
                                   maxobjsize);
            }
        }
    }

  if (!argrange[0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
         attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds %<SIZE_MAX%>",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds maximum object size %E",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1,
                             maxobjsize);

      if (warned)
        {
          /* Print the full range of each of the two arguments to make
             it clear when it is, in fact, in a range and not constant.  */
          if (argrange[0][0] != argrange[0][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[0] + 1, argrange[0][0], argrange[0][1]);
          if (argrange[1][0] != argrange[1][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[1] + 1, argrange[1][0], argrange[1][1]);
        }
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_BUILTIN (fn))
        inform (loc,
                "in a call to built-in allocation function %qD", fn);
      else
        inform (fnloc,
                "in a call to allocation function %qD declared here", fn);
    }
}
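
/* For illustration (hypothetical declaration): for

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   a call such as my_calloc (SIZE_MAX / 2, 4) is diagnosed above
   because the product of the two alloc_size arguments wraps around
   SIZE_MAX, and my_calloc (1, SIZE_MAX) because the second argument
   alone exceeds the -Walloc-size-larger-than= limit.  */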

/* If EXPR refers to a character array or pointer declared with attribute
   nonstring, return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise return
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == ADDR_EXPR
              || code == COMPONENT_REF
              || code == VAR_DECL)
            decl = gimple_assign_rhs1 (def);
        }
      else
        var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
1561
6a33d0ff
MS
1562/* Warn about passing a non-string array/pointer to a function that
1563 expects a nul-terminated string argument. */
1564
1565void
1566maybe_warn_nonstring_arg (tree fndecl, tree exp)
1567{
3d78e008 1568 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6a33d0ff
MS
1569 return;
1570
2c8861b7 1571 if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
781ff3d8
MS
1572 return;
1573
1a9b15a7 1574 /* Avoid clearly invalid calls (more checking done below). */
2438cb6a 1575 unsigned nargs = call_expr_nargs (exp);
1a9b15a7
MS
1576 if (!nargs)
1577 return;
2438cb6a 1578
6a33d0ff
MS
1579 /* The bound argument to a bounded string function like strncpy. */
1580 tree bound = NULL_TREE;
1581
5d6655eb
MS
1582 /* The longest known or possible string argument to one of the comparison
1583 functions. If the length is less than the bound it is used instead.
1584 Since the length is only used for warning and not for code generation
1585 disable strict mode in the calls to get_range_strlen below. */
1586 tree maxlen = NULL_TREE;
d677a8b6 1587
6a33d0ff
MS
1588 /* It's safe to call "bounded" string functions with a non-string
1589 argument since the functions provide an explicit bound for this
4252ccd7
MS
1590 purpose. The exception is strncat where the bound may refer to
1591 either the destination or the source. */
1592 int fncode = DECL_FUNCTION_CODE (fndecl);
1593 switch (fncode)
6a33d0ff 1594 {
d677a8b6 1595 case BUILT_IN_STRCMP:
6a33d0ff
MS
1596 case BUILT_IN_STRNCMP:
1597 case BUILT_IN_STRNCASECMP:
d677a8b6
MS
1598 {
1599 /* For these, if one argument refers to one or more of a set
1600 of string constants or arrays of known size, determine
1601 the range of their known or possible lengths and use it
1602 conservatively as the bound for the unbounded function,
1603 and to adjust the range of the bound of the bounded ones. */
2c8861b7
JJ
1604 for (unsigned argno = 0;
1605 argno < MIN (nargs, 2)
5d6655eb 1606 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
d677a8b6
MS
1607 {
1608 tree arg = CALL_EXPR_ARG (exp, argno);
1609 if (!get_attr_nonstring_decl (arg))
5d6655eb
MS
1610 {
1611 c_strlen_data lendata = { };
a7160771
MS
1612 /* Set MAXBOUND to an arbitrary non-null non-integer
1613 node as a request to have it set to the length of
1614 the longest string in a PHI. */
1615 lendata.maxbound = arg;
5d6655eb
MS
1616 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1617 maxlen = lendata.maxbound;
1618 }
d677a8b6
MS
1619 }
1620 }
1621 /* Fall through. */
1622
4252ccd7 1623 case BUILT_IN_STRNCAT:
d677a8b6 1624 case BUILT_IN_STPNCPY:
6a33d0ff 1625 case BUILT_IN_STRNCPY:
2c8861b7 1626 if (nargs > 2)
781ff3d8
MS
1627 bound = CALL_EXPR_ARG (exp, 2);
1628 break;
6a33d0ff
MS
1629
1630 case BUILT_IN_STRNDUP:
2c8861b7 1631 if (nargs > 1)
781ff3d8
MS
1632 bound = CALL_EXPR_ARG (exp, 1);
1633 break;
1634
1635 case BUILT_IN_STRNLEN:
2438cb6a 1636 {
781ff3d8
MS
1637 tree arg = CALL_EXPR_ARG (exp, 0);
1638 if (!get_attr_nonstring_decl (arg))
5d6655eb
MS
1639 {
1640 c_strlen_data lendata = { };
a7160771
MS
1641 /* Set MAXBOUND to an arbitrary non-null non-integer
1642 node as a request to have it set to the length of
1643 the longest string in a PHI. */
1644 lendata.maxbound = arg;
5d6655eb
MS
1645 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1646 maxlen = lendata.maxbound;
1647 }
2c8861b7 1648 if (nargs > 1)
781ff3d8 1649 bound = CALL_EXPR_ARG (exp, 1);
2438cb6a
MS
1650 break;
1651 }
6a33d0ff
MS
1652
1653 default:
1654 break;
1655 }
1656
1657 /* Determine the range of the bound argument (if specified). */
1658 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1659 if (bound)
36537a1c
MS
1660 {
1661 STRIP_NOPS (bound);
1662 get_size_range (bound, bndrng);
1663 }
6a33d0ff 1664
781ff3d8
MS
1665 location_t loc = EXPR_LOCATION (exp);
1666
1667 if (bndrng[0])
1668 {
1669 /* Diagnose excessive bound prior the adjustment below and
1670 regardless of attribute nonstring. */
1671 tree maxobjsize = max_object_size ();
1672 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1673 {
1674 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1675 warning_at (loc, OPT_Wstringop_overflow_,
1676 "%K%qD specified bound %E "
1677 "exceeds maximum object size %E",
1678 exp, fndecl, bndrng[0], maxobjsize);
1679 else
1680 warning_at (loc, OPT_Wstringop_overflow_,
1681 "%K%qD specified bound [%E, %E] "
1682 "exceeds maximum object size %E",
1683 exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
1684 return;
1685 }
1686 }
1687
5d6655eb 1688 if (maxlen && !integer_all_onesp (maxlen))
d677a8b6
MS
1689 {
1690 /* Add one for the nul. */
5d6655eb
MS
1691 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1692 size_one_node);
d677a8b6
MS
1693
1694 if (!bndrng[0])
1695 {
1696 /* Conservatively use the upper bound of the lengths for
1697 both the lower and the upper bound of the operation. */
5d6655eb
MS
1698 bndrng[0] = maxlen;
1699 bndrng[1] = maxlen;
d677a8b6
MS
1700 bound = void_type_node;
1701 }
df161fc2 1702 else if (maxlen)
d677a8b6 1703 {
39c71bc3 1704 /* Replace the bound on the operation with the upper bound
d677a8b6 1705 of the length of the string if the latter is smaller. */
5d6655eb
MS
1706 if (tree_int_cst_lt (maxlen, bndrng[0]))
1707 bndrng[0] = maxlen;
1708 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1709 bndrng[1] = maxlen;
d677a8b6
MS
1710 }
1711 }
1712
6a33d0ff
MS
1713 /* Iterate over the built-in function's formal arguments and check
1714 each const char* against the actual argument. If the actual
1715 argument is declared attribute non-string issue a warning unless
1716 the argument's maximum length is bounded. */
1717 function_args_iterator it;
1718 function_args_iter_init (&it, TREE_TYPE (fndecl));
1719
1720 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1721 {
2438cb6a
MS
1722 /* Avoid iterating past the declared argument in a call
1723 to function declared without a prototype. */
1724 if (argno >= nargs)
1725 break;
1726
6a33d0ff
MS
1727 tree argtype = function_args_iter_cond (&it);
1728 if (!argtype)
1729 break;
1730
1731 if (TREE_CODE (argtype) != POINTER_TYPE)
1732 continue;
1733
1734 argtype = TREE_TYPE (argtype);
1735
1736 if (TREE_CODE (argtype) != INTEGER_TYPE
1737 || !TYPE_READONLY (argtype))
1738 continue;
1739
1740 argtype = TYPE_MAIN_VARIANT (argtype);
1741 if (argtype != char_type_node)
1742 continue;
1743
1744 tree callarg = CALL_EXPR_ARG (exp, argno);
1745 if (TREE_CODE (callarg) == ADDR_EXPR)
1746 callarg = TREE_OPERAND (callarg, 0);
1747
1748 /* See if the destination is declared with attribute "nonstring". */
1749 tree decl = get_attr_nonstring_decl (callarg);
1750 if (!decl)
1751 continue;
1752
d677a8b6 1753 /* The maximum number of array elements accessed. */
6a33d0ff 1754 offset_int wibnd = 0;
4252ccd7
MS
1755
1756 if (argno && fncode == BUILT_IN_STRNCAT)
1757 {
1758 /* See if the bound in strncat is derived from the length
1759 of the strlen of the destination (as it's expected to be).
1760 If so, reset BOUND and FNCODE to trigger a warning. */
1761 tree dstarg = CALL_EXPR_ARG (exp, 0);
1762 if (is_strlen_related_p (dstarg, bound))
1763 {
1764 /* The bound applies to the destination, not to the source,
1765 so reset these to trigger a warning without mentioning
1766 the bound. */
1767 bound = NULL;
1768 fncode = 0;
1769 }
1770 else if (bndrng[1])
1771 /* Use the upper bound of the range for strncat. */
1772 wibnd = wi::to_offset (bndrng[1]);
1773 }
1774 else if (bndrng[0])
1775 /* Use the lower bound of the range for functions other than
1776 strncat. */
6a33d0ff
MS
1777 wibnd = wi::to_offset (bndrng[0]);
1778
4252ccd7 1779 /* Determine the size of the argument array if it is one. */
6a33d0ff 1780 offset_int asize = wibnd;
4252ccd7
MS
1781 bool known_size = false;
1782 tree type = TREE_TYPE (decl);
6a33d0ff 1783
d677a8b6
MS
1784 /* Determine the array size. For arrays of unknown bound and
1785 pointers reset BOUND to trigger the appropriate warning. */
6a33d0ff 1786 if (TREE_CODE (type) == ARRAY_TYPE)
d677a8b6
MS
1787 {
1788 if (tree arrbnd = TYPE_DOMAIN (type))
1789 {
1790 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
4252ccd7
MS
1791 {
1792 asize = wi::to_offset (arrbnd) + 1;
1793 known_size = true;
1794 }
d677a8b6
MS
1795 }
1796 else if (bound == void_type_node)
1797 bound = NULL_TREE;
1798 }
1799 else if (bound == void_type_node)
1800 bound = NULL_TREE;
6a33d0ff 1801
4252ccd7
MS
1802 /* In a call to strncat with a bound in a range whose lower but
1803 not upper bound is less than the array size, reset ASIZE to
1804 be the same as the bound and the other variable to trigger
1805 the apprpriate warning below. */
1806 if (fncode == BUILT_IN_STRNCAT
1807 && bndrng[0] != bndrng[1]
1808 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1809 && (!known_size
1810 || wi::ltu_p (asize, wibnd)))
1811 {
1812 asize = wibnd;
1813 bound = NULL_TREE;
1814 fncode = 0;
1815 }
1816
6a33d0ff
MS
1817 bool warned = false;
1818
097f82ec 1819 auto_diagnostic_group d;
6a33d0ff 1820 if (wi::ltu_p (asize, wibnd))
4252ccd7
MS
1821 {
1822 if (bndrng[0] == bndrng[1])
1823 warned = warning_at (loc, OPT_Wstringop_overflow_,
1824 "%qD argument %i declared attribute "
1825 "%<nonstring%> is smaller than the specified "
1826 "bound %wu",
1827 fndecl, argno + 1, wibnd.to_uhwi ());
1828 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1829 warned = warning_at (loc, OPT_Wstringop_overflow_,
1830 "%qD argument %i declared attribute "
1831 "%<nonstring%> is smaller than "
1832 "the specified bound [%E, %E]",
1833 fndecl, argno + 1, bndrng[0], bndrng[1]);
1834 else
1835 warned = warning_at (loc, OPT_Wstringop_overflow_,
1836 "%qD argument %i declared attribute "
1837 "%<nonstring%> may be smaller than "
1838 "the specified bound [%E, %E]",
1839 fndecl, argno + 1, bndrng[0], bndrng[1]);
1840 }
1841 else if (fncode == BUILT_IN_STRNCAT)
1842 ; /* Avoid warning for calls to strncat() when the bound
1843 is equal to the size of the non-string argument. */
6a33d0ff
MS
1844 else if (!bound)
1845 warned = warning_at (loc, OPT_Wstringop_overflow_,
1846 "%qD argument %i declared attribute %<nonstring%>",
1847 fndecl, argno + 1);
1848
1849 if (warned)
1850 inform (DECL_SOURCE_LOCATION (decl),
1851 "argument %qD declared here", decl);
1852 }
1853}
1854
9a385c2d
DM
1855/* Issue an error if CALL_EXPR was flagged as requiring
1856 tall-call optimization. */
1857
1858static void
1859maybe_complain_about_tail_call (tree call_expr, const char *reason)
1860{
1861 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1862 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1863 return;
1864
1865 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1866}
1867
54aa6b58
MS
1868/* Used to define rdwr_map below. */
1869struct rdwr_access_hash: int_hash<int, -1> { };
1870
1871/* A mapping between argument number corresponding to attribute access
1872 mode (read_only, write_only, or read_write) and operands. */
1873typedef hash_map<rdwr_access_hash, attr_access> rdwr_map;
1874
1875/* Initialize a mapping for a call to function FNDECL declared with
ccacf77b 1876 attribute access. Each attribute positional operand inserts one
54aa6b58
MS
1877 entry into the mapping with the operand number as the key. */
1878
1879static void
1880init_attr_rdwr_indices (rdwr_map *rwm, tree fntype)
1881{
1882 if (!fntype)
1883 return;
1884
ccacf77b
MS
1885 for (tree access = TYPE_ATTRIBUTES (fntype);
1886 (access = lookup_attribute ("access", access));
1887 access = TREE_CHAIN (access))
54aa6b58 1888 {
ccacf77b
MS
1889 /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
1890 is the attribute argument's value. */
1891 tree mode = TREE_VALUE (access);
1892 gcc_assert (TREE_CODE (mode) == TREE_LIST);
1893 mode = TREE_VALUE (mode);
1894 gcc_assert (TREE_CODE (mode) == STRING_CST);
1895
1896 const char *modestr = TREE_STRING_POINTER (mode);
1897 for (const char *m = modestr; *m; )
54aa6b58 1898 {
ccacf77b 1899 attr_access acc = { };
54aa6b58 1900
ccacf77b
MS
1901 switch (*m)
1902 {
1903 case 'r': acc.mode = acc.read_only; break;
1904 case 'w': acc.mode = acc.write_only; break;
1905 default: acc.mode = acc.read_write; break;
1906 }
1907
1908 char *end;
1909 acc.ptrarg = strtoul (++m, &end, 10);
54aa6b58 1910 m = end;
ccacf77b
MS
1911 if (*m == ',')
1912 {
1913 acc.sizarg = strtoul (++m, &end, 10);
1914 m = end;
1915 }
1916 else
1917 acc.sizarg = UINT_MAX;
54aa6b58 1918
ccacf77b
MS
1919 acc.ptr = NULL_TREE;
1920 acc.size = NULL_TREE;
54aa6b58 1921
ccacf77b
MS
1922 /* Unconditionally add an entry for the required pointer
1923 operand of the attribute, and one for the optional size
1924 operand when it's specified. */
1925 rwm->put (acc.ptrarg, acc);
1926 if (acc.sizarg != UINT_MAX)
1927 rwm->put (acc.sizarg, acc);
1928 }
54aa6b58
MS
1929 }
1930}
1931
1932/* Returns the type of the argument ARGNO to function with type FNTYPE
1933 or null when the typoe cannot be determined or no such argument exists. */
1934
1935static tree
1936fntype_argno_type (tree fntype, unsigned argno)
1937{
1938 if (!prototype_p (fntype))
1939 return NULL_TREE;
1940
1941 tree argtype;
1942 function_args_iterator it;
1943 FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1944 if (argno-- == 0)
1945 return argtype;
1946
1947 return NULL_TREE;
1948}
1949
1950/* Helper to append the "rdwr" attribute specification described
1951 by ACCESS to the array ATTRSTR with size STRSIZE. Used in
1952 diagnostics. */
1953
1954static inline void
1955append_attrname (const std::pair<int, attr_access> &access,
1956 char *attrstr, size_t strsize)
1957{
1958 /* Append the relevant attribute to the string. This (deliberately)
1959 appends the attribute pointer operand even when none was specified. */
1960 size_t len = strlen (attrstr);
1961
1962 const char *atname
1963 = (access.second.mode == attr_access::read_only
1964 ? "read_only"
1965 : (access.second.mode == attr_access::write_only
1966 ? "write_only" : "read_write"));
1967
1968 const char *sep = len ? ", " : "";
1969
1970 if (access.second.sizarg == UINT_MAX)
1971 snprintf (attrstr + len, strsize - len,
1972 "%s%s (%i)", sep, atname,
1973 access.second.ptrarg + 1);
1974 else
1975 snprintf (attrstr + len, strsize - len,
1976 "%s%s (%i, %i)", sep, atname,
1977 access.second.ptrarg + 1, access.second.sizarg + 1);
1978}
1979
1980/* Iterate over attribute access read-only, read-write, and write-only
1981 arguments and diagnose past-the-end accesses and related problems
1982 in the function call EXP. */
1983
1984static void
1985maybe_warn_rdwr_sizes (rdwr_map *rwm, tree exp)
1986{
1987 tree fndecl = NULL_TREE;
1988 tree fntype = NULL_TREE;
1989 if (tree fnaddr = CALL_EXPR_FN (exp))
1990 {
1991 if (TREE_CODE (fnaddr) == ADDR_EXPR)
1992 {
1993 fndecl = TREE_OPERAND (fnaddr, 0);
1994 fntype = TREE_TYPE (fndecl);
1995 }
1996 else
1997 fntype = TREE_TYPE (TREE_TYPE (fnaddr));
1998 }
1999
2000 if (!fntype)
2001 return;
2002
2003 /* A string describing the attributes that the warnings issued by this
2004 function apply to. Used to print one informational note per function
2005 call, rather than one per warning. That reduces clutter. */
2006 char attrstr[80];
2007 attrstr[0] = 0;
2008
2009 for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
2010 {
2011 std::pair<int, attr_access> access = *it;
2012
2013 /* Get the function call arguments corresponding to the attribute's
2014 positional arguments. When both arguments have been specified
2015 there will be two entries in *RWM, one for each. They are
2016 cross-referenced by their respective argument numbers in
2017 ACCESS.PTRARG and ACCESS.SIZARG. */
2018 const int ptridx = access.second.ptrarg;
2019 const int sizidx = access.second.sizarg;
2020
2021 gcc_assert (ptridx != -1);
2022 gcc_assert (access.first == ptridx || access.first == sizidx);
2023
2024 /* The pointer is set to null for the entry corresponding to
2025 the size argument. Skip it. It's handled when the entry
2026 corresponding to the pointer argument comes up. */
2027 if (!access.second.ptr)
2028 continue;
2029
2030 tree argtype = fntype_argno_type (fntype, ptridx);
2031 argtype = TREE_TYPE (argtype);
2032
2033 tree size;
2034 if (sizidx == -1)
2035 {
2036 /* If only the pointer attribute operand was specified
2037 and not size, set SIZE to the size of one element of
2038 the pointed to type to detect smaller objects (null
2039 pointers are diagnosed in this case only if
2040 the pointer is also declared with attribute nonnull. */
2041 size = size_one_node;
2042 }
2043 else
2044 size = rwm->get (sizidx)->size;
2045
2046 tree ptr = access.second.ptr;
2047 tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
2048 if (get_size_range (size, sizrng, true)
2049 && tree_int_cst_sgn (sizrng[0]) < 0
2050 && tree_int_cst_sgn (sizrng[1]) < 0)
2051 {
2052 /* Warn about negative sizes. */
2053 bool warned = false;
2054 location_t loc = EXPR_LOCATION (exp);
2055 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2056 warned = warning_at (loc, OPT_Wstringop_overflow_,
2057 "%Kargument %i value %E is negative",
2058 exp, sizidx + 1, size);
2059 else
2060 warned = warning_at (loc, OPT_Wstringop_overflow_,
2061 "%Kargument %i range [%E, %E] is negative",
2062 exp, sizidx + 1, sizrng[0], sizrng[1]);
2063 if (warned)
2064 {
2065 append_attrname (access, attrstr, sizeof attrstr);
2066 /* Avoid warning again for the same attribute. */
2067 continue;
2068 }
2069 }
2070
2071 if (tree_int_cst_sgn (sizrng[0]) >= 0)
2072 {
2073 if (COMPLETE_TYPE_P (argtype))
2074 {
2075 /* Multiple SIZE by the size of the type the pointer
2076 argument points to. If it's incomplete the size
2077 is used as is. */
2078 size = NULL_TREE;
2079 if (tree argsize = TYPE_SIZE_UNIT (argtype))
2080 if (TREE_CODE (argsize) == INTEGER_CST)
2081 {
2082 const int prec = TYPE_PRECISION (sizetype);
2083 wide_int minsize = wi::to_wide (sizrng[0], prec);
2084 minsize *= wi::to_wide (argsize, prec);
2085 size = wide_int_to_tree (sizetype, minsize);
2086 }
2087 }
2088 }
2089 else
2090 size = NULL_TREE;
2091
2092 if (sizidx >= 0
2093 && integer_zerop (ptr)
2094 && tree_int_cst_sgn (sizrng[0]) > 0)
2095 {
2096 /* Warn about null pointers with positive sizes. This is
2097 different from also declaring the pointer argument with
2098 attribute nonnull when the function accepts null pointers
2099 only when the corresponding size is zero. */
2100 bool warned = false;
2101 location_t loc = EXPR_LOCATION (exp);
2102 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2103 warned = warning_at (loc, OPT_Wnonnull,
2104 "%Kargument %i is null but the corresponding "
2105 "size argument %i value is %E",
2106 exp, ptridx + 1, sizidx + 1, size);
2107 else
2108 warned = warning_at (loc, OPT_Wnonnull,
2109 "%Kargument %i is null but the corresponding "
2110 "size argument %i range is [%E, %E]",
2111 exp, ptridx + 1, sizidx + 1,
2112 sizrng[0], sizrng[1]);
2113 if (warned)
2114 {
2115 append_attrname (access, attrstr, sizeof attrstr);
2116 /* Avoid warning again for the same attribute. */
2117 continue;
2118 }
2119 }
2120
2121 tree objsize = compute_objsize (ptr, 0);
2122
2123 tree srcsize;
2124 if (access.second.mode == attr_access::write_only)
2125 {
2126 /* For a write-only argument there is no source. */
2127 srcsize = NULL_TREE;
2128 }
2129 else
2130 {
2131 /* For read-only and read-write attributes also set the source
2132 size. */
2133 srcsize = objsize;
2134 if (access.second.mode == attr_access::read_only)
2135 {
2136 /* For a read-only attribute there is no destination so
2137 clear OBJSIZE. This emits "reading N bytes" kind of
2138 diagnostics instead of the "writing N bytes" kind. */
2139 objsize = NULL_TREE;
2140 }
2141 }
2142
2143 /* Clear the no-warning bit in case it was set in a prior
2144 iteration so that accesses via different arguments are
2145 diagnosed. */
2146 TREE_NO_WARNING (exp) = false;
2147 check_access (exp, NULL_TREE, NULL_TREE, size, /*maxread=*/ NULL_TREE,
2148 srcsize, objsize);
2149
2150 if (TREE_NO_WARNING (exp))
2151 /* If check_access issued a warning above, append the relevant
2152 attribute to the string. */
2153 append_attrname (access, attrstr, sizeof attrstr);
2154 }
2155
2156 if (!*attrstr)
2157 return;
2158
2159 if (fndecl)
2160 inform (DECL_SOURCE_LOCATION (fndecl),
2161 "in a call to function %qD declared with attribute %qs",
2162 fndecl, attrstr);
2163 else
2164 inform (EXPR_LOCATION (fndecl),
2165 "in a call with type %qT and attribute %qs",
2166 fntype, attrstr);
2167
2168 /* Set the bit in case if was cleared and not set above. */
2169 TREE_NO_WARNING (exp) = true;
2170}
2171
d7cdf113 2172/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
b8698a0f 2173 CALL_EXPR EXP.
d7cdf113
JL
2174
2175 NUM_ACTUALS is the total number of parameters.
2176
2177 N_NAMED_ARGS is the total number of named arguments.
2178
078a18a4
SL
2179 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
2180 value, or null.
2181
d7cdf113
JL
2182 FNDECL is the tree code for the target of this call (if known)
2183
2184 ARGS_SO_FAR holds state needed by the target to know where to place
2185 the next argument.
2186
2187 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
2188 for arguments which are passed in registers.
2189
2190 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
2191 and may be modified by this routine.
2192
f2d33f13 2193 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
026c3cfd 2194 flags which may be modified by this routine.
dd292d0a 2195
6de9cd9a
DN
2196 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
2197 that requires allocation of stack space.
2198
dd292d0a
MM
2199 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
2200 the thunked-to function. */
d7cdf113
JL
2201
2202static void
d329e058
AJ
2203initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
2204 struct arg_data *args,
2205 struct args_size *args_size,
2206 int n_named_args ATTRIBUTE_UNUSED,
078a18a4 2207 tree exp, tree struct_value_addr_value,
45769134 2208 tree fndecl, tree fntype,
d5cc9181 2209 cumulative_args_t args_so_far,
d329e058 2210 int reg_parm_stack_space,
a20c5714
RS
2211 rtx *old_stack_level,
2212 poly_int64_pod *old_pending_adj,
dd292d0a 2213 int *must_preallocate, int *ecf_flags,
6de9cd9a 2214 bool *may_tailcall, bool call_from_thunk_p)
d7cdf113 2215{
d5cc9181 2216 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
db3927fb 2217 location_t loc = EXPR_LOCATION (exp);
d7cdf113
JL
2218
2219 /* Count arg position in order args appear. */
2220 int argpos;
2221
2222 int i;
f725a3ec 2223
d7cdf113
JL
2224 args_size->constant = 0;
2225 args_size->var = 0;
2226
d5e254e1
IE
2227 bitmap_obstack_initialize (NULL);
2228
d7cdf113 2229 /* In this loop, we consider args in the order they are written.
3d9684ae 2230 We fill up ARGS from the back. */
d7cdf113 2231
3d9684ae 2232 i = num_actuals - 1;
078a18a4 2233 {
31db0fe0 2234 int j = i;
078a18a4
SL
2235 call_expr_arg_iterator iter;
2236 tree arg;
d5e254e1 2237 bitmap slots = NULL;
078a18a4
SL
2238
2239 if (struct_value_addr_value)
2240 {
2241 args[j].tree_value = struct_value_addr_value;
3d9684ae 2242 j--;
078a18a4 2243 }
afc610db 2244 argpos = 0;
078a18a4
SL
2245 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2246 {
2247 tree argtype = TREE_TYPE (arg);
d5e254e1 2248
078a18a4
SL
2249 if (targetm.calls.split_complex_arg
2250 && argtype
2251 && TREE_CODE (argtype) == COMPLEX_TYPE
2252 && targetm.calls.split_complex_arg (argtype))
2253 {
2254 tree subtype = TREE_TYPE (argtype);
078a18a4 2255 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
3d9684ae 2256 j--;
078a18a4
SL
2257 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
2258 }
2259 else
2260 args[j].tree_value = arg;
3d9684ae 2261 j--;
afc610db 2262 argpos++;
078a18a4 2263 }
d5e254e1
IE
2264
2265 if (slots)
2266 BITMAP_FREE (slots);
078a18a4
SL
2267 }
2268
d5e254e1
IE
2269 bitmap_obstack_release (NULL);
2270
302db8ba
MS
2271 /* Extract attribute alloc_size from the type of the called expression
2272 (which could be a function or a function pointer) and if set, store
2273 the indices of the corresponding arguments in ALLOC_IDX, and then
2274 the actual argument(s) at those indices in ALLOC_ARGS. */
8bd9f164 2275 int alloc_idx[2] = { -1, -1 };
302db8ba
MS
2276 if (tree alloc_size = lookup_attribute ("alloc_size",
2277 TYPE_ATTRIBUTES (fntype)))
8bd9f164
MS
2278 {
2279 tree args = TREE_VALUE (alloc_size);
2280 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
2281 if (TREE_CHAIN (args))
2282 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
2283 }
2284
2285 /* Array for up to the two attribute alloc_size arguments. */
2286 tree alloc_args[] = { NULL_TREE, NULL_TREE };
2287
54aa6b58
MS
2288 /* Map of attribute read_only, write_only, or read_write specifications
2289 for function arguments. */
2290 rdwr_map rdwr_idx;
2291 init_attr_rdwr_indices (&rdwr_idx, fntype);
2292
d7cdf113 2293 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
3d9684ae 2294 for (argpos = 0; argpos < num_actuals; i--, argpos++)
d7cdf113 2295 {
078a18a4 2296 tree type = TREE_TYPE (args[i].tree_value);
d7cdf113 2297 int unsignedp;
d7cdf113 2298
d7cdf113 2299 /* Replace erroneous argument with constant zero. */
d0f062fb 2300 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
d7cdf113
JL
2301 args[i].tree_value = integer_zero_node, type = integer_type_node;
2302
ebf0bf7f
JJ
2303 /* If TYPE is a transparent union or record, pass things the way
2304 we would pass the first field of the union or record. We have
2305 already verified that the modes are the same. */
920ea3b8 2306 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
ebf0bf7f 2307 type = TREE_TYPE (first_field (type));
d7cdf113
JL
2308
2309 /* Decide where to pass this arg.
2310
2311 args[i].reg is nonzero if all or part is passed in registers.
2312
2313 args[i].partial is nonzero if part but not all is passed in registers,
78a52f11 2314 and the exact value says how many bytes are passed in registers.
d7cdf113
JL
2315
2316 args[i].pass_on_stack is nonzero if the argument must at least be
2317 computed on the stack. It may then be loaded back into registers
2318 if args[i].reg is nonzero.
2319
2320 These decisions are driven by the FUNCTION_... macros and must agree
2321 with those made by function.c. */
2322
2323 /* See if this argument should be passed by invisible reference. */
cf0d189e
RS
2324 function_arg_info arg (type, argpos < n_named_args);
2325 if (pass_by_reference (args_so_far_pnt, arg))
d7cdf113 2326 {
9969aaf6 2327 bool callee_copies;
d6e1acf6 2328 tree base = NULL_TREE;
9969aaf6 2329
cf0d189e 2330 callee_copies = reference_callee_copied (args_so_far_pnt, arg);
9969aaf6
RH
2331
2332 /* If we're compiling a thunk, pass through invisible references
2333 instead of making a copy. */
dd292d0a 2334 if (call_from_thunk_p
9969aaf6
RH
2335 || (callee_copies
2336 && !TREE_ADDRESSABLE (type)
2337 && (base = get_base_address (args[i].tree_value))
9c3d55b4 2338 && TREE_CODE (base) != SSA_NAME
9969aaf6 2339 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
d7cdf113 2340 {
006e317a
JH
2341 /* We may have turned the parameter value into an SSA name.
2342 Go back to the original parameter so we can take the
2343 address. */
2344 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2345 {
2346 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2347 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2348 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2349 }
fe8dd12e
JH
2350 /* Argument setup code may have copied the value to register. We
2351 revert that optimization now because the tail call code must
2352 use the original location. */
2353 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2354 && !MEM_P (DECL_RTL (args[i].tree_value))
2355 && DECL_INCOMING_RTL (args[i].tree_value)
2356 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2357 set_decl_rtl (args[i].tree_value,
2358 DECL_INCOMING_RTL (args[i].tree_value));
2359
c4b9a87e
ER
2360 mark_addressable (args[i].tree_value);
2361
9969aaf6
RH
2362 /* We can't use sibcalls if a callee-copied argument is
2363 stored in the current function's frame. */
2364 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
9a385c2d
DM
2365 {
2366 *may_tailcall = false;
2367 maybe_complain_about_tail_call (exp,
2368 "a callee-copied argument is"
cefc0906 2369 " stored in the current"
9a385c2d
DM
2370 " function's frame");
2371 }
9fd47435 2372
db3927fb
AH
2373 args[i].tree_value = build_fold_addr_expr_loc (loc,
2374 args[i].tree_value);
9969aaf6
RH
2375 type = TREE_TYPE (args[i].tree_value);
2376
becfd6e5
KZ
2377 if (*ecf_flags & ECF_CONST)
2378 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
f21add07 2379 }
d7cdf113
JL
2380 else
2381 {
2382 /* We make a copy of the object and pass the address to the
2383 function being called. */
2384 rtx copy;
2385
d0f062fb 2386 if (!COMPLETE_TYPE_P (type)
b38f3813
EB
2387 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2388 || (flag_stack_check == GENERIC_STACK_CHECK
2389 && compare_tree_int (TYPE_SIZE_UNIT (type),
2390 STACK_CHECK_MAX_VAR_SIZE) > 0))
d7cdf113
JL
2391 {
2392 /* This is a variable-sized object. Make space on the stack
2393 for it. */
078a18a4 2394 rtx size_rtx = expr_size (args[i].tree_value);
d7cdf113
JL
2395
2396 if (*old_stack_level == 0)
2397 {
9eac0f2a 2398 emit_stack_save (SAVE_BLOCK, old_stack_level);
d7cdf113
JL
2399 *old_pending_adj = pending_stack_adjust;
2400 pending_stack_adjust = 0;
2401 }
2402
d3c12306
EB
2403 /* We can pass TRUE as the 4th argument because we just
2404 saved the stack pointer and will restore it right after
2405 the call. */
3a42502d
RH
2406 copy = allocate_dynamic_stack_space (size_rtx,
2407 TYPE_ALIGN (type),
2408 TYPE_ALIGN (type),
9e878cf1
EB
2409 max_int_size_in_bytes
2410 (type),
3a42502d
RH
2411 true);
2412 copy = gen_rtx_MEM (BLKmode, copy);
3bdf5ad1 2413 set_mem_attributes (copy, type, 1);
d7cdf113
JL
2414 }
2415 else
9474e8ab 2416 copy = assign_temp (type, 1, 0);
d7cdf113 2417
ee45a32d 2418 store_expr (args[i].tree_value, copy, 0, false, false);
d7cdf113 2419
becfd6e5
KZ
2420 /* Just change the const function to pure and then let
2421 the next test clear the pure based on
2422 callee_copies. */
2423 if (*ecf_flags & ECF_CONST)
2424 {
2425 *ecf_flags &= ~ECF_CONST;
2426 *ecf_flags |= ECF_PURE;
2427 }
2428
2429 if (!callee_copies && *ecf_flags & ECF_PURE)
2430 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
9969aaf6
RH
2431
2432 args[i].tree_value
db3927fb 2433 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
9969aaf6 2434 type = TREE_TYPE (args[i].tree_value);
6de9cd9a 2435 *may_tailcall = false;
9a385c2d
DM
2436 maybe_complain_about_tail_call (exp,
2437 "argument must be passed"
2438 " by copying");
d7cdf113 2439 }
257caa55 2440 arg.pass_by_reference = true;
d7cdf113
JL
2441 }
2442
8df83eae 2443 unsignedp = TYPE_UNSIGNED (type);
cf0d189e
RS
2444 arg.type = type;
2445 arg.mode
2446 = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2447 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
d7cdf113
JL
2448
2449 args[i].unsignedp = unsignedp;
cf0d189e 2450 args[i].mode = arg.mode;
7d167afd 2451
974aedcc
MP
2452 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2453
6783fdb7 2454 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
3c07301f 2455
d5e254e1 2456 if (args[i].reg && CONST_INT_P (args[i].reg))
dbcdd561 2457 args[i].reg = NULL;
d5e254e1 2458
7d167afd
JJ
2459 /* If this is a sibling call and the machine has register windows, the
2460 register window has to be unwinded before calling the routine, so
2461 arguments have to go into the incoming registers. */
3c07301f
NF
2462 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2463 args[i].tail_call_reg
6783fdb7 2464 = targetm.calls.function_incoming_arg (args_so_far, arg);
3c07301f
NF
2465 else
2466 args[i].tail_call_reg = args[i].reg;
7d167afd 2467
d7cdf113 2468 if (args[i].reg)
a7c81bc1 2469 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
d7cdf113 2470
0ffef200 2471 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
d7cdf113
JL
2472
2473 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2474 it means that we are to pass this arg in the register(s) designated
2475 by the PARALLEL, but also to pass it in the stack. */
2476 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2477 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2478 args[i].pass_on_stack = 1;
2479
2480 /* If this is an addressable type, we must preallocate the stack
2481 since we must evaluate the object into its final location.
2482
2483 If this is to be passed in both registers and the stack, it is simpler
2484 to preallocate. */
2485 if (TREE_ADDRESSABLE (type)
2486 || (args[i].pass_on_stack && args[i].reg != 0))
2487 *must_preallocate = 1;
2488
d7cdf113 2489 /* Compute the stack-size of this argument. */
31db0fe0 2490 if (args[i].reg == 0 || args[i].partial != 0
d5e254e1
IE
2491 || reg_parm_stack_space > 0
2492 || args[i].pass_on_stack)
cf0d189e 2493 locate_and_pad_parm (arg.mode, type,
d7cdf113
JL
2494#ifdef STACK_PARMS_IN_REG_PARM_AREA
2495 1,
2496#else
2497 args[i].reg != 0,
2498#endif
2e4ceca5 2499 reg_parm_stack_space,
e7949876
AM
2500 args[i].pass_on_stack ? 0 : args[i].partial,
2501 fndecl, args_size, &args[i].locate);
648bb159
RS
2502#ifdef BLOCK_REG_PADDING
2503 else
2504 /* The argument is passed entirely in registers. See at which
2505 end it should be padded. */
2506 args[i].locate.where_pad =
cf0d189e 2507 BLOCK_REG_PADDING (arg.mode, type,
648bb159
RS
2508 int_size_in_bytes (type) <= UNITS_PER_WORD);
2509#endif
f725a3ec 2510
d7cdf113
JL
2511 /* Update ARGS_SIZE, the total stack space for args so far. */
2512
e7949876
AM
2513 args_size->constant += args[i].locate.size.constant;
2514 if (args[i].locate.size.var)
2515 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
d7cdf113
JL
2516
2517 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2518 have been used, etc. */
2519
6930c98c
RS
2520 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2521 promoted_mode used for function_arg above. However, the
2522 corresponding handling of incoming arguments in function.c
2523 does pass the promoted mode. */
cf0d189e
RS
2524 arg.mode = TYPE_MODE (type);
2525 targetm.calls.function_arg_advance (args_so_far, arg);
8bd9f164
MS
2526
2527 /* Store argument values for functions decorated with attribute
2528 alloc_size. */
2529 if (argpos == alloc_idx[0])
2530 alloc_args[0] = args[i].tree_value;
2531 else if (argpos == alloc_idx[1])
2532 alloc_args[1] = args[i].tree_value;
54aa6b58
MS
2533
2534 /* Save the actual argument that corresponds to the access attribute
2535 operand for later processing. */
2536 if (attr_access *access = rdwr_idx.get (argpos))
2537 {
2538 if (POINTER_TYPE_P (type))
2539 {
2540 access->ptr = args[i].tree_value;
2541 gcc_assert (access->size == NULL_TREE);
2542 }
2543 else
2544 {
2545 access->size = args[i].tree_value;
2546 gcc_assert (access->ptr == NULL_TREE);
2547 }
2548 }
8bd9f164
MS
2549 }
2550
2551 if (alloc_args[0])
2552 {
2553 /* Check the arguments of functions decorated with attribute
2554 alloc_size. */
2555 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
d7cdf113 2556 }
6a33d0ff
MS
2557
2558 /* Detect passing non-string arguments to functions expecting
2559 nul-terminated strings. */
2560 maybe_warn_nonstring_arg (fndecl, exp);
54aa6b58
MS
2561
2562 /* Check read_only, write_only, and read_write arguments. */
2563 maybe_warn_rdwr_sizes (&rdwr_idx, exp);
d7cdf113
JL
2564}
2565
599f37b6
JL
2566/* Update ARGS_SIZE to contain the total size for the argument block.
2567 Return the original constant component of the argument block's size.
2568
2569 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2570 for arguments passed in registers. */
2571
a20c5714 2572static poly_int64
d329e058
AJ
2573compute_argument_block_size (int reg_parm_stack_space,
2574 struct args_size *args_size,
033df0b9 2575 tree fndecl ATTRIBUTE_UNUSED,
5d059ed9 2576 tree fntype ATTRIBUTE_UNUSED,
d329e058 2577 int preferred_stack_boundary ATTRIBUTE_UNUSED)
599f37b6 2578{
a20c5714 2579 poly_int64 unadjusted_args_size = args_size->constant;
599f37b6 2580
f73ad30e
JH
2581 /* For accumulate outgoing args mode we don't need to align, since the frame
2582 will be already aligned. Align to STACK_BOUNDARY in order to prevent
f5143c46 2583 backends from generating misaligned frame sizes. */
f73ad30e
JH
2584 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2585 preferred_stack_boundary = STACK_BOUNDARY;
f73ad30e 2586
599f37b6
JL
2587 /* Compute the actual size of the argument block required. The variable
2588 and constant sizes must be combined, the size may have to be rounded,
2589 and there may be a minimum required size. */
2590
2591 if (args_size->var)
2592 {
2593 args_size->var = ARGS_SIZE_TREE (*args_size);
2594 args_size->constant = 0;
2595
c2f8b491
JH
2596 preferred_stack_boundary /= BITS_PER_UNIT;
2597 if (preferred_stack_boundary > 1)
1503a7ec
JH
2598 {
2599 /* We don't handle this case yet. To handle it correctly we have
f5143c46 2600 to add the delta, round and subtract the delta.
1503a7ec 2601 Currently no machine description requires this support. */
a20c5714
RS
2602 gcc_assert (multiple_p (stack_pointer_delta,
2603 preferred_stack_boundary));
1503a7ec
JH
2604 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2605 }
599f37b6
JL
2606
2607 if (reg_parm_stack_space > 0)
2608 {
2609 args_size->var
2610 = size_binop (MAX_EXPR, args_size->var,
fed3cef0 2611 ssize_int (reg_parm_stack_space));
599f37b6 2612
599f37b6
JL
2613 /* The area corresponding to register parameters is not to count in
2614 the size of the block we need. So make the adjustment. */
5d059ed9 2615 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b
KT
2616 args_size->var
2617 = size_binop (MINUS_EXPR, args_size->var,
2618 ssize_int (reg_parm_stack_space));
599f37b6
JL
2619 }
2620 }
2621 else
2622 {
c2f8b491 2623 preferred_stack_boundary /= BITS_PER_UNIT;
0a1c58a2
JL
2624 if (preferred_stack_boundary < 1)
2625 preferred_stack_boundary = 1;
a20c5714
RS
2626 args_size->constant = (aligned_upper_bound (args_size->constant
2627 + stack_pointer_delta,
2628 preferred_stack_boundary)
1503a7ec 2629 - stack_pointer_delta);
599f37b6 2630
a20c5714
RS
2631 args_size->constant = upper_bound (args_size->constant,
2632 reg_parm_stack_space);
599f37b6 2633
5d059ed9 2634 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 2635 args_size->constant -= reg_parm_stack_space;
599f37b6
JL
2636 }
2637 return unadjusted_args_size;
2638}
2639
19832c77 2640/* Precompute parameters as needed for a function call.
cc0b1adc 2641
f2d33f13 2642 FLAGS is mask of ECF_* constants.
cc0b1adc 2643
cc0b1adc
JL
2644 NUM_ACTUALS is the number of arguments.
2645
f725a3ec
KH
2646 ARGS is an array containing information for each argument; this
2647 routine fills in the INITIAL_VALUE and VALUE fields for each
2648 precomputed argument. */
cc0b1adc
JL
2649
2650static void
84b8030f 2651precompute_arguments (int num_actuals, struct arg_data *args)
cc0b1adc
JL
2652{
2653 int i;
2654
3638733b 2655 /* If this is a libcall, then precompute all arguments so that we do not
82c82743 2656 get extraneous instructions emitted as part of the libcall sequence. */
6a4e56a9
JJ
2657
2658 /* If we preallocated the stack space, and some arguments must be passed
2659 on the stack, then we must precompute any parameter which contains a
2660 function call which will store arguments on the stack.
2661 Otherwise, evaluating the parameter may clobber previous parameters
2662 which have already been stored into the stack. (we have code to avoid
2663 such case by saving the outgoing stack arguments, but it results in
2664 worse code) */
84b8030f 2665 if (!ACCUMULATE_OUTGOING_ARGS)
82c82743 2666 return;
7ae4ad28 2667
cc0b1adc 2668 for (i = 0; i < num_actuals; i++)
82c82743 2669 {
cde0f3fd 2670 tree type;
ef4bddc2 2671 machine_mode mode;
ddef6bc7 2672
84b8030f 2673 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
6a4e56a9
JJ
2674 continue;
2675
82c82743 2676 /* If this is an addressable type, we cannot pre-evaluate it. */
cde0f3fd
PB
2677 type = TREE_TYPE (args[i].tree_value);
2678 gcc_assert (!TREE_ADDRESSABLE (type));
cc0b1adc 2679
82c82743 2680 args[i].initial_value = args[i].value
84217346 2681 = expand_normal (args[i].tree_value);
cc0b1adc 2682
cde0f3fd 2683 mode = TYPE_MODE (type);
82c82743
RH
2684 if (mode != args[i].mode)
2685 {
cde0f3fd 2686 int unsignedp = args[i].unsignedp;
82c82743
RH
2687 args[i].value
2688 = convert_modes (args[i].mode, mode,
2689 args[i].value, args[i].unsignedp);
cde0f3fd 2690
82c82743
RH
2691 /* CSE will replace this only if it contains args[i].value
2692 pseudo, so convert it down to the declared mode using
2693 a SUBREG. */
2694 if (REG_P (args[i].value)
cde0f3fd
PB
2695 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2696 && promote_mode (type, mode, &unsignedp) != args[i].mode)
82c82743
RH
2697 {
2698 args[i].initial_value
2699 = gen_lowpart_SUBREG (mode, args[i].value);
2700 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
27be0c32 2701 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
82c82743 2702 }
82c82743
RH
2703 }
2704 }
cc0b1adc
JL
2705}
2706
0f9b3ea6
JL
2707/* Given the current state of MUST_PREALLOCATE and information about
2708 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2709 compute and return the final value for MUST_PREALLOCATE. */
2710
2711static int
b8698a0f 2712finalize_must_preallocate (int must_preallocate, int num_actuals,
5039610b 2713 struct arg_data *args, struct args_size *args_size)
0f9b3ea6
JL
2714{
2715 /* See if we have or want to preallocate stack space.
2716
2717 If we would have to push a partially-in-regs parm
2718 before other stack parms, preallocate stack space instead.
2719
2720 If the size of some parm is not a multiple of the required stack
2721 alignment, we must preallocate.
2722
2723 If the total size of arguments that would otherwise create a copy in
2724 a temporary (such as a CALL) is more than half the total argument list
2725 size, preallocation is faster.
2726
2727 Another reason to preallocate is if we have a machine (like the m88k)
2728 where stack alignment is required to be maintained between every
2729 pair of insns, not just when the call is made. However, we assume here
2730 that such machines either do not have push insns (and hence preallocation
2731 would occur anyway) or the problem is taken care of with
2732 PUSH_ROUNDING. */
2733
2734 if (! must_preallocate)
2735 {
2736 int partial_seen = 0;
a20c5714 2737 poly_int64 copy_to_evaluate_size = 0;
0f9b3ea6
JL
2738 int i;
2739
2740 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2741 {
2742 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2743 partial_seen = 1;
2744 else if (partial_seen && args[i].reg == 0)
2745 must_preallocate = 1;
2746
2747 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2748 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2749 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2750 || TREE_CODE (args[i].tree_value) == COND_EXPR
2751 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2752 copy_to_evaluate_size
2753 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2754 }
2755
a20c5714
RS
2756 if (maybe_ne (args_size->constant, 0)
2757 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
0f9b3ea6
JL
2758 must_preallocate = 1;
2759 }
2760 return must_preallocate;
2761}
599f37b6 2762
a45bdd02
JL
2763/* If we preallocated stack space, compute the address of each argument
2764 and store it into the ARGS array.
2765
f725a3ec 2766 We need not ensure it is a valid memory address here; it will be
a45bdd02
JL
2767 validized when it is used.
2768
2769 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2770
2771static void
d329e058 2772compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
a45bdd02
JL
2773{
2774 if (argblock)
2775 {
2776 rtx arg_reg = argblock;
a20c5714
RS
2777 int i;
2778 poly_int64 arg_offset = 0;
a45bdd02
JL
2779
2780 if (GET_CODE (argblock) == PLUS)
a20c5714
RS
2781 {
2782 arg_reg = XEXP (argblock, 0);
2783 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2784 }
a45bdd02
JL
2785
2786 for (i = 0; i < num_actuals; i++)
2787 {
e7949876
AM
2788 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2789 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
a45bdd02 2790 rtx addr;
bfc45551 2791 unsigned int align, boundary;
a20c5714 2792 poly_uint64 units_on_stack = 0;
ef4bddc2 2793 machine_mode partial_mode = VOIDmode;
a45bdd02
JL
2794
2795 /* Skip this parm if it will not be passed on the stack. */
7816b87e
JC
2796 if (! args[i].pass_on_stack
2797 && args[i].reg != 0
2798 && args[i].partial == 0)
a45bdd02
JL
2799 continue;
2800
5b8b4a88
JJ
2801 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2802 continue;
2803
a708f4b6 2804 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
0a81f074 2805 addr = plus_constant (Pmode, addr, arg_offset);
7816b87e
JC
2806
2807 if (args[i].partial != 0)
2808 {
2809 /* Only part of the parameter is being passed on the stack.
2810 Generate a simple memory reference of the correct size. */
2811 units_on_stack = args[i].locate.size.constant;
a20c5714 2812 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
f4b31647 2813 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
7816b87e 2814 args[i].stack = gen_rtx_MEM (partial_mode, addr);
f5541398 2815 set_mem_size (args[i].stack, units_on_stack);
7816b87e
JC
2816 }
2817 else
2818 {
2819 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2820 set_mem_attributes (args[i].stack,
2821 TREE_TYPE (args[i].tree_value), 1);
2822 }
bfc45551
AM
2823 align = BITS_PER_UNIT;
2824 boundary = args[i].locate.boundary;
a20c5714 2825 poly_int64 offset_val;
76b0cbf8 2826 if (args[i].locate.where_pad != PAD_DOWNWARD)
bfc45551 2827 align = boundary;
a20c5714 2828 else if (poly_int_rtx_p (offset, &offset_val))
bfc45551 2829 {
a20c5714
RS
2830 align = least_bit_hwi (boundary);
2831 unsigned int offset_align
2832 = known_alignment (offset_val) * BITS_PER_UNIT;
2833 if (offset_align != 0)
2834 align = MIN (align, offset_align);
bfc45551
AM
2835 }
2836 set_mem_align (args[i].stack, align);
a45bdd02 2837
a708f4b6 2838 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
0a81f074 2839 addr = plus_constant (Pmode, addr, arg_offset);
7816b87e
JC
2840
2841 if (args[i].partial != 0)
2842 {
2843 /* Only part of the parameter is being passed on the stack.
2844 Generate a simple memory reference of the correct size.
2845 */
2846 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
f5541398 2847 set_mem_size (args[i].stack_slot, units_on_stack);
7816b87e
JC
2848 }
2849 else
2850 {
2851 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2852 set_mem_attributes (args[i].stack_slot,
2853 TREE_TYPE (args[i].tree_value), 1);
2854 }
bfc45551 2855 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
7ab923cc
JJ
2856
2857 /* Function incoming arguments may overlap with sibling call
2858 outgoing arguments and we cannot allow reordering of reads
2859 from function arguments with stores to outgoing arguments
2860 of sibling calls. */
ba4828e0
RK
2861 set_mem_alias_set (args[i].stack, 0);
2862 set_mem_alias_set (args[i].stack_slot, 0);
a45bdd02
JL
2863 }
2864 }
2865}
f725a3ec 2866
a45bdd02
JL
2867/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2868 in a call instruction.
2869
2870 FNDECL is the tree node for the target function. For an indirect call
2871 FNDECL will be NULL_TREE.
2872
09e2bf48 2873 ADDR is the operand 0 of CALL_EXPR for this call. */
a45bdd02
JL
2874
2875static rtx
d329e058 2876rtx_for_function_call (tree fndecl, tree addr)
a45bdd02
JL
2877{
2878 rtx funexp;
2879
2880 /* Get the function to call, in the form of RTL. */
2881 if (fndecl)
2882 {
ad960f56 2883 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
bbee5843 2884 TREE_USED (fndecl) = 1;
a45bdd02
JL
2885
2886 /* Get a SYMBOL_REF rtx for the function address. */
2887 funexp = XEXP (DECL_RTL (fndecl), 0);
2888 }
2889 else
2890 /* Generate an rtx (probably a pseudo-register) for the address. */
2891 {
2892 push_temp_slots ();
84217346 2893 funexp = expand_normal (addr);
f725a3ec 2894 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
a45bdd02
JL
2895 }
2896 return funexp;
2897}
2898
4b522b8f
TV
2899/* Return the static chain for this function, if any. */
2900
2901rtx
2902rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2903{
2904 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2905 return NULL;
2906
2907 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2908}
2909
5275901c
JJ
2910/* Internal state for internal_arg_pointer_based_exp and its helpers. */
2911static struct
2912{
2913 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2914 or NULL_RTX if none has been scanned yet. */
48810515 2915 rtx_insn *scan_start;
5275901c
JJ
2916 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2917 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2918 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2919 with fixed offset, or PC if this is with variable or unknown offset. */
9771b263 2920 vec<rtx> cache;
5275901c
JJ
2921} internal_arg_pointer_exp_state;
2922
e9f56944 2923static rtx internal_arg_pointer_based_exp (const_rtx, bool);
5275901c
JJ
2924
2925/* Helper function for internal_arg_pointer_based_exp. Scan insns in
2926 the tail call sequence, starting with first insn that hasn't been
2927 scanned yet, and note for each pseudo on the LHS whether it is based
2928 on crtl->args.internal_arg_pointer or not, and what offset from that
2929 that pointer it has. */
2930
2931static void
2932internal_arg_pointer_based_exp_scan (void)
2933{
48810515 2934 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
5275901c
JJ
2935
2936 if (scan_start == NULL_RTX)
2937 insn = get_insns ();
2938 else
2939 insn = NEXT_INSN (scan_start);
2940
2941 while (insn)
2942 {
2943 rtx set = single_set (insn);
2944 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2945 {
2946 rtx val = NULL_RTX;
2947 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2948 /* Punt on pseudos set multiple times. */
9771b263
DN
2949 if (idx < internal_arg_pointer_exp_state.cache.length ()
2950 && (internal_arg_pointer_exp_state.cache[idx]
5275901c
JJ
2951 != NULL_RTX))
2952 val = pc_rtx;
2953 else
2954 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2955 if (val != NULL_RTX)
2956 {
9771b263 2957 if (idx >= internal_arg_pointer_exp_state.cache.length ())
c3284718
RS
2958 internal_arg_pointer_exp_state.cache
2959 .safe_grow_cleared (idx + 1);
9771b263 2960 internal_arg_pointer_exp_state.cache[idx] = val;
5275901c
JJ
2961 }
2962 }
2963 if (NEXT_INSN (insn) == NULL_RTX)
2964 scan_start = insn;
2965 insn = NEXT_INSN (insn);
2966 }
2967
2968 internal_arg_pointer_exp_state.scan_start = scan_start;
2969}
2970
5275901c
JJ
2971/* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2972 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2973 it with fixed offset, or PC if this is with variable or unknown offset.
2974 TOPLEVEL is true if the function is invoked at the topmost level. */
2975
2976static rtx
e9f56944 2977internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
5275901c
JJ
2978{
2979 if (CONSTANT_P (rtl))
2980 return NULL_RTX;
2981
2982 if (rtl == crtl->args.internal_arg_pointer)
2983 return const0_rtx;
2984
2985 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2986 return NULL_RTX;
2987
a20c5714
RS
2988 poly_int64 offset;
2989 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
5275901c
JJ
2990 {
2991 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2992 if (val == NULL_RTX || val == pc_rtx)
2993 return val;
a20c5714 2994 return plus_constant (Pmode, val, offset);
5275901c
JJ
2995 }
2996
2997 /* When called at the topmost level, scan pseudo assignments in between the
2998 last scanned instruction in the tail call sequence and the latest insn
2999 in that sequence. */
3000 if (toplevel)
3001 internal_arg_pointer_based_exp_scan ();
3002
3003 if (REG_P (rtl))
3004 {
3005 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
9771b263
DN
3006 if (idx < internal_arg_pointer_exp_state.cache.length ())
3007 return internal_arg_pointer_exp_state.cache[idx];
5275901c
JJ
3008
3009 return NULL_RTX;
3010 }
3011
e9f56944
RS
3012 subrtx_iterator::array_type array;
3013 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
3014 {
3015 const_rtx x = *iter;
3016 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
3017 return pc_rtx;
3018 if (MEM_P (x))
3019 iter.skip_subrtxes ();
3020 }
5275901c
JJ
3021
3022 return NULL_RTX;
3023}
3024
a20c5714
RS
3025/* Return true if SIZE bytes starting from address ADDR might overlap an
3026 already-clobbered argument area. This function is used to determine
3027 if we should give up a sibcall. */
07eef816
KH
3028
3029static bool
a20c5714 3030mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
07eef816 3031{
a20c5714
RS
3032 poly_int64 i;
3033 unsigned HOST_WIDE_INT start, end;
5275901c 3034 rtx val;
07eef816 3035
a20c5714
RS
3036 if (bitmap_empty_p (stored_args_map)
3037 && stored_args_watermark == HOST_WIDE_INT_M1U)
4189fb53 3038 return false;
5275901c
JJ
3039 val = internal_arg_pointer_based_exp (addr, true);
3040 if (val == NULL_RTX)
3041 return false;
a20c5714 3042 else if (!poly_int_rtx_p (val, &i))
6c3cb698 3043 return true;
a20c5714
RS
3044
3045 if (known_eq (size, 0U))
3046 return false;
76e048a8
KT
3047
3048 if (STACK_GROWS_DOWNWARD)
3049 i -= crtl->args.pretend_args_size;
3050 else
3051 i += crtl->args.pretend_args_size;
3052
6dad9361
TS
3053 if (ARGS_GROW_DOWNWARD)
3054 i = -i - size;
3055
a20c5714
RS
3056 /* We can ignore any references to the function's pretend args,
3057 which at this point would manifest as negative values of I. */
3058 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
3059 return false;
07eef816 3060
a20c5714
RS
3061 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
3062 if (!(i + size).is_constant (&end))
3063 end = HOST_WIDE_INT_M1U;
3064
3065 if (end > stored_args_watermark)
3066 return true;
3067
3068 end = MIN (end, SBITMAP_SIZE (stored_args_map));
3069 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
3070 if (bitmap_bit_p (stored_args_map, k))
3071 return true;
07eef816
KH
3072
3073 return false;
3074}
3075
21a3b983
JL
3076/* Do the register loads required for any wholly-register parms or any
3077 parms which are passed both on the stack and in a register. Their
f725a3ec 3078 expressions were already evaluated.
21a3b983
JL
3079
3080 Mark all register-parms as living through the call, putting these USE
d329e058
AJ
3081 insns in the CALL_INSN_FUNCTION_USAGE field.
3082
40b0345d 3083 When IS_SIBCALL, perform the check_sibcall_argument_overlap
0cdca92b 3084 checking, setting *SIBCALL_FAILURE if appropriate. */
21a3b983
JL
3085
3086static void
d329e058
AJ
3087load_register_parameters (struct arg_data *args, int num_actuals,
3088 rtx *call_fusage, int flags, int is_sibcall,
3089 int *sibcall_failure)
21a3b983
JL
3090{
3091 int i, j;
3092
21a3b983 3093 for (i = 0; i < num_actuals; i++)
21a3b983 3094 {
099e9712
JH
3095 rtx reg = ((flags & ECF_SIBCALL)
3096 ? args[i].tail_call_reg : args[i].reg);
21a3b983
JL
3097 if (reg)
3098 {
6e985040
AM
3099 int partial = args[i].partial;
3100 int nregs;
95fe7b48
RS
3101 poly_int64 size = 0;
3102 HOST_WIDE_INT const_size = 0;
48810515 3103 rtx_insn *before_arg = get_last_insn ();
72834792 3104 tree type = TREE_TYPE (args[i].tree_value);
920ea3b8 3105 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
72834792 3106 type = TREE_TYPE (first_field (type));
f0078f86
AM
3107 /* Set non-negative if we must move a word at a time, even if
3108 just one word (e.g, partial == 4 && mode == DFmode). Set
3109 to -1 if we just use a normal move insn. This value can be
3110 zero if the argument is a zero size structure. */
6e985040 3111 nregs = -1;
78a52f11
RH
3112 if (GET_CODE (reg) == PARALLEL)
3113 ;
3114 else if (partial)
3115 {
3116 gcc_assert (partial % UNITS_PER_WORD == 0);
3117 nregs = partial / UNITS_PER_WORD;
3118 }
3119 else if (TYPE_MODE (type) == BLKmode)
3120 {
3121 /* Variable-sized parameters should be described by a
3122 PARALLEL instead. */
3123 const_size = int_size_in_bytes (type);
3124 gcc_assert (const_size >= 0);
3125 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3126 size = const_size;
3127 }
3128 else
3129 size = GET_MODE_SIZE (args[i].mode);
3130
3131 /* Handle calls that pass values in multiple non-contiguous
3132 locations. The Irix 6 ABI has examples of this. */
3133
3134 if (GET_CODE (reg) == PARALLEL)
3135 emit_group_move (reg, args[i].parallel_value);
3136
3137 /* If simple case, just do move. If normal partial, store_one_arg
3138 has already loaded the register for us. In all other cases,
3139 load the register(s) from memory. */
3140
3141 else if (nregs == -1)
3142 {
3143 emit_move_insn (reg, args[i].value);
3144#ifdef BLOCK_REG_PADDING
3145 /* Handle the case where we have a value that needs shifting
3146 up to the msb, e.g. a QImode value when we are padding
3147 upward on a BYTES_BIG_ENDIAN machine. */
3148 if (args[i].locate.where_pad
3149 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
3150 {
3151 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
3152 if (maybe_lt (size, UNITS_PER_WORD))
3153 {
3154 rtx x;
3155 poly_int64 shift
3156 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3157
3158 /* Assigning REG here rather than a temp makes
3159 CALL_FUSAGE report the whole reg as used.
3160 Strictly speaking, the call only uses SIZE
3161 bytes at the msb end, but it doesn't seem worth
3162 generating rtl to say that. */
3163 reg = gen_rtx_REG (word_mode, REGNO (reg));
3164 x = expand_shift (LSHIFT_EXPR, word_mode,
3165 reg, shift, reg, 1);
3166 if (x != reg)
3167 emit_move_insn (reg, x);
3168 }
3169 }
3170#endif
3171 }
3172
3173 /* If we have pre-computed the values to put in the registers in
3174 the case of non-aligned structures, copy them in now. */
3175
3176 else if (args[i].n_aligned_regs != 0)
3177 for (j = 0; j < args[i].n_aligned_regs; j++)
3178 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
3179 args[i].aligned_regs[j]);
3180
3181 else if (partial == 0 || args[i].pass_on_stack)
3182 {
3183 /* SIZE and CONST_SIZE are 0 for partial arguments and
3184 the size of a BLKmode type otherwise. */
3185 gcc_checking_assert (known_eq (size, const_size));
3186 rtx mem = validize_mem (copy_rtx (args[i].value));
3187
3188 /* Check for overlap with already clobbered argument area,
3189 providing that this has non-zero size. */
3190 if (is_sibcall
3191 && const_size != 0
3192 && (mem_might_overlap_already_clobbered_arg_p
3193 (XEXP (args[i].value, 0), const_size)))
3194 *sibcall_failure = 1;
3195
3196 if (const_size % UNITS_PER_WORD == 0
3197 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
3198 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
3199 else
3200 {
3201 if (nregs > 1)
3202 move_block_to_reg (REGNO (reg), mem, nregs - 1,
3203 args[i].mode);
3204 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
3205 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
3206 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
3207 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
3208 word_mode, word_mode, false,
3209 NULL);
3210 if (BYTES_BIG_ENDIAN)
3211 x = expand_shift (LSHIFT_EXPR, word_mode, x,
3212 BITS_PER_WORD - bitsize, dest, 1);
3213 if (x != dest)
3214 emit_move_insn (dest, x);
3215 }
3216
3217 /* Handle a BLKmode that needs shifting. */
3218 if (nregs == 1 && const_size < UNITS_PER_WORD
3219#ifdef BLOCK_REG_PADDING
3220 && args[i].locate.where_pad == PAD_DOWNWARD
3221#else
3222 && BYTES_BIG_ENDIAN
3223#endif
3224 )
3225 {
3226 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
3227 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
3228 enum tree_code dir = (BYTES_BIG_ENDIAN
3229 ? RSHIFT_EXPR : LSHIFT_EXPR);
3230 rtx x;
3231
3232 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
3233 if (x != dest)
3234 emit_move_insn (dest, x);
3235 }
3236 }
3237
3238 /* When a parameter is a block, and perhaps in other cases, it is
3239 possible that it did a load from an argument slot that was
3240 already clobbered. */
3241 if (is_sibcall
3242 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
3243 *sibcall_failure = 1;
3244
3245 /* Handle calls that pass values in multiple non-contiguous
3246 locations. The Irix 6 ABI has examples of this. */
3247 if (GET_CODE (reg) == PARALLEL)
3248 use_group_regs (call_fusage, reg);
3249 else if (nregs == -1)
3250 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
3251 else if (nregs > 0)
3252 use_regs (call_fusage, REGNO (reg), nregs);
3253 }
3254 }
3255}
3256
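/* A worked example for the NREGS computation above, assuming a 32-bit
   target where UNITS_PER_WORD == 4.  A DFmode argument (8 bytes) with
   partial == 4 keeps its first half in a register and its second half
   on the stack, so:

     partial = 4;
     nregs = partial / UNITS_PER_WORD;   // nregs == 1: one word at a time

   whereas a fully-in-register SImode argument takes the nregs == -1
   path and is loaded with a single emit_move_insn.  */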
3257/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
3258 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
3259 bytes, then we would need to push some additional bytes to pad the
3260 arguments. So, we try to compute an adjustment to the stack pointer for an
3261 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
3262 bytes. Then, when the arguments are pushed the stack will be perfectly
3263 aligned.
3264
3265 Return true if this optimization is possible, storing the adjustment
3266 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
3267 bytes that should be popped after the call. */
3268
3269static bool
3270combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
3271 poly_int64 unadjusted_args_size,
3272 struct args_size *args_size,
3273 unsigned int preferred_unit_stack_boundary)
3274{
3275 /* The number of bytes to pop so that the stack will be
3276 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
3277 poly_int64 adjustment;
3278 /* The alignment of the stack after the arguments are pushed, if we
3279 just pushed the arguments without adjusting the stack here. */
3280 unsigned HOST_WIDE_INT unadjusted_alignment;
3281
3282 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
3283 preferred_unit_stack_boundary,
3284 &unadjusted_alignment))
3285 return false;
3286
3287 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
3288 as possible -- leaving just enough left to cancel out the
3289 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
3290 PENDING_STACK_ADJUST is non-negative, and congruent to
3291 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
3292
3293 /* Begin by trying to pop all the bytes. */
3294 unsigned HOST_WIDE_INT tmp_misalignment;
3295 if (!known_misalignment (pending_stack_adjust,
3296 preferred_unit_stack_boundary,
3297 &tmp_misalignment))
3298 return false;
3299 unadjusted_alignment -= tmp_misalignment;
3300 adjustment = pending_stack_adjust;
3301 /* Push enough additional bytes that the stack will be aligned
3302 after the arguments are pushed. */
3303 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
3304 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
3305
3306 /* We need to know whether the adjusted argument size
3307 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
3308 or a deallocation. */
3309 if (!ordered_p (adjustment, unadjusted_args_size))
3310 return false;
3311
3312 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
3313 bytes after the call. The right number is the entire
3314 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
3315 by the arguments in the first place. */
3316 args_size->constant
3317 = pending_stack_adjust - adjustment + unadjusted_args_size;
3318
3319 *adjustment_out = adjustment;
3320 return true;
3321}
3322
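/* A worked example of the adjustment above, with illustrative numbers:
   preferred_unit_stack_boundary = 16, stack_pointer_delta = 8,
   unadjusted_args_size = 20, pending_stack_adjust = 24.

     unadjusted_alignment = (8 + 20) % 16;   // 12
     unadjusted_alignment -= 24 % 16;        // 12 - 8 = 4
     adjustment = 24 - (16 - 4);             // pop only 12 bytes

   Popping 12 bytes leaves the delta at -4, so pushing the 20 bytes of
   arguments lands the stack on a 16-byte boundary, and ARGS_SIZE->CONSTANT
   becomes 24 - 12 + 20 = 32 bytes to pop after the call.  */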
3323/* Scan the expression X to check that it does not dereference any
3324 argument slots we have already clobbered with tail call arguments
3325 (as noted in the stored_args_map bitmap).
3326 Return nonzero if X dereferences such an argument slot,
3327 zero otherwise. */
3328
3329static int
3330check_sibcall_argument_overlap_1 (rtx x)
3331{
3332 RTX_CODE code;
3333 int i, j;
3334 const char *fmt;
3335
3336 if (x == NULL_RTX)
3337 return 0;
3338
3339 code = GET_CODE (x);
3340
3341 /* We need not check the operands of the CALL expression itself. */
3342 if (code == CALL)
3343 return 0;
3344
3345 if (code == MEM)
3346 return (mem_might_overlap_already_clobbered_arg_p
3347 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
3348
3349 /* Scan all subexpressions. */
3350 fmt = GET_RTX_FORMAT (code);
3351 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3352 {
3353 if (*fmt == 'e')
3354 {
3355 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3356 return 1;
3357 }
3358 else if (*fmt == 'E')
3359 {
3360 for (j = 0; j < XVECLEN (x, i); j++)
3361 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3362 return 1;
3363 }
3364 }
3365 return 0;
3366}
3367
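/* The loop above follows the standard RTL walking idiom: GET_RTX_FORMAT
   yields a format string for each rtx code, in which 'e' marks a
   subexpression operand and 'E' marks a vector of subexpressions.  For
   example, a SET has format "ee", so both its destination and source
   are visited recursively, and any MEM encountered on the way is checked
   against the already-clobbered argument area.  */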
3368/* Scan the sequence after INSN to check that it does not dereference
3369 any argument slots we have already clobbered with tail call arguments
3370 (as noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add
3371 the stack slots for ARG to the stored_args_map bitmap afterwards (when
3372 ARG is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if
3373 the sequence after INSN dereferences such argument slots, zero otherwise. */
3374
3375static int
3376check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3377 int mark_stored_args_map)
3378{
3379 poly_uint64 low, high;
3380 unsigned HOST_WIDE_INT const_low, const_high;
3381
3382 if (insn == NULL_RTX)
3383 insn = get_insns ();
3384 else
3385 insn = NEXT_INSN (insn);
3386
3387 for (; insn; insn = NEXT_INSN (insn))
3388 if (INSN_P (insn)
3389 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3390 break;
3391
3392 if (mark_stored_args_map)
3393 {
3394 if (ARGS_GROW_DOWNWARD)
3395 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3396 else
3397 low = arg->locate.slot_offset.constant;
3398 high = low + arg->locate.size.constant;
3399
3400 const_low = constant_lower_bound (low);
3401 if (high.is_constant (&const_high))
3402 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3403 bitmap_set_bit (stored_args_map, i);
3404 else
3405 stored_args_watermark = MIN (stored_args_watermark, const_low);
3406 }
3407 return insn != NULL_RTX;
3408}
3409
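/* Example of the slot marking above, assuming !ARGS_GROW_DOWNWARD and an
   argument at constant slot_offset 8 with constant size 4: LOW = 8,
   HIGH = 12, so bits 8..11 of stored_args_map are set and any later
   sibcall argument that loads from those bytes forces the tail call to
   be abandoned.  When HIGH is not a compile-time constant, only
   stored_args_watermark is lowered to LOW's constant lower bound.  */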
3410/* Given that a function returns a value of mode MODE at the most
3411 significant end of hard register VALUE, shift VALUE left or right
3412 as specified by LEFT_P. Return true if some action was needed. */
3413
3414bool
3415shift_return_value (machine_mode mode, bool left_p, rtx value)
3416{
3417 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3418 machine_mode value_mode = GET_MODE (value);
3419 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3420
3421 if (known_eq (shift, 0))
3422 return false;
3423
3424 /* Use ashr rather than lshr for right shifts. This is for the benefit
3425 of the MIPS port, which requires SImode values to be sign-extended
3426 when stored in 64-bit registers. */
3427 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3428 value, gen_int_shift_amount (value_mode, shift),
3429 value, 1, OPTAB_WIDEN))
3430 gcc_unreachable ();
3431 return true;
3432}
3433
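/* A worked example for shift_return_value: an SImode value (32 bits)
   returned at the most significant end of a 64-bit hard register gives

     shift = 64 - 32;   // 32

   so the value is shifted by one full word.  Arithmetic rather than
   logical right shifts are used so that targets such as MIPS, which
   keep SImode values sign-extended in 64-bit registers, see a properly
   extended result.  */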
3434/* If X is a likely-spilled register value, copy it to a pseudo
3435 register and return that register. Return X otherwise. */
3436
3437static rtx
3438avoid_likely_spilled_reg (rtx x)
3439{
3440 rtx new_rtx;
3441
3442 if (REG_P (x)
3443 && HARD_REGISTER_P (x)
3444 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3445 {
3446 /* Make sure that we generate a REG rather than a CONCAT.
3447 Moves into CONCATs can need nontrivial instructions,
3448 and the whole point of this function is to avoid
3449 using the hard register directly in such a situation. */
3450 generating_concat_p = 0;
3451 new_rtx = gen_reg_rtx (GET_MODE (x));
3452 generating_concat_p = 1;
3453 emit_move_insn (new_rtx, x);
3454 return new_rtx;
3455 }
3456 return x;
3457}
3458
3459/* Helper function for expand_call.
3460 Return false if EXP is not implementable as a sibling call. */
3461
3462static bool
3463can_implement_as_sibling_call_p (tree exp,
3464 rtx structure_value_addr,
3465 tree funtype,
3466 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3467 tree fndecl,
3468 int flags,
3469 tree addr,
3470 const args_size &args_size)
3471{
3472 if (!targetm.have_sibcall_epilogue ())
3473 {
3474 maybe_complain_about_tail_call
3475 (exp,
3476 "machine description does not have"
3477 " a sibcall_epilogue instruction pattern");
3478 return false;
3479 }
3480
3481 /* Doing sibling call optimization needs some work, since
3482 structure_value_addr can be allocated on the stack.
3483 It does not seem worth the effort since few optimizable
3484 sibling calls will return a structure. */
3485 if (structure_value_addr != NULL_RTX)
3486 {
3487 maybe_complain_about_tail_call (exp, "callee returns a structure");
3488 return false;
3489 }
3490
3491#ifdef REG_PARM_STACK_SPACE
3492 /* If the outgoing reg parm stack space changes, we cannot do a sibcall. */
3493 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3494 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3495 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3496 {
3497 maybe_complain_about_tail_call (exp,
3498 "inconsistent size of stack space"
3499 " allocated for arguments which are"
3500 " passed in registers");
3501 return false;
3502 }
3503#endif
3504
3505 /* Check whether the target is able to optimize the call
3506 into a sibcall. */
3507 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3508 {
3509 maybe_complain_about_tail_call (exp,
3510 "target is not able to optimize the"
3511 " call into a sibling call");
3512 return false;
3513 }
3514
3515 /* Functions that do not return exactly once may not be sibcall
3516 optimized. */
3517 if (flags & ECF_RETURNS_TWICE)
3518 {
3519 maybe_complain_about_tail_call (exp, "callee returns twice");
3520 return false;
3521 }
3522 if (flags & ECF_NORETURN)
3523 {
3524 maybe_complain_about_tail_call (exp, "callee does not return");
3525 return false;
3526 }
3527
3528 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3529 {
3530 maybe_complain_about_tail_call (exp, "volatile function type");
3531 return false;
3532 }
3533
3534 /* If the called function is nested in the current one, it might access
3535 some of the caller's arguments, but could clobber them beforehand if
3536 the argument areas are shared. */
3537 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3538 {
3539 maybe_complain_about_tail_call (exp, "nested function");
3540 return false;
3541 }
3542
3543 /* If this function requires more stack slots than the current
3544 function, we cannot change it into a sibling call.
3545 crtl->args.pretend_args_size is not part of the
3546 stack allocated by our caller. */
3547 if (maybe_gt (args_size.constant,
3548 crtl->args.size - crtl->args.pretend_args_size))
3549 {
3550 maybe_complain_about_tail_call (exp,
3551 "callee required more stack slots"
3552 " than the caller");
3553 return false;
3554 }
3555
3556 /* If the callee pops its own arguments, then it must pop exactly
3557 the same number of arguments as the current function. */
3558 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3559 args_size.constant),
3560 targetm.calls.return_pops_args (current_function_decl,
3561 TREE_TYPE
3562 (current_function_decl),
3563 crtl->args.size)))
3564 {
3565 maybe_complain_about_tail_call (exp,
3566 "inconsistent number of"
3567 " popped arguments");
3568 return false;
3569 }
3570
3571 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3572 {
3573 maybe_complain_about_tail_call (exp, "frontend does not support"
3574 " sibling call");
3575 return false;
3576 }
3577
3578 /* All checks passed. */
3579 return true;
3580}
3581
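/* For instance, under the checks above a call such as

     struct big { int a[8]; };
     struct big g (void);
     struct big f (void) { return g (); }

   is generally rejected as a sibcall because the result comes back
   through structure_value_addr, and maybe_complain_about_tail_call
   reports "callee returns a structure" if the user demanded the tail
   call.  */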
3582/* Update stack alignment when the parameter is passed on the stack,
3583 since the outgoing parameter requires extra alignment on the calling
3584 function side. */
3585
3586static void
3587update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3588{
3589 if (crtl->stack_alignment_needed < locate->boundary)
3590 crtl->stack_alignment_needed = locate->boundary;
3591 if (crtl->preferred_stack_boundary < locate->boundary)
3592 crtl->preferred_stack_boundary = locate->boundary;
3593}
3594
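/* For example, if an argument's locate.boundary is 256 bits on a target
   whose current preferred_stack_boundary is 128, the function above
   raises both crtl->stack_alignment_needed and
   crtl->preferred_stack_boundary to 256, so the caller's frame can
   provide the stricter alignment the outgoing parameter requires.  */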
3595/* Generate all the code for a CALL_EXPR exp
3596 and return an rtx for its value.
3597 Store the value in TARGET (specified as an rtx) if convenient.
3598 If the value is stored in TARGET then TARGET is returned.
3599 If IGNORE is nonzero, then we ignore the value of the function call. */
3600
3601rtx
3602expand_call (tree exp, rtx target, int ignore)
3603{
3604 /* Nonzero if we are currently expanding a call. */
3605 static int currently_expanding_call = 0;
3606
3607 /* RTX for the function to be called. */
3608 rtx funexp;
3609 /* Sequence of insns to perform a normal "call". */
3610 rtx_insn *normal_call_insns = NULL;
3611 /* Sequence of insns to perform a tail "call". */
3612 rtx_insn *tail_call_insns = NULL;
3613 /* Data type of the function. */
3614 tree funtype;
3615 tree type_arg_types;
3616 tree rettype;
3617 /* Declaration of the function being called,
3618 or 0 if the function is computed (not known by name). */
3619 tree fndecl = 0;
3620 /* The type of the function being called. */
3621 tree fntype;
3622 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3623 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3624 int pass;
3625
3626 /* Register in which non-BLKmode value will be returned,
3627 or 0 if no value or if value is BLKmode. */
3628 rtx valreg;
3629 /* Address where we should return a BLKmode value;
3630 0 if value not BLKmode. */
3631 rtx structure_value_addr = 0;
3632 /* Nonzero if that address is being passed by treating it as
3633 an extra, implicit first parameter. Otherwise,
3634 it is passed by being copied directly into struct_value_rtx. */
3635 int structure_value_addr_parm = 0;
078a18a4
SL
3636 /* Holds the value of implicit argument for the struct value. */
3637 tree structure_value_addr_value = NULL_TREE;
51bbfa0c
RS
3638 /* Size of aggregate value wanted, or zero if none wanted
3639 or if we are using the non-reentrant PCC calling convention
3640 or expecting the value in registers. */
5c8e61cf 3641 poly_int64 struct_value_size = 0;
51bbfa0c
RS
3642 /* Nonzero if called function returns an aggregate in memory PCC style,
3643 by returning the address of where to find it. */
3644 int pcc_struct_value = 0;
61f71b34 3645 rtx struct_value = 0;
51bbfa0c
RS
3646
3647 /* Number of actual parameters in this call, including struct value addr. */
3648 int num_actuals;
3649 /* Number of named args. Args after this are anonymous ones
3650 and they must all go on the stack. */
3651 int n_named_args;
078a18a4
SL
3652 /* Number of complex actual arguments that need to be split. */
3653 int num_complex_actuals = 0;
51bbfa0c
RS
3654
3655 /* Vector of information about each argument.
3656 Arguments are numbered in the order they will be pushed,
3657 not the order they are written. */
3658 struct arg_data *args;
3659
3660 /* Total size in bytes of all the stack-parms scanned so far. */
3661 struct args_size args_size;
099e9712 3662 struct args_size adjusted_args_size;
51bbfa0c 3663 /* Size of arguments before any adjustments (such as rounding). */
a20c5714 3664 poly_int64 unadjusted_args_size;
51bbfa0c 3665 /* Data on reg parms scanned so far. */
d5cc9181
JR
3666 CUMULATIVE_ARGS args_so_far_v;
3667 cumulative_args_t args_so_far;
51bbfa0c
RS
3668 /* Nonzero if a reg parm has been scanned. */
3669 int reg_parm_seen;
efd65a8b 3670 /* Nonzero if this is an indirect function call. */
51bbfa0c 3671
f725a3ec 3672 /* Nonzero if we must avoid push-insns in the args for this call.
51bbfa0c
RS
3673 If stack space is allocated for register parameters, but not by the
3674 caller, then it is preallocated in the fixed part of the stack frame.
3675 So the entire argument block must then be preallocated (i.e., we
3676 ignore PUSH_ROUNDING in that case). */
3677
f73ad30e 3678 int must_preallocate = !PUSH_ARGS;
51bbfa0c 3679
f72aed24 3680 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
3681 int reg_parm_stack_space = 0;
3682
51bbfa0c
RS
3683 /* Address of space preallocated for stack parms
3684 (on machines that lack push insns), or 0 if space not preallocated. */
3685 rtx argblock = 0;
3686
e384e6b5 3687 /* Mask of ECF_ and ERF_ flags. */
f2d33f13 3688 int flags = 0;
e384e6b5 3689 int return_flags = 0;
f73ad30e 3690#ifdef REG_PARM_STACK_SPACE
51bbfa0c 3691 /* Define the boundary of the register parm stack space that needs to be
b820d2b8
AM
3692 saved, if any. */
3693 int low_to_save, high_to_save;
51bbfa0c
RS
3694 rtx save_area = 0; /* Place that it is saved */
3695#endif
3696
a20c5714 3697 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
51bbfa0c 3698 char *initial_stack_usage_map = stack_usage_map;
a20c5714 3699 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
d9725c41 3700 char *stack_usage_map_buf = NULL;
51bbfa0c 3701
a20c5714 3702 poly_int64 old_stack_allocated;
38afb23f
OH
3703
3704 /* State variables to track stack modifications. */
51bbfa0c 3705 rtx old_stack_level = 0;
38afb23f 3706 int old_stack_arg_under_construction = 0;
a20c5714 3707 poly_int64 old_pending_adj = 0;
51bbfa0c 3708 int old_inhibit_defer_pop = inhibit_defer_pop;
38afb23f
OH
3709
3710 /* Some stack pointer alterations we make are performed via
3711 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3712 which we then also need to save/restore along the way. */
a20c5714 3713 poly_int64 old_stack_pointer_delta = 0;
38afb23f 3714
0a1c58a2 3715 rtx call_fusage;
5039610b 3716 tree addr = CALL_EXPR_FN (exp);
b3694847 3717 int i;
739fb049 3718 /* The alignment of the stack, in bits. */
95899b34 3719 unsigned HOST_WIDE_INT preferred_stack_boundary;
739fb049 3720 /* The alignment of the stack, in bytes. */
95899b34 3721 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
6de9cd9a
DN
3722 /* The static chain value to use for this call. */
3723 rtx static_chain_value;
f2d33f13
JH
3724 /* See if this is "nothrow" function call. */
3725 if (TREE_NOTHROW (exp))
3726 flags |= ECF_NOTHROW;
3727
6de9cd9a
DN
3728 /* See if we can find a DECL-node for the actual function, and get the
3729 function attributes (flags) from the function decl or type node. */
39b0dce7
JM
3730 fndecl = get_callee_fndecl (exp);
3731 if (fndecl)
51bbfa0c 3732 {
57782ad8 3733 fntype = TREE_TYPE (fndecl);
39b0dce7 3734 flags |= flags_from_decl_or_type (fndecl);
e384e6b5 3735 return_flags |= decl_return_flags (fndecl);
51bbfa0c 3736 }
39b0dce7 3737 else
72954a4f 3738 {
28ed065e 3739 fntype = TREE_TYPE (TREE_TYPE (addr));
57782ad8 3740 flags |= flags_from_decl_or_type (fntype);
4c640e26
EB
3741 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3742 flags |= ECF_BY_DESCRIPTOR;
72954a4f 3743 }
28ed065e 3744 rettype = TREE_TYPE (exp);
7393c642 3745
57782ad8 3746 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
61f71b34 3747
8c6a8269
RS
3748 /* Warn if this value is an aggregate type,
3749 regardless of which calling convention we are using for it. */
28ed065e 3750 if (AGGREGATE_TYPE_P (rettype))
ccf08a6e 3751 warning (OPT_Waggregate_return, "function call has aggregate value");
8c6a8269 3752
becfd6e5
KZ
3753 /* If the result of a non looping pure or const function call is
3754 ignored (or void), and none of its arguments are volatile, we can
3755 avoid expanding the call and just evaluate the arguments for
3756 side-effects. */
8c6a8269 3757 if ((flags & (ECF_CONST | ECF_PURE))
becfd6e5 3758 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
8c6a8269 3759 && (ignore || target == const0_rtx
28ed065e 3760 || TYPE_MODE (rettype) == VOIDmode))
8c6a8269
RS
3761 {
3762 bool volatilep = false;
3763 tree arg;
078a18a4 3764 call_expr_arg_iterator iter;
8c6a8269 3765
078a18a4
SL
3766 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3767 if (TREE_THIS_VOLATILE (arg))
8c6a8269
RS
3768 {
3769 volatilep = true;
3770 break;
3771 }
3772
3773 if (! volatilep)
3774 {
078a18a4
SL
3775 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3776 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8c6a8269
RS
3777 return const0_rtx;
3778 }
3779 }
3780
6f90e075 3781#ifdef REG_PARM_STACK_SPACE
5d059ed9 3782 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
6f90e075 3783#endif
6f90e075 3784
5d059ed9 3785 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
81464b2c 3786 && reg_parm_stack_space > 0 && PUSH_ARGS)
e5e809f4 3787 must_preallocate = 1;
e5e809f4 3788
51bbfa0c
RS
3789 /* Set up a place to return a structure. */
3790
3791 /* Cater to broken compilers. */
d47d0a8d 3792 if (aggregate_value_p (exp, fntype))
51bbfa0c
RS
3793 {
3794 /* This call returns a big structure. */
84b8030f 3795 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
51bbfa0c
RS
3796
3797#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
3798 {
3799 pcc_struct_value = 1;
9e7b1d0a
RS
3800 }
3801#else /* not PCC_STATIC_STRUCT_RETURN */
3802 {
5c8e61cf
RS
3803 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3804 struct_value_size = -1;
51bbfa0c 3805
391756ad
EB
3806 /* Even if it is semantically safe to use the target as the return
3807 slot, it may be not sufficiently aligned for the return type. */
3808 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3809 && target
3810 && MEM_P (target)
ffc8b52f
JJ
3811 /* If rettype is addressable, we may not create a temporary.
3812 If target is properly aligned at runtime and the compiler
3813 just doesn't know about it, it will work fine, otherwise it
3814 will be UB. */
3815 && (TREE_ADDRESSABLE (rettype)
3816 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3817 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3818 MEM_ALIGN (target)))))
9e7b1d0a
RS
3819 structure_value_addr = XEXP (target, 0);
3820 else
3821 {
9e7b1d0a
RS
3822 /* For variable-sized objects, we must be called with a target
3823 specified. If we were to allocate space on the stack here,
3824 we would have no way of knowing when to free it. */
9474e8ab 3825 rtx d = assign_temp (rettype, 1, 1);
4361b41d 3826 structure_value_addr = XEXP (d, 0);
9e7b1d0a
RS
3827 target = 0;
3828 }
3829 }
3830#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
3831 }
3832
099e9712 3833 /* Figure out the amount to which the stack should be aligned. */
099e9712 3834 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
b255a036
JH
3835 if (fndecl)
3836 {
3dafb85c 3837 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
17b29c0a
L
3838 /* Without automatic stack alignment, we can't increase preferred
3839 stack boundary. With automatic stack alignment, it is
3840 unnecessary since unless we can guarantee that all callers will
3841 align the outgoing stack properly, callee has to align its
3842 stack anyway. */
3843 if (i
3844 && i->preferred_incoming_stack_boundary
3845 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
b255a036
JH
3846 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3847 }
099e9712
JH
3848
3849 /* Operand 0 is a pointer-to-function; get the type of the function. */
09e2bf48 3850 funtype = TREE_TYPE (addr);
366de0ce 3851 gcc_assert (POINTER_TYPE_P (funtype));
099e9712
JH
3852 funtype = TREE_TYPE (funtype);
3853
078a18a4
SL
3854 /* Count whether there are actual complex arguments that need to be split
3855 into their real and imaginary parts. Munge the type_arg_types
3856 appropriately here as well. */
42ba5130 3857 if (targetm.calls.split_complex_arg)
ded9bf77 3858 {
078a18a4
SL
3859 call_expr_arg_iterator iter;
3860 tree arg;
3861 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3862 {
3863 tree type = TREE_TYPE (arg);
3864 if (type && TREE_CODE (type) == COMPLEX_TYPE
3865 && targetm.calls.split_complex_arg (type))
3866 num_complex_actuals++;
3867 }
ded9bf77 3868 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
ded9bf77
AH
3869 }
3870 else
3871 type_arg_types = TYPE_ARG_TYPES (funtype);
3872
099e9712 3873 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 3874 cfun->calls_alloca = 1;
099e9712
JH
3875
3876 /* If struct_value_rtx is 0, it means pass the address
078a18a4
SL
3877 as if it were an extra parameter. Put the argument expression
3878 in structure_value_addr_value. */
61f71b34 3879 if (structure_value_addr && struct_value == 0)
099e9712
JH
3880 {
3881 /* If structure_value_addr is a REG other than
3882 virtual_outgoing_args_rtx, we can use always use it. If it
3883 is not a REG, we must always copy it into a register.
3884 If it is virtual_outgoing_args_rtx, we must copy it to another
3885 register in some cases. */
f8cfc6aa 3886 rtx temp = (!REG_P (structure_value_addr)
099e9712
JH
3887 || (ACCUMULATE_OUTGOING_ARGS
3888 && stack_arg_under_construction
3889 && structure_value_addr == virtual_outgoing_args_rtx)
7ae4ad28 3890 ? copy_addr_to_reg (convert_memory_address
57782ad8 3891 (Pmode, structure_value_addr))
099e9712
JH
3892 : structure_value_addr);
3893
078a18a4
SL
3894 structure_value_addr_value =
3895 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
31db0fe0 3896 structure_value_addr_parm = 1;
099e9712
JH
3897 }
3898
3899 /* Count the arguments and set NUM_ACTUALS. */
078a18a4
SL
3900 num_actuals =
3901 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
099e9712
JH
3902
3903 /* Compute number of named args.
3a4d587b
AM
3904 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3905
3906 if (type_arg_types != 0)
3907 n_named_args
3908 = (list_length (type_arg_types)
3909 /* Count the struct value address, if it is passed as a parm. */
3910 + structure_value_addr_parm);
3911 else
3912 /* If we know nothing, treat all args as named. */
3913 n_named_args = num_actuals;
3914
3915 /* Start updating where the next arg would go.
3916
3917 On some machines (such as the PA) indirect calls have a different
3918 calling convention than normal calls. The fourth argument in
3919 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3920 or not. */
d5cc9181
JR
3921 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3922 args_so_far = pack_cumulative_args (&args_so_far_v);
3a4d587b
AM
3923
3924 /* Now possibly adjust the number of named args.
099e9712 3925 Normally, don't include the last named arg if anonymous args follow.
3a179764
KH
3926 We do include the last named arg if
3927 targetm.calls.strict_argument_naming() returns nonzero.
099e9712
JH
3928 (If no anonymous args follow, the result of list_length is actually
3929 one too large. This is harmless.)
3930
4ac8340c 3931 If targetm.calls.pretend_outgoing_varargs_named() returns
3a179764
KH
3932 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3933 this machine will be able to place unnamed args that were passed
3934 in registers into the stack. So treat all args as named. This
3935 allows the insns emitting for a specific argument list to be
3936 independent of the function declaration.
4ac8340c
KH
3937
3938 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3939 we do not have any reliable way to pass unnamed args in
3940 registers, so we must force them into memory. */
099e9712 3941
3a4d587b 3942 if (type_arg_types != 0
d5cc9181 3943 && targetm.calls.strict_argument_naming (args_so_far))
3a4d587b
AM
3944 ;
3945 else if (type_arg_types != 0
d5cc9181 3946 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3a4d587b
AM
3947 /* Don't include the last named arg. */
3948 --n_named_args;
099e9712 3949 else
3a4d587b 3950 /* Treat all args as named. */
099e9712
JH
3951 n_named_args = num_actuals;
3952
099e9712 3953 /* Make a vector to hold all the information about each arg. */
765fc0f7 3954 args = XCNEWVEC (struct arg_data, num_actuals);
099e9712 3955
d80d2d2a
KH
3956 /* Build up entries in the ARGS array, compute the size of the
3957 arguments into ARGS_SIZE, etc. */
099e9712 3958 initialize_argument_information (num_actuals, args, &args_size,
078a18a4 3959 n_named_args, exp,
45769134 3960 structure_value_addr_value, fndecl, fntype,
d5cc9181 3961 args_so_far, reg_parm_stack_space,
099e9712 3962 &old_stack_level, &old_pending_adj,
dd292d0a 3963 &must_preallocate, &flags,
6de9cd9a 3964 &try_tail_call, CALL_FROM_THUNK_P (exp));
099e9712
JH
3965
3966 if (args_size.var)
84b8030f 3967 must_preallocate = 1;
099e9712
JH
3968
3969 /* Now make final decision about preallocating stack space. */
3970 must_preallocate = finalize_must_preallocate (must_preallocate,
3971 num_actuals, args,
3972 &args_size);
3973
3974 /* If the structure value address will reference the stack pointer, we
3975 must stabilize it. We don't need to do this if we know that we are
3976 not going to adjust the stack pointer in processing this call. */
3977
3978 if (structure_value_addr
3979 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3980 || reg_mentioned_p (virtual_outgoing_args_rtx,
3981 structure_value_addr))
3982 && (args_size.var
a20c5714
RS
3983 || (!ACCUMULATE_OUTGOING_ARGS
3984 && maybe_ne (args_size.constant, 0))))
099e9712 3985 structure_value_addr = copy_to_reg (structure_value_addr);
0a1c58a2 3986
7ae4ad28 3987 /* Tail calls can make things harder to debug, and we've traditionally
194c7c45 3988 pushed these optimizations into -O2. Don't try if we're already
fb158467 3989 expanding a call, as that means we're an argument. Don't try if
3fbd86b1 3990 there's cleanups, as we know there's code to follow the call. */
099e9712 3991 if (currently_expanding_call++ != 0
44662f68 3992 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
6de9cd9a 3993 || args_size.var
6fb5fa3c 3994 || dbg_cnt (tail_call) == false)
6de9cd9a 3995 try_tail_call = 0;
099e9712 3996
4b8e35f1
JJ
3997 /* Workaround buggy C/C++ wrappers around Fortran routines with
3998 character(len=constant) arguments if the hidden string length arguments
3999 are passed on the stack; if the callers forget to pass those arguments,
4000 attempting to tail call in such routines leads to stack corruption.
4001 Avoid tail calls in functions where at least one such hidden string
4002 length argument is passed (partially or fully) on the stack in the
4003 caller and the callee needs to pass any arguments on the stack.
4004 See PR90329. */
4005 if (try_tail_call && maybe_ne (args_size.constant, 0))
4006 for (tree arg = DECL_ARGUMENTS (current_function_decl);
4007 arg; arg = DECL_CHAIN (arg))
4008 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
4009 {
4010 subrtx_iterator::array_type array;
4011 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
4012 if (MEM_P (*iter))
4013 {
4014 try_tail_call = 0;
4015 break;
4016 }
4017 }
4018
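/* Illustration of the PR90329 situation above (a sketch, not taken from
   the PR itself): for a Fortran routine such as

     subroutine f (s)
     character(len=10) :: s
     end subroutine

   the character argument carries a hidden string length, roughly as if
   the routine were prototyped in C as void f_ (char *s, int len).  A
   buggy C caller declared without the length argument never pushes it,
   so a tail call made from inside such a routine could touch a stack
   slot the caller did not allocate.  */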
4019 /* If the user has marked the function as requiring tail-call
4020 optimization, attempt it. */
4021 if (must_tail_call)
4022 try_tail_call = 1;
4023
4024 /* The remaining reasons a tail call optimization can fail. */
4025 if (try_tail_call)
4026 try_tail_call = can_implement_as_sibling_call_p (exp,
4027 structure_value_addr,
4028 funtype,
4029 reg_parm_stack_space,
4030 fndecl,
4031 flags, addr, args_size);
4032
4033 /* Check whether the caller and the callee disagree about the
4034 promotion of the function's return value. */
4035 if (try_tail_call)
4036 {
4037 machine_mode caller_mode, caller_promoted_mode;
4038 machine_mode callee_mode, callee_promoted_mode;
4039 int caller_unsignedp, callee_unsignedp;
4040 tree caller_res = DECL_RESULT (current_function_decl);
4041
4042 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
4043 caller_mode = DECL_MODE (caller_res);
4044 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
4045 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
4046 caller_promoted_mode
4047 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
4048 &caller_unsignedp,
4049 TREE_TYPE (current_function_decl), 1);
4050 callee_promoted_mode
4051 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
4052 &callee_unsignedp,
4053 funtype, 1);
4054 if (caller_mode != VOIDmode
4055 && (caller_promoted_mode != callee_promoted_mode
4056 || ((caller_mode != caller_promoted_mode
4057 || callee_mode != callee_promoted_mode)
4058 && (caller_unsignedp != callee_unsignedp
4059 || partial_subreg_p (caller_mode, callee_mode)))))
4060 {
4061 try_tail_call = 0;
4062 maybe_complain_about_tail_call (exp,
4063 "caller and callee disagree in"
4064 " promotion of function"
4065 " return value");
4066 }
4067 }
4068
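/* Example of a promotion disagreement above: if the caller's own return
   value is a signed char promoted to SImode while the callee's prototype
   promotes the same mode as unsigned, caller_unsignedp differs from
   callee_unsignedp and the sibcall is abandoned, because the value the
   callee leaves in the return register would not be extended the way
   the caller's caller expects.  */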
4069 /* Ensure current function's preferred stack boundary is at least
4070 what we need. Stack alignment may also increase preferred stack
4071 boundary. */
4072 for (i = 0; i < num_actuals; i++)
4073 if (reg_parm_stack_space > 0
4074 || args[i].reg == 0
4075 || args[i].partial != 0
4076 || args[i].pass_on_stack)
4077 update_stack_alignment_for_call (&args[i].locate);
4078 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
4079 crtl->preferred_stack_boundary = preferred_stack_boundary;
4080 else
4081 preferred_stack_boundary = crtl->preferred_stack_boundary;
4082
4083 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4084
4085 if (flag_callgraph_info)
4086 record_final_call (fndecl, EXPR_LOCATION (exp));
4087
4088 /* We want to make two insn chains; one for a sibling call, the other
4089 for a normal call. We will select one of the two chains after
4090 initial RTL generation is complete. */
4091 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
4092 {
4093 int sibcall_failure = 0;
4094 /* We want to emit any pending stack adjustments before the tail
4095 recursion "call". That way we know any adjustment after the tail
4096 recursion call can be ignored if we indeed use the tail
4097 call expansion. */
4098 saved_pending_stack_adjust save;
4099 rtx_insn *insns, *before_call, *after_args;
4100 rtx next_arg_reg;
4101
4102 if (pass == 0)
4103 {
4104 /* State variables we need to save and restore between
4105 iterations. */
4106 save_pending_stack_adjust (&save);
4107 }
4108 if (pass)
4109 flags &= ~ECF_SIBCALL;
4110 else
4111 flags |= ECF_SIBCALL;
4112
4113 /* Other state variables that we must reinitialize each time
4114 through the loop (that are not initialized by the loop itself). */
4115 argblock = 0;
4116 call_fusage = 0;
4117
4118 /* Start a new sequence for the normal call case.
4119
4120 From this point on, if the sibling call fails, we want to set
4121 sibcall_failure instead of continuing the loop. */
4122 start_sequence ();
4123
4124 /* Don't let pending stack adjusts add up to too much.
4125 Also, do all pending adjustments now if there is any chance
4126 this might be a call to alloca or if we are expanding a sibling
4127 call sequence.
4128 Also do the adjustments before a throwing call, otherwise
4129 exception handling can fail; PR 19225. */
4130 if (maybe_ge (pending_stack_adjust, 32)
4131 || (maybe_ne (pending_stack_adjust, 0)
4132 && (flags & ECF_MAY_BE_ALLOCA))
4133 || (maybe_ne (pending_stack_adjust, 0)
4134 && flag_exceptions && !(flags & ECF_NOTHROW))
4135 || pass == 0)
4136 do_pending_stack_adjust ();
4137
4138 /* Precompute any arguments as needed. */
4139 if (pass)
4140 precompute_arguments (num_actuals, args);
4141
4142 /* Now we are about to start emitting insns that can be deleted
4143 if a libcall is deleted. */
4144 if (pass && (flags & ECF_MALLOC))
4145 start_sequence ();
4146
87a5dc2d
JW
4147 if (pass == 0
4148 && crtl->stack_protect_guard
4149 && targetm.stack_protect_runtime_enabled_p ())
b755446c
RH
4150 stack_protect_epilogue ();
4151
099e9712 4152 adjusted_args_size = args_size;
ce48579b
RH
4153 /* Compute the actual size of the argument block required. The variable
4154 and constant sizes must be combined, the size may have to be rounded,
4155 and there may be a minimum required size. When generating a sibcall
4156 pattern, do not round up, since we'll be re-using whatever space our
4157 caller provided. */
4158 unadjusted_args_size
f725a3ec
KH
4159 = compute_argument_block_size (reg_parm_stack_space,
4160 &adjusted_args_size,
5d059ed9 4161 fndecl, fntype,
ce48579b
RH
4162 (pass == 0 ? 0
4163 : preferred_stack_boundary));
4164
f725a3ec 4165 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
ce48579b 4166
f8a097cd 4167 /* The argument block when performing a sibling call is the
c22cacf3 4168 incoming argument block. */
f8a097cd 4169 if (pass == 0)
c67846f2 4170 {
2e3f842f 4171 argblock = crtl->args.internal_arg_pointer;
76e048a8
KT
4172 if (STACK_GROWS_DOWNWARD)
4173 argblock
4174 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4175 else
4176 argblock
4177 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4178
a20c5714
RS
4179 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4180 stored_args_map = sbitmap_alloc (map_size);
f61e445a 4181 bitmap_clear (stored_args_map);
a20c5714 4182 stored_args_watermark = HOST_WIDE_INT_M1U;
c67846f2 4183 }
ce48579b 4184
0a1c58a2
JL
4185 /* If we have no actual push instructions, or shouldn't use them,
4186 make space for all args right now. */
099e9712 4187 else if (adjusted_args_size.var != 0)
51bbfa0c 4188 {
0a1c58a2
JL
4189 if (old_stack_level == 0)
4190 {
9eac0f2a 4191 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f 4192 old_stack_pointer_delta = stack_pointer_delta;
0a1c58a2
JL
4193 old_pending_adj = pending_stack_adjust;
4194 pending_stack_adjust = 0;
0a1c58a2
JL
4195 /* stack_arg_under_construction says whether a stack arg is
4196 being constructed at the old stack level. Pushing the stack
4197 gets a clean outgoing argument block. */
4198 old_stack_arg_under_construction = stack_arg_under_construction;
4199 stack_arg_under_construction = 0;
0a1c58a2 4200 }
099e9712 4201 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
a11e0df4 4202 if (flag_stack_usage_info)
d3c12306 4203 current_function_has_unbounded_dynamic_stack_size = 1;
51bbfa0c 4204 }
0a1c58a2
JL
4205 else
4206 {
4207 /* Note that we must go through the motions of allocating an argument
4208 block even if the size is zero because we may be storing args
4209 in the area reserved for register arguments, which may be part of
4210 the stack frame. */
26a258fe 4211
a20c5714 4212 poly_int64 needed = adjusted_args_size.constant;
51bbfa0c 4213
0a1c58a2
JL
4214 /* Store the maximum argument space used. It will be pushed by
4215 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4216 checking). */
51bbfa0c 4217
a20c5714
RS
4218 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4219 needed);
51bbfa0c 4220
0a1c58a2
JL
4221 if (must_preallocate)
4222 {
f73ad30e
JH
4223 if (ACCUMULATE_OUTGOING_ARGS)
4224 {
f8a097cd
JH
4225 /* Since the stack pointer will never be pushed, it is
4226 possible for the evaluation of a parm to clobber
4227 something we have already written to the stack.
4228 Since most function calls on RISC machines do not use
4229 the stack, this is uncommon, but must work correctly.
26a258fe 4230
f73ad30e 4231 Therefore, we save any area of the stack that was already
f8a097cd
JH
4232 written and that we are using. Here we set up to do this
4233 by making a new stack usage map from the old one. The
f725a3ec 4234 actual save will be done by store_one_arg.
26a258fe 4235
f73ad30e
JH
4236 Another approach might be to try to reorder the argument
4237 evaluations to avoid this conflicting stack usage. */
26a258fe 4238
f8a097cd
JH
4239 /* Since we will be writing into the entire argument area,
4240 the map must be allocated for its entire size, not just
4241 the part that is the responsibility of the caller. */
5d059ed9 4242 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 4243 needed += reg_parm_stack_space;
51bbfa0c 4244
a20c5714 4245 poly_int64 limit = needed;
6dad9361 4246 if (ARGS_GROW_DOWNWARD)
a20c5714
RS
4247 limit += 1;
4248
4249 /* For polynomial sizes, this is the maximum possible
4250 size needed for arguments with a constant size
4251 and offset. */
4252 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4253 highest_outgoing_arg_in_use
4254 = MAX (initial_highest_arg_in_use, const_limit);
6dad9361 4255
04695783 4256 free (stack_usage_map_buf);
5ed6ace5 4257 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 4258 stack_usage_map = stack_usage_map_buf;
51bbfa0c 4259
f73ad30e 4260 if (initial_highest_arg_in_use)
2e09e75a
JM
4261 memcpy (stack_usage_map, initial_stack_usage_map,
4262 initial_highest_arg_in_use);
2f4aa534 4263
f73ad30e 4264 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 4265 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
4266 (highest_outgoing_arg_in_use
4267 - initial_highest_arg_in_use));
4268 needed = 0;
2f4aa534 4269
f8a097cd
JH
4270 /* The address of the outgoing argument list must not be
4271 copied to a register here, because argblock would be left
4272 pointing to the wrong place after the call to
f725a3ec 4273 allocate_dynamic_stack_space below. */
2f4aa534 4274
f73ad30e 4275 argblock = virtual_outgoing_args_rtx;
f725a3ec 4276 }
f73ad30e 4277 else
26a258fe 4278 {
a20c5714
RS
4279 /* Try to reuse some or all of the pending_stack_adjust
4280 to get this space. */
4281 if (inhibit_defer_pop == 0
4282 && (combine_pending_stack_adjustment_and_call
4283 (&needed,
4284 unadjusted_args_size,
4285 &adjusted_args_size,
4286 preferred_unit_stack_boundary)))
0a1c58a2 4287 {
ce48579b
RH
4288 /* combine_pending_stack_adjustment_and_call computes
4289 an adjustment before the arguments are allocated.
4290 Account for them and see whether or not the stack
4291 needs to go up or down. */
4292 needed = unadjusted_args_size - needed;
4293
a20c5714
RS
4294 /* Checked by
4295 combine_pending_stack_adjustment_and_call. */
4296 gcc_checking_assert (ordered_p (needed, 0));
4297 if (maybe_lt (needed, 0))
f73ad30e 4298 {
ce48579b
RH
4299 /* We're releasing stack space. */
4300 /* ??? We can avoid any adjustment at all if we're
4301 already aligned. FIXME. */
4302 pending_stack_adjust = -needed;
4303 do_pending_stack_adjust ();
f73ad30e
JH
4304 needed = 0;
4305 }
f725a3ec 4306 else
ce48579b
RH
4307 /* We need to allocate space. We'll do that in
4308 push_block below. */
4309 pending_stack_adjust = 0;
0a1c58a2 4310 }
ce48579b
RH
4311
4312 /* Special case this because overhead of `push_block' in
4313 this case is non-trivial. */
a20c5714 4314 if (known_eq (needed, 0))
f73ad30e 4315 argblock = virtual_outgoing_args_rtx;
0a1c58a2 4316 else
d892f288 4317 {
a20c5714
RS
4318 rtx needed_rtx = gen_int_mode (needed, Pmode);
4319 argblock = push_block (needed_rtx, 0, 0);
6dad9361
TS
4320 if (ARGS_GROW_DOWNWARD)
4321 argblock = plus_constant (Pmode, argblock, needed);
d892f288 4322 }
f73ad30e 4323
f8a097cd
JH
4324 /* We only really need to call `copy_to_reg' in the case
4325 where push insns are going to be used to pass ARGBLOCK
4326 to a function call in ARGS. In that case, the stack
4327 pointer changes value from the allocation point to the
4328 call point, and hence the value of
4329 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4330 as well always do it. */
f73ad30e 4331 argblock = copy_to_reg (argblock);
38afb23f
OH
4332 }
4333 }
4334 }
0a1c58a2 4335
38afb23f
OH
4336 if (ACCUMULATE_OUTGOING_ARGS)
4337 {
4338 /* The save/restore code in store_one_arg handles all
4339 cases except one: a constructor call (including a C
4340 function returning a BLKmode struct) to initialize
4341 an argument. */
4342 if (stack_arg_under_construction)
4343 {
ac294f0b 4344 rtx push_size
a20c5714
RS
4345 = (gen_int_mode
4346 (adjusted_args_size.constant
4347 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4348 : TREE_TYPE (fndecl))
4349 ? 0 : reg_parm_stack_space), Pmode));
38afb23f
OH
4350 if (old_stack_level == 0)
4351 {
9eac0f2a 4352 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f
OH
4353 old_stack_pointer_delta = stack_pointer_delta;
4354 old_pending_adj = pending_stack_adjust;
4355 pending_stack_adjust = 0;
4356 /* stack_arg_under_construction says whether a stack
4357 arg is being constructed at the old stack level.
4358 Pushing the stack gets a clean outgoing argument
4359 block. */
4360 old_stack_arg_under_construction
4361 = stack_arg_under_construction;
4362 stack_arg_under_construction = 0;
4363 /* Make a new map for the new argument list. */
04695783 4364 free (stack_usage_map_buf);
b9eae1a9 4365 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 4366 stack_usage_map = stack_usage_map_buf;
38afb23f 4367 highest_outgoing_arg_in_use = 0;
a20c5714 4368 stack_usage_watermark = HOST_WIDE_INT_M1U;
f73ad30e 4369 }
d3c12306
EB
4370 /* We can pass TRUE as the 4th argument because we just
4371 saved the stack pointer and will restore it right after
4372 the call. */
9e878cf1
EB
4373 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4374 -1, true);
0a1c58a2 4375 }
bfbf933a 4376
38afb23f
OH
4377 /* If argument evaluation might modify the stack pointer,
4378 copy the address of the argument list to a register. */
4379 for (i = 0; i < num_actuals; i++)
4380 if (args[i].pass_on_stack)
4381 {
4382 argblock = copy_addr_to_reg (argblock);
4383 break;
4384 }
4385 }
d329e058 4386
0a1c58a2 4387 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 4388
5ba53785
UB
4389 /* Stack is properly aligned, pops can't safely be deferred during
4390 the evaluation of the arguments. */
4391 NO_DEFER_POP;
4392
ac4ee457
UB
4393 /* Precompute all register parameters. It isn't safe to compute
4394 anything once we have started filling any specific hard regs.
4395 TLS symbols sometimes need a call to resolve. Precompute
4396 register parameters before any stack pointer manipulation
4397 to avoid unaligned stack in the called function. */
4398 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4399
5ba53785
UB
4400 OK_DEFER_POP;
4401
3d9684ae
JG
4402 /* Perform stack alignment before the first push (the last arg). */
4403 if (argblock == 0
a20c5714
RS
4404 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4405 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4e217aed 4406 {
0a1c58a2
JL
4407 /* When the stack adjustment is pending, we get better code
4408 by combining the adjustments. */
a20c5714
RS
4409 if (maybe_ne (pending_stack_adjust, 0)
4410 && ! inhibit_defer_pop
4411 && (combine_pending_stack_adjustment_and_call
4412 (&pending_stack_adjust,
4413 unadjusted_args_size,
4414 &adjusted_args_size,
4415 preferred_unit_stack_boundary)))
4416 do_pending_stack_adjust ();
0a1c58a2 4417 else if (argblock == 0)
a20c5714
RS
4418 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4419 - unadjusted_args_size,
4420 Pmode));
0a1c58a2 4421 }
ebcd0b57
JH
4422 /* Now that the stack is properly aligned, pops can't safely
4423 be deferred during the evaluation of the arguments. */
4424 NO_DEFER_POP;
51bbfa0c 4425
d3c12306
EB
4426 /* Record the maximum pushed stack space size. We need to delay
4427 doing it this far to take into account the optimization done
4428 by combine_pending_stack_adjustment_and_call. */
a11e0df4 4429 if (flag_stack_usage_info
d3c12306
EB
4430 && !ACCUMULATE_OUTGOING_ARGS
4431 && pass
4432 && adjusted_args_size.var == 0)
4433 {
a20c5714
RS
4434 poly_int64 pushed = (adjusted_args_size.constant
4435 + pending_stack_adjust);
4436 current_function_pushed_stack_size
4437 = upper_bound (current_function_pushed_stack_size, pushed);
d3c12306
EB
4438 }
4439
09e2bf48 4440 funexp = rtx_for_function_call (fndecl, addr);
51bbfa0c 4441
5039610b
SL
4442 if (CALL_EXPR_STATIC_CHAIN (exp))
4443 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
6de9cd9a
DN
4444 else
4445 static_chain_value = 0;
4446
f73ad30e 4447#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
4448 /* Save the fixed argument area if it's part of the caller's frame and
4449 is clobbered by argument setup for this call. */
f8a097cd 4450 if (ACCUMULATE_OUTGOING_ARGS && pass)
f73ad30e
JH
4451 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4452 &low_to_save, &high_to_save);
b94301c2 4453#endif
51bbfa0c 4454
0a1c58a2
JL
4455 /* Now store (and compute if necessary) all non-register parms.
4456 These come before register parms, since they can require block-moves,
4457 which could clobber the registers used for register parms.
4458 Parms which have partial registers are not stored here,
4459 but we do preallocate space here if they want that. */
51bbfa0c 4460
0a1c58a2 4461 for (i = 0; i < num_actuals; i++)
0196c95e 4462 {
31db0fe0 4463 if (args[i].reg == 0 || args[i].pass_on_stack)
0196c95e 4464 {
48810515 4465 rtx_insn *before_arg = get_last_insn ();
0196c95e 4466
ddc923b5
MP
4467 /* We don't allow passing huge (> 2^30 B) arguments
4468 by value. It would cause an overflow later on. */
a20c5714 4469 if (constant_lower_bound (adjusted_args_size.constant)
ddc923b5
MP
4470 >= (1 << (HOST_BITS_PER_INT - 2)))
4471 {
4472 sorry ("passing too large argument on stack");
4473 continue;
4474 }
4475
0196c95e
JJ
4476 if (store_one_arg (&args[i], argblock, flags,
4477 adjusted_args_size.var != 0,
4478 reg_parm_stack_space)
4479 || (pass == 0
4480 && check_sibcall_argument_overlap (before_arg,
4481 &args[i], 1)))
4482 sibcall_failure = 1;
4483 }
4484
2b1c5433 4485 if (args[i].stack)
7d810276
JJ
4486 call_fusage
4487 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4488 gen_rtx_USE (VOIDmode, args[i].stack),
4489 call_fusage);
0196c95e 4490 }
0a1c58a2
JL
4491
4492 /* If we have a parm that is passed in registers but not in memory
4493 and whose alignment does not permit a direct copy into registers,
4494 make a group of pseudos that correspond to each register that we
4495 will later fill. */
4496 if (STRICT_ALIGNMENT)
4497 store_unaligned_arguments_into_pseudos (args, num_actuals);
4498
4499 /* Now store any partially-in-registers parm.
4500 This is the last place a block-move can happen. */
4501 if (reg_parm_seen)
4502 for (i = 0; i < num_actuals; i++)
4503 if (args[i].partial != 0 && ! args[i].pass_on_stack)
c67846f2 4504 {
48810515 4505 rtx_insn *before_arg = get_last_insn ();
c67846f2 4506
99206968
KT
4507 /* On targets with weird calling conventions (e.g. PA) it's
4508 hard to ensure that all cases of argument overlap between
4509 stack and registers work. Play it safe and bail out. */
4510 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4511 {
4512 sibcall_failure = 1;
4513 break;
4514 }
4515
4c6b3b2a
JJ
4516 if (store_one_arg (&args[i], argblock, flags,
4517 adjusted_args_size.var != 0,
4518 reg_parm_stack_space)
4519 || (pass == 0
4520 && check_sibcall_argument_overlap (before_arg,
0cdca92b 4521 &args[i], 1)))
c67846f2
JJ
4522 sibcall_failure = 1;
4523 }
51bbfa0c 4524
2f21e1ba
BS
4525 bool any_regs = false;
4526 for (i = 0; i < num_actuals; i++)
4527 if (args[i].reg != NULL_RTX)
4528 {
4529 any_regs = true;
4530 targetm.calls.call_args (args[i].reg, funtype);
4531 }
4532 if (!any_regs)
4533 targetm.calls.call_args (pc_rtx, funtype);
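
      /* The call_args hook calls above are matched by the end_call_args
	 hook near the end of this pass; the pair presumably lets targets
	 that track outgoing argument registers bracket the call setup,
	 with pc_rtx acting as a sentinel for "no register arguments".  */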

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (rettype) != VOIDmode
	  && ! structure_value_addr)
	{
	  if (pcc_struct_value)
	    valreg = hard_function_value (build_pointer_type (rettype),
					  fndecl, NULL, (pass == 0));
	  else
	    valreg = hard_function_value (rettype, fndecl, fntype,
					  (pass == 0));

	  /* If VALREG is a PARALLEL whose first member has a zero
	     offset, use that.  This is for targets such as m68k that
	     return the same value in multiple places.  */
	  if (GET_CODE (valreg) == PARALLEL)
	    {
	      rtx elem = XVECEXP (valreg, 0, 0);
	      rtx where = XEXP (elem, 0);
	      rtx offset = XEXP (elem, 1);
	      if (offset == const0_rtx
		  && GET_MODE (where) == GET_MODE (valreg))
		valreg = where;
	    }
	}
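
      /* E.g. a PARALLEL such as [(%d0, 0) (%a0, 0)] -- the shape m68k
	 uses when a pointer comes back in both %d0 and %a0 -- collapses
	 to plain %d0 above.  (Illustrative register names.)  */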

      /* If register arguments require space on the stack and stack space
	 was not preallocated, allocate stack space here for arguments
	 passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
	  && !ACCUMULATE_OUTGOING_ARGS
	  && must_preallocate == 0 && reg_parm_stack_space > 0)
	anti_adjust_stack (GEN_INT (reg_parm_stack_space));

      /* Pass the function the address in which to return a
	 structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
	{
	  structure_value_addr
	    = convert_memory_address (Pmode, structure_value_addr);
	  emit_move_insn (struct_value,
			  force_reg (Pmode,
				     force_operand (structure_value_addr,
						    NULL_RTX)));

	  if (REG_P (struct_value))
	    use_reg (&call_fusage, struct_value);
	}

      after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
				     static_chain_value, &call_fusage,
				     reg_parm_seen, flags);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
				pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
	 later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
	 with register windows this should be the incoming register.  */
      if (pass == 0)
	next_arg_reg = targetm.calls.function_incoming_arg
	  (args_so_far, function_arg_info::end_marker ());
      else
	next_arg_reg = targetm.calls.function_arg
	  (args_so_far, function_arg_info::end_marker ());

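      /* Some functions are declared to return one of their arguments
	 unchanged (ERF_RETURNS_ARG; memcpy returning its first argument
	 is the classic case).  The block below records that equivalence
	 in CALL_FUSAGE so later passes can take advantage of it.  */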
      if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
	{
	  int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
	  arg_nr = num_actuals - arg_nr - 1;
	  if (arg_nr >= 0
	      && arg_nr < num_actuals
	      && args[arg_nr].reg
	      && valreg
	      && REG_P (valreg)
	      && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
	    call_fusage
	      = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
				   gen_rtx_SET (valreg, args[arg_nr].reg),
				   call_fusage);
	}
      /* All arguments and registers used for the call must be set up by
	 now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
		  || multiple_p (stack_pointer_delta,
				 preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   flags, args_so_far);

      if (flag_ipa_ra)
	{
	  rtx_call_insn *last;
	  rtx datum = NULL_RTX;
	  if (fndecl != NULL_TREE)
	    {
	      datum = XEXP (DECL_RTL (fndecl), 0);
	      gcc_assert (datum != NULL_RTX
			  && GET_CODE (datum) == SYMBOL_REF);
	    }
	  last = last_call_insn ();
	  add_reg_note (last, REG_CALL_DECL, datum);
	}

      /* If the call setup or the call itself overlaps with anything
	 of the argument setup we probably clobbered our call address.
	 In that case we can't do sibcalls.  */
      if (pass == 0
	  && check_sibcall_argument_overlap (after_args, 0, 0))
	sibcall_failure = 1;

      /* If a non-BLKmode value is returned at the most significant end
	 of a register, shift the register right by the appropriate amount
	 and update VALREG accordingly.  BLKmode values are handled by the
	 group load/store machinery below.  */
      if (!structure_value_addr
	  && !pcc_struct_value
	  && TYPE_MODE (rettype) != VOIDmode
	  && TYPE_MODE (rettype) != BLKmode
	  && REG_P (valreg)
	  && targetm.calls.return_in_msb (rettype))
	{
	  if (shift_return_value (TYPE_MODE (rettype), false, valreg))
	    sibcall_failure = 1;
	  valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
	}

      if (pass && (flags & ECF_MALLOC))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
	  rtx_insn *last, *insns;

	  /* The return value from a malloc-like function is a pointer.  */
	  if (TREE_CODE (rettype) == POINTER_TYPE)
	    mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);

	  emit_move_insn (temp, valreg);

	  /* The return value from a malloc-like function cannot alias
	     anything else.  */
	  last = get_last_insn ();
	  add_reg_note (last, REG_NOALIAS, temp);

	  /* Write out the sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  valreg = temp;
	}

      /* For calls to `setjmp', etc., inform
	 function.c:setjmp_warnings that it should complain if
	 nonvolatile values are live.  For functions that cannot
	 return, inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
	{
	  /* The barrier must be emitted
	     immediately after the CALL_INSN.  Some ports emit more
	     than just a CALL_INSN above, so we must search for it here.  */

	  rtx_insn *last = get_last_insn ();
	  while (!CALL_P (last))
	    {
	      last = PREV_INSN (last);
	      /* There was no CALL_INSN?  */
	      gcc_assert (last != before_call);
	    }

	  emit_barrier_after (last);

	  /* Stack adjustments after a noreturn call are dead code.
	     However when NO_DEFER_POP is in effect, we must preserve
	     stack_pointer_delta.  */
	  if (inhibit_defer_pop == 0)
	    {
	      stack_pointer_delta = old_stack_allocated;
	      pending_stack_adjust = 0;
	    }
	}

      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (rettype) == VOIDmode
	  || ignore)
	target = const0_rtx;
      else if (structure_value_addr)
	{
	  if (target == 0 || !MEM_P (target))
	    {
	      target
		= gen_rtx_MEM (TYPE_MODE (rettype),
			       memory_address (TYPE_MODE (rettype),
					       structure_value_addr));
	      set_mem_attributes (target, rettype, 1);
	    }
	}
      else if (pcc_struct_value)
	{
	  /* This is the special C++ case where we need to
	     know what the true target was.  We take care to
	     never use this value more than once in one expression.  */
	  target = gen_rtx_MEM (TYPE_MODE (rettype),
				copy_to_reg (valreg));
	  set_mem_attributes (target, rettype, 1);
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (target == 0)
	    target = emit_group_move_into_temps (valreg);
	  else if (rtx_equal_p (target, valreg))
	    ;
	  else if (GET_CODE (target) == PARALLEL)
	    /* Handle the result of an emit_group_move_into_temps
	       call in the previous pass.  */
	    emit_group_move (target, valreg);
	  else
	    emit_group_store (target, valreg, rettype,
			      int_size_in_bytes (rettype));
	}
      else if (target
	       && GET_MODE (target) == TYPE_MODE (rettype)
	       && GET_MODE (target) == GET_MODE (valreg))
	{
	  bool may_overlap = false;

	  /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
	     reg to a plain register.  */
	  if (!REG_P (target) || HARD_REGISTER_P (target))
	    valreg = avoid_likely_spilled_reg (valreg);

	  /* If TARGET is a MEM in the argument area, and we have
	     saved part of the argument area, then we can't store
	     directly into TARGET as it may get overwritten when we
	     restore the argument save area below.  Don't work too
	     hard though and simply force TARGET to a register if it
	     is a MEM; the optimizer is quite likely to sort it out.  */
	  if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
	    for (i = 0; i < num_actuals; i++)
	      if (args[i].save_area)
		{
		  may_overlap = true;
		  break;
		}

	  if (may_overlap)
	    target = copy_to_reg (valreg);
	  else
	    {
	      /* TARGET and VALREG cannot be equal at this point
		 because the latter would not have
		 REG_FUNCTION_VALUE_P true, while the former would if
		 it were referring to the same register.

		 If they refer to the same register, this move will be
		 a no-op, except when function inlining is being
		 done.  */
	      emit_move_insn (target, valreg);

	      /* If we are setting a MEM, this code must be executed.
		 Since it is emitted after the call insn, sibcall
		 optimization cannot be performed in that case.  */
	      if (MEM_P (target))
		sibcall_failure = 1;
	    }
	}
      else
	target = copy_to_reg (avoid_likely_spilled_reg (valreg));

      /* If we promoted this return value, make the proper SUBREG.
	 TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
	  && TYPE_MODE (rettype) != BLKmode
	  && GET_MODE (target) != TYPE_MODE (rettype))
	{
	  tree type = rettype;
	  int unsignedp = TYPE_UNSIGNED (type);
	  machine_mode pmode;

	  /* Ensure we promote as expected, and get the new unsignedness.  */
	  pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
					 funtype, 1);
	  gcc_assert (GET_MODE (target) == pmode);

	  poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
						      GET_MODE (target));
	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
	  SUBREG_PROMOTED_VAR_P (target) = 1;
	  SUBREG_PROMOTED_SET (target, unsignedp);
	}
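
      /* For instance, on a target that promotes a QImode return value
	 to SImode, TARGET is the SImode register here and the code above
	 rewraps it as (subreg:QI (reg:SI ...) ...) with the promotion
	 flags set, telling consumers the upper bits are already
	 extended.  (Illustrative modes.)  */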

      /* If size of args is variable or this was a constructor call for a stack
	 argument, restore saved stack-pointer value.  */

      if (old_stack_level)
	{
	  rtx_insn *prev = get_last_insn ();

	  emit_stack_restore (SAVE_BLOCK, old_stack_level);
	  stack_pointer_delta = old_stack_pointer_delta;

	  fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);

	  pending_stack_adjust = old_pending_adj;
	  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
	  stack_arg_under_construction = old_stack_arg_under_construction;
	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	  stack_usage_watermark = initial_stack_usage_watermark;
	  sibcall_failure = 1;
	}
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
	{
#ifdef REG_PARM_STACK_SPACE
	  if (save_area)
	    restore_fixed_argument_area (save_area, argblock,
					 high_to_save, low_to_save);
#endif

	  /* If we saved any argument areas, restore them.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].save_area)
	      {
		machine_mode save_mode = GET_MODE (args[i].save_area);
		rtx stack_area
		  = gen_rtx_MEM (save_mode,
				 memory_address (save_mode,
						 XEXP (args[i].stack_slot, 0)));

		if (save_mode != BLKmode)
		  emit_move_insn (stack_area, args[i].save_area);
		else
		  emit_block_move (stack_area, args[i].save_area,
				   (gen_int_mode
				    (args[i].locate.size.constant, Pmode)),
				   BLOCK_OP_CALL_PARM);
	      }

	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	  stack_usage_watermark = initial_stack_usage_watermark;
	}

      /* If this was alloca, record the new stack level.  */
      if (flags & ECF_MAY_BE_ALLOCA)
	record_new_stack_level ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
	free (args[i].aligned_regs);

      targetm.calls.end_call_args ();

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
	{
	  tail_call_insns = insns;

	  /* Restore the pending stack adjustment now that we have
	     finished generating the sibling call sequence.  */

	  restore_pending_stack_adjust (&save);

	  /* Prepare arg structure for next iteration.  */
	  for (i = 0; i < num_actuals; i++)
	    {
	      args[i].value = 0;
	      args[i].aligned_regs = 0;
	      args[i].stack = 0;
	    }

	  sbitmap_free (stored_args_map);
	  internal_arg_pointer_exp_state.scan_start = NULL;
	  internal_arg_pointer_exp_state.cache.release ();
	}
      else
	{
	  normal_call_insns = insns;

	  /* Verify that we've deallocated all the stack we used.  */
	  gcc_assert ((flags & ECF_NORETURN)
		      || known_eq (old_stack_allocated,
				   stack_pointer_delta
				   - pending_stack_adjust));
	}

      /* If something prevents making this a sibling call,
	 zero out the sequence.  */
      if (sibcall_failure)
	tail_call_insns = NULL;
      else
	break;
    }
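
  /* The loop above ran at most twice: pass 0 built a tentative
     sibling-call sequence and pass 1 a normal call sequence.  Choose
     below which recorded sequence to emit.  */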

  /* If tail call production succeeded, we need to remove REG_EQUIV notes on
     arguments too, as argument area is now clobbered by the call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      crtl->tail_call_emit = true;
    }
  else
    {
      emit_insn (normal_call_insns);
      if (try_tail_call)
	/* Ideally we'd emit a message for all of the ways that it could
	   have failed.  */
	maybe_complain_about_tail_call (exp, "tail call production failed");
    }

  currently_expanding_call--;

  free (stack_usage_map_buf);
  free (args);
  return target;
}

/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx_insn *insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      /* There are never REG_EQUIV notes for the incoming arguments
	 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
	  && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	break;

      note = find_reg_note (insn, REG_EQUIV, 0);
      if (note)
	remove_note (insn, note);
      note = find_reg_note (insn, REG_EQUIV, 0);
      gcc_assert (!note);
    }
}

/* Traverse a list of TYPES and expand all complex types into their
   components.  */
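/* For example, on a target whose split_complex_arg hook accepts complex
   double, an argument-type list of (complex double, int) would be
   rewritten here as (double, double, int).  (Illustrative sketch.)  */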
static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (complex_type))
	{
	  tree next, imag;

	  /* Rewrite complex type with component type.  */
	  TREE_VALUE (p) = TREE_TYPE (complex_type);
	  next = TREE_CHAIN (p);

	  /* Add another component type for the imaginary part.  */
	  imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
	  TREE_CHAIN (p) = imag;
	  TREE_CHAIN (imag) = next;

	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return types;
}

/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as ARGS.
   Store the return value if RETVAL is nonzero: store it in VALUE if
   VALUE is nonnull, otherwise pick a convenient location.  In either
   case return the location of the stored value.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
   other types of library calls.  */
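/* A usage sketch (hedged; LIBFUNC, OP0 and OP1 stand for caller-provided
   values): callers normally reach this function through the
   emit_library_call / emit_library_call_value wrappers, which pack their
   trailing rtx/mode pairs into the ARGS vector, along the lines of

     rtx res = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
					SImode, op0, SImode, op1, SImode);  */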

rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
			   enum libcall_type fn_type,
			   machine_mode outmode, int nargs, rtx_mode_t *args)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  /* TODO: choose the correct decl type of ORGFUN.  Sadly that information
     isn't present here, so we default to the native (host) calling ABI.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE;
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct arg
  {
    rtx value;
    machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  poly_int64 struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  poly_int64 needed;
  rtx_insn *before_call;
  bool have_push_fusage;
  tree tfom;			/* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags &= ~ECF_NOTHROW;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  rtx pointer_reg
	    = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
	  pcc_struct_value = 1;
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
	  struct_value_size = GET_MODE_SIZE (outmode);
	  if (value != 0 && MEM_P (value))
	    mem_value = value;
	  else
	    mem_value = assign_temp (tfom, 1, 1);
#endif
	  /* This call returns a big structure.  */
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
	}
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
	  && !(CONSTANT_P (addr)
	       && targetm.legitimate_constant_p (Pmode, addr)))
	addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      function_arg_info ptr_arg (Pmode, /*named=*/true);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
      gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   reg_parm_stack_space, 0,
			   NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, ptr_arg);

      count++;
    }

  for (unsigned int i = 0; count < nargs; i++, count++)
    {
      rtx val = args[i].first;
      function_arg_info arg (args[i].second, /*named=*/true);
      int unsigned_p = 0;

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      gcc_assert (arg.mode != BLKmode
		  && (GET_MODE (val) == arg.mode
		      || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
	  && !(CONSTANT_P (val)
	       && targetm.legitimate_constant_p (arg.mode, val)))
	val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far_v, arg))
	{
	  rtx slot;
	  int must_copy = !reference_callee_copied (&args_so_far_v, arg);

	  /* If this was a CONST function, it is now PURE since it now
	     reads memory.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }

	  if (MEM_P (val) && !must_copy)
	    {
	      tree val_expr = MEM_EXPR (val);
	      if (val_expr)
		mark_addressable (val_expr);
	      slot = val;
	    }
	  else
	    {
	      slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
				  1, 1);
	      emit_move_insn (slot, val);
	    }

	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					   gen_rtx_USE (VOIDmode, slot),
					   call_fusage);
	  if (must_copy)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode,
							      slot),
					     call_fusage);

	  arg.mode = Pmode;
	  arg.pass_by_reference = true;
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	}

      arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
					NULL_TREE, 0);
      argvec[count].mode = arg.mode;
      argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
					   unsigned_p);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);

      argvec[count].partial
	= targetm.calls.arg_partial_bytes (args_so_far, arg);

      if (argvec[count].reg == 0
	  || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	{
	  locate_and_pad_parm (arg.mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			       1,
#else
			       argvec[count].reg != 0,
#endif
			       reg_parm_stack_space, argvec[count].partial,
			       NULL_TREE, &args_size, &argvec[count].locate);
	  args_size.constant += argvec[count].locate.size.constant;
	  gcc_assert (!argvec[count].locate.size.var);
	}
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	argvec[count].locate.where_pad =
	  BLOCK_REG_PADDING (arg.mode, NULL_TREE,
			     known_le (GET_MODE_SIZE (arg.mode),
				       UNITS_PER_WORD));
#endif

      targetm.calls.function_arg_advance (args_so_far, arg);
    }

  for (int i = 0; i < nargs; i++)
    if (reg_parm_stack_space > 0
	|| argvec[i].reg == 0
	|| argvec[i].partial != 0)
      update_stack_alignment_for_call (&argvec[i].locate);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (aligned_upper_bound (args_size.constant
					     + stack_pointer_delta,
					     STACK_BYTES)
			- stack_pointer_delta);
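
  /* Worked example (illustrative numbers): with STACK_BYTES == 16,
     args_size.constant == 20 and stack_pointer_delta == 8, the sum 28
     is aligned up to 32 and the delta subtracted again, so the args
     take 24 bytes and the combined stack offset stays 16-byte
     aligned.  */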

  args_size.constant = upper_bound (args_size.constant,
				    reg_parm_stack_space);

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
    args_size.constant -= reg_parm_stack_space;

  crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
					  args_size.constant);

  if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
    {
      poly_int64 pushed = args_size.constant + pending_stack_adjust;
      current_function_pushed_stack_size
	= upper_bound (current_function_pushed_stack_size, pushed);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
	 the evaluation of a parm to clobber something we have already
	 written to the stack.  Since most function calls on RISC machines
	 do not use the stack, this is uncommon, but must work correctly.

	 Therefore, we save any area of the stack that was already written
	 and that we are using.  Here we set up to do this by making a new
	 stack usage map from the old one.

	 Another approach might be to try to reorder the argument
	 evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

      /* Since we will be writing into the entire argument area, the
	 map must be allocated for its entire size, not just the part that
	 is the responsibility of the caller.  */
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	needed += reg_parm_stack_space;

      poly_int64 limit = needed;
      if (ARGS_GROW_DOWNWARD)
	limit += 1;

      /* For polynomial sizes, this is the maximum possible size needed
	 for arguments with a constant size and offset.  */
      HOST_WIDE_INT const_limit = constant_lower_bound (limit);
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 const_limit);

      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
	memcpy (stack_usage_map, initial_stack_usage_map,
		initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
		highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
	 and real regs afterwards.  Loop optimization, for example, can create
	 new libcalls after we've instantiated the virtual regs, and if we
	 use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
	argblock = plus_constant (Pmode, stack_pointer_rtx,
				  STACK_POINTER_OFFSET);
      else
	argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
    }

  /* We push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0)
    anti_adjust_stack (gen_int_mode (args_size.constant
				     - original_args_size.constant,
				     Pmode));

  argnum = nargs - 1;

3c0fca12 5440
f73ad30e
JH
5441#ifdef REG_PARM_STACK_SPACE
5442 if (ACCUMULATE_OUTGOING_ARGS)
5443 {
5444 /* The argument list is the property of the called routine and it
5445 may clobber it. If the fixed area has been used for previous
b820d2b8
AM
5446 parameters, we must save and restore it. */
5447 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5448 &low_to_save, &high_to_save);
3c0fca12
RH
5449 }
5450#endif
f725a3ec 5451
2f21e1ba
BS
5452 /* When expanding a normal call, args are stored in push order,
5453 which is the reverse of what we have here. */
5454 bool any_regs = false;
5455 for (int i = nargs; i-- > 0; )
5456 if (argvec[i].reg != NULL_RTX)
5457 {
5458 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5459 any_regs = true;
5460 }
5461 if (!any_regs)
5462 targetm.calls.call_args (pc_rtx, NULL_TREE);
5463
3c0fca12
RH
5464 /* Push the args that need to be pushed. */
5465
0ed4bf92
BS
5466 have_push_fusage = false;
5467
3c0fca12
RH
5468 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5469 are to be pushed. */
3d9684ae 5470 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 5471 {
      machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      unsigned int parm_align = argvec[argnum].locate.boundary;
      poly_int64 lower_bound = 0, upper_bound = 0;

      if (! (reg != 0 && partial == 0))
	{
	  rtx use;

	  if (ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If this is being stored into a pre-allocated, fixed-size,
		 stack area, save any previous data at that location.  */

	      if (ARGS_GROW_DOWNWARD)
		{
		  /* stack_slot is negative, but we want to index
		     stack_usage_map with positive values.  */
		  upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
		  lower_bound = upper_bound - argvec[argnum].locate.size.constant;
		}
	      else
		{
		  lower_bound = argvec[argnum].locate.slot_offset.constant;
		  upper_bound = lower_bound + argvec[argnum].locate.size.constant;
		}

	      if (stack_region_maybe_used_p (lower_bound, upper_bound,
					     reg_parm_stack_space))
		{
		  /* We need to make a save area.  */
		  poly_uint64 size
		    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
		  machine_mode save_mode
		    = int_mode_for_size (size, 1).else_blk ();
		  rtx adr
		    = plus_constant (Pmode, argblock,
				     argvec[argnum].locate.offset.constant);
		  rtx stack_area
		    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

		  if (save_mode == BLKmode)
		    {
		      argvec[argnum].save_area
			= assign_stack_temp (BLKmode,
					     argvec[argnum].locate.size.constant);

		      emit_block_move (validize_mem
					 (copy_rtx (argvec[argnum].save_area)),
				       stack_area,
				       (gen_int_mode
					(argvec[argnum].locate.size.constant,
					 Pmode)),
				       BLOCK_OP_CALL_PARM);
		    }
		  else
		    {
		      argvec[argnum].save_area = gen_reg_rtx (save_mode);

		      emit_move_insn (argvec[argnum].save_area, stack_area);
		    }
		}
	    }

	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
			  partial, reg, 0, argblock,
			  (gen_int_mode
			   (argvec[argnum].locate.offset.constant, Pmode)),
			  reg_parm_stack_space,
			  ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad),
			  false);

	  /* Now mark the segment we just used.  */
	  if (ACCUMULATE_OUTGOING_ARGS)
	    mark_stack_region_used (lower_bound, upper_bound);

	  NO_DEFER_POP;

	  /* Indicate argument access so that alias.c knows that these
	     values are live.  */
	  if (argblock)
	    use = plus_constant (Pmode, argblock,
				 argvec[argnum].locate.offset.constant);
	  else if (have_push_fusage)
	    continue;
	  else
	    {
	      /* When arguments are pushed, trying to tell alias.c where
		 exactly this argument is won't work, because the
		 auto-increment causes confusion.  So we merely indicate
		 that we access something with a known mode somewhere on
		 the stack.  */
	      use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_rtx_SCRATCH (Pmode));
	      have_push_fusage = true;
	    }

	  use = gen_rtx_MEM (argvec[argnum].mode, use);
	  use = gen_rtx_USE (VOIDmode, use);
	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
	}
    }

  argnum = nargs - 1;

  fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum--)
    {
      machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
	{
	  emit_move_insn (reg, val);
#ifdef BLOCK_REG_PADDING
	  poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);

	  /* Copied from load_register_parameters.  */

	  /* Handle the case where we have a value that needs shifting
	     up to the msb, e.g. a QImode value being padded upward on
	     a BYTES_BIG_ENDIAN machine.  */
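	  /* For instance (illustrative numbers): a QImode value in a
	     64-bit word register gives shift == (8 - 1) * 8 == 56,
	     placing the byte at the most significant end of the word.  */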
	  if (known_lt (size, UNITS_PER_WORD)
	      && (argvec[argnum].locate.where_pad
		  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
	    {
	      rtx x;
	      poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      /* Assigning REG here rather than a temp makes CALL_FUSAGE
		 report the whole reg as used.  Strictly speaking, the
		 call only uses SIZE bytes at the msb end, but it doesn't
		 seem worth generating rtl to say that.  */
	      reg = gen_rtx_REG (word_mode, REGNO (reg));
	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
	      if (x != reg)
		emit_move_insn (reg, x);
	    }
#endif
	}

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	use_group_regs (&call_fusage, reg);
      else if (reg != 0)
	{
	  int partial = argvec[count].partial;
	  if (partial)
	    {
	      int nregs;
	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	      use_regs (&call_fusage, REGNO (reg), nregs);
	    }
	  else
	    use_reg (&call_fusage, reg);
	}
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
		      force_reg (Pmode,
				 force_operand (XEXP (mem_value, 0),
						NULL_RTX)));
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
	    ? hard_libcall_value (outmode, orgfun) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (multiple_p (stack_pointer_delta,
			  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));

  before_call = get_last_insn ();

  if (flag_callgraph_info)
    record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (tfom, NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       targetm.calls.function_arg (args_so_far,
					   function_arg_info::end_marker ()),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
3c0fca12 5691
1e288103 5692 if (flag_ipa_ra)
4f660b15 5693 {
e67d1102 5694 rtx datum = orgfun;
4f660b15 5695 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
e67d1102 5696 rtx_call_insn *last = last_call_insn ();
4f660b15
RO
5697 add_reg_note (last, REG_CALL_DECL, datum);
5698 }
5699
460b171d
JB
5700 /* Right-shift returned value if necessary. */
5701 if (!pcc_struct_value
5702 && TYPE_MODE (tfom) != BLKmode
5703 && targetm.calls.return_in_msb (tfom))
5704 {
5705 shift_return_value (TYPE_MODE (tfom), false, valreg);
5706 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5707 }
5708
2f21e1ba
BS
5709 targetm.calls.end_call_args ();
5710
6fb5fa3c
DB
5711 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5712 that it should complain if nonvolatile values are live. For
5713 functions that cannot return, inform flow that control does not
5714 fall through. */
6e14af16 5715 if (flags & ECF_NORETURN)
695ee791 5716 {
570a98eb 5717 /* The barrier note must be emitted
695ee791
RH
5718 immediately after the CALL_INSN. Some ports emit more than
5719 just a CALL_INSN above, so we must search for it here. */
48810515 5720 rtx_insn *last = get_last_insn ();
4b4bf941 5721 while (!CALL_P (last))
695ee791
RH
5722 {
5723 last = PREV_INSN (last);
5724 /* There was no CALL_INSN? */
366de0ce 5725 gcc_assert (last != before_call);
695ee791
RH
5726 }
5727
570a98eb 5728 emit_barrier_after (last);
695ee791
RH
5729 }
5730
85da11a6
EB
5731 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5732 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5733 if (flags & ECF_NOTHROW)
5734 {
48810515 5735 rtx_insn *last = get_last_insn ();
85da11a6
EB
5736 while (!CALL_P (last))
5737 {
5738 last = PREV_INSN (last);
5739 /* There was no CALL_INSN? */
5740 gcc_assert (last != before_call);
5741 }
5742
5743 make_reg_eh_region_note_nothrow_nononlocal (last);
5744 }
5745
3c0fca12
RH
5746 /* Now restore inhibit_defer_pop to its actual original value. */
5747 OK_DEFER_POP;
5748
5749 pop_temp_slots ();
5750
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
	}
      else
	{
	  /* Convert to the proper mode if a promotion has been active.  */
	  if (GET_MODE (valreg) != outmode)
	    {
	      int unsignedp = TYPE_UNSIGNED (tfom);

	      gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
						 fndecl ? TREE_TYPE (fndecl)
							: fntype, 1)
			  == GET_MODE (valreg));
	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
	    }

	  if (value != 0)
	    emit_move_insn (value, valreg);
	  else
	    value = valreg;
	}
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	restore_fixed_argument_area (save_area, argblock,
				     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx adr = plus_constant (Pmode, argblock,
				     argvec[count].locate.offset.constant);
	    rtx stack_area = gen_rtx_MEM (save_mode,
					  memory_address (save_mode, adr));

	    if (save_mode == BLKmode)
	      emit_block_move (stack_area,
			       validize_mem
				 (copy_rtx (argvec[count].save_area)),
			       (gen_int_mode
				(argvec[count].locate.size.constant, Pmode)),
			       BLOCK_OP_CALL_PARM);
	    else
	      emit_move_insn (stack_area, argvec[count].save_area);
	  }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
      stack_usage_watermark = initial_stack_usage_watermark;
    }

  free (stack_usage_map_buf);

  return value;
}

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is a bitmask of ECF_* flags; if ECF_MAY_BE_ALLOCA is set,
   this could be a call to `alloca', so we must be careful about how
   the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used with ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the fixed register argument area
   in the caller's frame, if any.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  poly_int64 used = 0;
  poly_int64 lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
	  if (ARGS_GROW_DOWNWARD)
	    {
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		{
		  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
		  upper_bound = -rtx_to_poly_int64 (offset) + 1;
		}
	      else
		upper_bound = 0;

	      lower_bound = upper_bound - arg->locate.size.constant;
	    }
	  else
	    {
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		{
		  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
		  lower_bound = rtx_to_poly_int64 (offset);
		}
	      else
		lower_bound = 0;

	      upper_bound = lower_bound + arg->locate.size.constant;
	    }

	  if (stack_region_maybe_used_p (lower_bound, upper_bound,
					 reg_parm_stack_space))
	    {
	      /* We need to make a save area.  */
	      poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
	      machine_mode save_mode
		= int_mode_for_size (size, 1).else_blk ();
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  arg->save_area
		    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
				   stack_area,
				   (gen_int_mode
				    (arg->locate.size.constant, Pmode)),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object, or if for any other reason the
	 mode doesn't agree, convert it now.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
						    arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
46bd2bee 6007 unsigned int parm_align;
51bbfa0c
RS
6008
6009 /* Argument is a scalar, not entirely passed in registers.
6010 (If part is passed in registers, arg->partial says how much
6011 and emit_push_insn will take care of putting it there.)
f725a3ec 6012
51bbfa0c
RS
6013 Push it, and if its size is less than the
6014 amount of space allocated to it,
6015 also bump stack pointer by the additional space.
6016 Note that in C the default argument promotions
6017 will prevent such mismatches. */
6018
7b4df2bf
RS
6019 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
6020 ? 0 : GET_MODE_SIZE (arg->mode));
974aedcc 6021
51bbfa0c
RS
6022 /* Compute how much space the push instruction will push.
6023 On many machines, pushing a byte will advance the stack
6024 pointer by a halfword. */
6025#ifdef PUSH_ROUNDING
6026 size = PUSH_ROUNDING (size);
6027#endif
6028 used = size;
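
      /* For instance (illustrative only): if PUSH_ROUNDING rounds to
	 4-byte words, a QImode argument of size 1 gets size = used = 4,
	 even though the value itself occupies a single byte.  */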

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  != PAD_NONE)
	/* At the moment we don't (need to) support ABIs for which the
	   padding isn't known at compile time.  In principle it should
	   be easy to add though.  */
	used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
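
      /* E.g. (hypothetical numbers): size == 10 with PARM_BOUNDARY == 32
	 rounds up to used == force_align_up (10, 4) == 12.  */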

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
	  poly_int64 pad = used - size;
	  unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
	  if (pad_align != 0)
	    parm_align = MIN (parm_align, pad_align);
	}
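
      /* Continuing the hypothetical numbers above: used == 12 and
	 size == 10 give pad == 2, hence pad_align == 16 bits, so a
	 downward-padded argument is only known to be aligned to
	 MIN (arg->locate.boundary, 16).  */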

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      if (maybe_ne (used, 0)
	  && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
			      NULL_RTX, parm_align, partial, reg, used - size,
			      argblock, ARGS_SIZE_RTX (arg->locate.offset),
			      reg_parm_stack_space,
			      ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
	sibcall_failure = 1;

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      poly_int64 excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - arg_int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype),
				  EXPAND_NORMAL);
	}
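
      /* E.g. (illustrative): a 6-byte struct whose stack slot was
	 rounded up to arg->locate.size.constant == 8, with partial == 0,
	 leaves excess == 2 bytes of trailing padding.  */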

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else
	    {
	      unsigned int excess_align
		= known_alignment (excess) * BITS_PER_UNIT;
	      if (excess_align != 0)
		parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  poly_int64 i = 0;

	  if (strip_offset (XEXP (x, 0), &i)
	      == crtl->args.internal_arg_pointer)
	    {
	      /* arg.locate doesn't contain the pretend_args_size offset,
		 it's part of argblock.  Ensure we don't count it in I.  */
	      if (STACK_GROWS_DOWNWARD)
		i -= crtl->args.pretend_args_size;
	      else
		i += crtl->args.pretend_args_size;

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && arg->locate.size.var == 0);
	      poly_int64 size_val = rtx_to_poly_int64 (size_rtx);

	      if (known_eq (arg->locate.offset.constant, i))
		{
		  /* Even though they appear to be at the same location,
		     if part of the outgoing argument is in registers,
		     they aren't really at the same location.  Check for
		     this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (maybe_ne (arg->locate.size.constant, size_val))
		    sibcall_failure = 1;
		}
	      else if (maybe_in_range_p (arg->locate.offset.constant,
					 i, size_val))
		sibcall_failure = 1;
	      /* Use arg->locate.size.constant instead of size_rtx
		 because we only care about the part of the argument
		 on the stack.  */
	      else if (maybe_in_range_p (i, arg->locate.offset.constant,
					 arg->locate.size.constant))
		sibcall_failure = 1;
	    }
	}
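
      /* Illustrative overlap case: with the incoming argument at
	 I == 16 and size_val == 8, an outgoing slot at
	 arg->locate.offset.constant == 20 falls inside [16, 24),
	 so the checks above record a sibcall failure.  */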

      if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
	emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
			parm_align, partial, reg, excess, argblock,
			ARGS_SIZE_RTX (arg->locate.offset),
			reg_parm_stack_space,
			ARGS_SIZE_RTX (arg->locate.alignment_pad), false);

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    mark_stack_region_used (lower_bound, upper_bound);

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}

/* Return true if we do not know how to pass ARG solely in registers.  */

bool
must_pass_in_stack_var_size (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  return false;
}
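
/* For example (C++, illustrative): a class type with a non-trivial
   copy constructor or destructor is marked TREE_ADDRESSABLE, because
   it must be constructed in memory, so the check above forces it
   onto the stack.  */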

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  if (TYPE_EMPTY_P (arg.type))
    return false;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register...  */
  if (arg.mode == BLKmode
      && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (targetm.calls.function_arg_padding (arg.mode, arg.type)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;

  return false;
}
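
/* Illustrative case for the final check above: on a hypothetical
   big-endian target with PARM_BOUNDARY == 32, a 6-byte BLKmode
   structure is padded upward, so copying it into a register would
   leave the value in the wrong part of the register; it must
   therefore be passed on the stack.  */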

/* Return true if TYPE must be passed on the stack when passed to
   the "..." arguments of a function.  */

bool
must_pass_va_arg_in_stack (tree type)
{
  function_arg_info arg (type, /*named=*/false);
  return targetm.calls.must_pass_in_stack (arg);
}

/* Return true if FIELD is the C++17 empty base field that should
   be ignored for ABI calling convention decisions in order to
   maintain ABI compatibility between C++14 and earlier, which doesn't
   add this FIELD to classes with empty bases, and C++17 and later
   which does.  */

bool
cxx17_empty_base_field_p (const_tree field)
{
  return (DECL_FIELD_ABI_IGNORED (field)
	  && DECL_ARTIFICIAL (field)
	  && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
	  && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
}
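
/* For example (C++, illustrative):

     struct Empty {};
     struct D : Empty { float f; };

   C++17 layout gives D an artificial FIELD_DECL for the Empty base
   class; this predicate recognizes that field so D can keep the
   calling convention it had in C++14, where the field does not
   exist.  */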

/* Tell the garbage collector about GTY markers in this source file.  */
#include "gt-calls.h"