/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define INCLUDE_STRING
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "stringpool.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"
#include "attr-fnspec.h"
#include "value-query.h"

#include "tree-pretty-print.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
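
/* For example, on a target where PREFERRED_STACK_BOUNDARY is 128 bits
   and BITS_PER_UNIT is 8, STACK_BYTES evaluates to 16, so outgoing
   argument blocks are sized in multiples of 16 bytes.  (Illustrative
   values only; both macros are target-defined.)  */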

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into
     registers, copy in smaller-sized pieces into pseudos.  These are
     stored in a block pointed to by this field.  The next field says
     how many word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
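
/* For example, while the watermark is still at its initial value of
   HOST_WIDE_INT_M1U, every byte index compares below it and only
   stack_usage_map is consulted.  Once a region with a non-constant upper
   bound is marked used (see mark_stack_region_used below), the watermark
   drops to that region's lower bound and every index at or above it is
   conservatively treated as used.  */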

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
			   unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
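
/* These two functions are used as a check-then-commit pair; a sketch of
   the typical calling pattern (hypothetical names, for illustration) is:

     if (stack_region_maybe_used_p (lower, upper, reg_parm_stack_space))
       ... evaluate the argument into a temporary first ...
     mark_stack_region_used (lower, upper);

   so that a nested call expanded while evaluating an argument cannot
   clobber an argument slot that has already been filled.  */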

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* FUNEXP could be a SYMBOL_REF that represents a function pointer
	 of ptr_mode.  In this case, it should be converted into address
	 mode to be a valid address for a memory rtx pattern.  See
	 PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
				 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED,
	     tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     poly_int64 stack_size ATTRIBUTE_UNUSED,
	     poly_int64 rounded_stack_size,
	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg,
				   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* Likewise for a looping const or pure call.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the
	 args, we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that
	 the stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack
     manipulations.  Restore the stack pointer to its original value now.
     Usually ACCUMULATE_OUTGOING_ARGS targets don't get here, but there
     are exceptions.  On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on
     demand, and popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even
     for such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}
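
/* As an example of the N_POPPED accounting above: for an i386 stdcall
   function taking 8 bytes of arguments, targetm.calls.return_pops_args
   returns 8, so a "call_pop" pattern is selected and the 8-byte stack
   adjustment is folded into the call insn instead of being emitted as a
   separate stack add after the call.  */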

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp),
   then set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might
   allocate space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
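
/* The prefix stripping above means that, for example, "_setjmp" and
   "__setjmp" are both matched against "setjmp" and so are also flagged
   ECF_RETURNS_TWICE, while "savectx", "vfork" and "getcontext" must
   appear unprefixed because they are compared against NAME, not TNAME.  */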

/* Return the fnspec for DECL.  */

static attr_fnspec
decl_fnspec (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (type)
    {
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
	{
	  return TREE_VALUE (TREE_VALUE (attr));
	}
    }
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return builtin_fnspec (fndecl);
  return "";
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */

static int
decl_return_flags (tree fndecl)
{
  attr_fnspec fnspec = decl_fnspec (fndecl);

  unsigned int arg;
  if (fnspec.returns_arg (&arg))
    return ERF_RETURNS_ARG | arg;

  if (fnspec.returns_noalias_p ())
    return ERF_NOALIAS;
  return 0;
}
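
/* For instance, memcpy returns its first argument, and its fnspec string
   records that fact; decl_return_flags maps it to ERF_RETURNS_ARG | 0
   (the zero-based index of the returned argument) so that callers can
   reuse the known return value.  */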

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return gimple_call_num_args (stmt) > 0;
      default:
	break;
      }

  return false;
}

/* Return true when EXP contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
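
/* For example, a declaration such as

     int square (int) __attribute__ ((const, nothrow, leaf));

   yields ECF_CONST | ECF_NOTHROW | ECF_LEAF here: `const' sets
   TREE_READONLY on the decl, `nothrow' sets TREE_NOTHROW, and `leaf'
   is looked up directly in DECL_ATTRIBUTES.  */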

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  arg.type = TREE_TYPE (first_field (type));
	  arg.mode = TYPE_MODE (arg.type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}
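
/* For example, a C++ class object with a non-trivial copy constructor or
   destructor is TREE_ADDRESSABLE and is therefore always passed by
   invisible reference, regardless of the target hook; a variable-length
   array type has a non-constant TYPE_SIZE and is likewise forced by
   reference.  */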

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
		|| targetm.precompute_tls_p (args[i].mode, args[i].value)))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p
			   (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
1041
f73ad30e 1042#ifdef REG_PARM_STACK_SPACE
20efdf74
JL
1043
1044 /* The argument list is the property of the called routine and it
1045 may clobber it. If the fixed area has been used for previous
1046 parameters, we must save and restore it. */
3bdf5ad1 1047
20efdf74 1048static rtx
d329e058 1049save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
20efdf74 1050{
a20c5714
RS
1051 unsigned int low;
1052 unsigned int high;
20efdf74 1053
b820d2b8
AM
1054 /* Compute the boundary of the area that needs to be saved, if any. */
1055 high = reg_parm_stack_space;
6dad9361
TS
1056 if (ARGS_GROW_DOWNWARD)
1057 high += 1;
1058
b820d2b8
AM
1059 if (high > highest_outgoing_arg_in_use)
1060 high = highest_outgoing_arg_in_use;
20efdf74 1061
b820d2b8 1062 for (low = 0; low < high; low++)
a20c5714 1063 if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
b820d2b8
AM
1064 {
1065 int num_to_save;
ef4bddc2 1066 machine_mode save_mode;
b820d2b8 1067 int delta;
0a81f074 1068 rtx addr;
b820d2b8
AM
1069 rtx stack_area;
1070 rtx save_area;
20efdf74 1071
b820d2b8
AM
1072 while (stack_usage_map[--high] == 0)
1073 ;
20efdf74 1074
b820d2b8
AM
1075 *low_to_save = low;
1076 *high_to_save = high;
1077
1078 num_to_save = high - low + 1;
20efdf74 1079
b820d2b8
AM
1080 /* If we don't have the required alignment, must do this
1081 in BLKmode. */
fffbab82
RS
1082 scalar_int_mode imode;
1083 if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1084 && (low & (MIN (GET_MODE_SIZE (imode),
1085 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1086 save_mode = imode;
1087 else
b820d2b8 1088 save_mode = BLKmode;
20efdf74 1089
6dad9361
TS
1090 if (ARGS_GROW_DOWNWARD)
1091 delta = -high;
1092 else
1093 delta = low;
1094
0a81f074
RS
1095 addr = plus_constant (Pmode, argblock, delta);
1096 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
8ac61af7 1097
b820d2b8
AM
1098 set_mem_align (stack_area, PARM_BOUNDARY);
1099 if (save_mode == BLKmode)
1100 {
9474e8ab 1101 save_area = assign_stack_temp (BLKmode, num_to_save);
b820d2b8
AM
1102 emit_block_move (validize_mem (save_area), stack_area,
1103 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1104 }
1105 else
1106 {
1107 save_area = gen_reg_rtx (save_mode);
1108 emit_move_insn (save_area, stack_area);
1109 }
8ac61af7 1110
b820d2b8
AM
1111 return save_area;
1112 }
1113
1114 return NULL_RTX;
20efdf74
JL
1115}
1116
1117static void
d329e058 1118restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
20efdf74 1119{
ef4bddc2 1120 machine_mode save_mode = GET_MODE (save_area);
b820d2b8 1121 int delta;
0a81f074 1122 rtx addr, stack_area;
b820d2b8 1123
6dad9361
TS
1124 if (ARGS_GROW_DOWNWARD)
1125 delta = -high_to_save;
1126 else
1127 delta = low_to_save;
1128
0a81f074
RS
1129 addr = plus_constant (Pmode, argblock, delta);
1130 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
b820d2b8 1131 set_mem_align (stack_area, PARM_BOUNDARY);
20efdf74
JL
1132
1133 if (save_mode != BLKmode)
1134 emit_move_insn (stack_area, save_area);
1135 else
44bb111a
RH
1136 emit_block_move (stack_area, validize_mem (save_area),
1137 GEN_INT (high_to_save - low_to_save + 1),
1138 BLOCK_OP_CALL_PARM);
20efdf74 1139}
19652adf 1140#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}
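
/* Worked example for the endian correction above: for a 3-byte argument
   on a 32-bit target that pads downward, bytes = 3 and
   endian_correction = 32 - 24 = 8, so the 24 value bits are placed at
   bit offset 8 within the word-mode pseudo instead of offset 0.  */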

/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  HOST_WIDE_INT limit = warn_alloc_size_limit;
  if (limit == HOST_WIDE_INT_MAX)
    limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

  alloc_object_size_limit = build_int_cst (size_type_node, limit);

  return alloc_object_size_limit;
}
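
/* For instance, with -Walloc-size-larger-than=1048576 the limit is one
   MiB; without the option, warn_alloc_size_limit is HOST_WIDE_INT_MAX and
   the limit falls back to PTRDIFF_MAX, the largest valid object size.  */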

/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of an object, or a valid size argument to an allocation function
   declared with attribute alloc_size (whose argument may be signed), or
   to a string manipulation function like memset.
   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.
   When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
		int flags /* = 0 */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (integral)
    {
      value_range vr;
      if (query && query->range_of_expr (vr, exp, stmt))
	{
	  if (vr.undefined_p ())
	    vr.set_varying (TREE_TYPE (exp));
	  range_type = vr.kind ();
	  min = wi::to_wide (vr.min ());
	  max = wi::to_wide (vr.max ());
	}
      else
	range_type = determine_value_range (exp, &min, &max);
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else
	{
	  wide_int maxsize = wi::to_wide (max_object_size ());
	  min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
	  max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
	  if (wi::eq_p (0, min - 1))
	    {
	      /* EXP is unsigned and not in the range [1, MAX].  That means
		 it's either zero or greater than MAX.  Even though 0 would
		 normally be detected by -Walloc-zero, unless ALLOW_ZERO
		 is set, set the range to [MAX, TYPE_MAX] so that when MAX
		 is greater than the limit the whole range is diagnosed.  */
	      wide_int maxsize = wi::to_wide (max_object_size ());
	      if (flags & SR_ALLOW_ZERO)
		{
		  if (wi::leu_p (maxsize, max + 1)
		      || !(flags & SR_USE_LARGEST))
		    min = max = wi::zero (expprec);
		  else
		    {
		      min = max + 1;
		      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		    }
		}
	      else
		{
		  min = max + 1;
		  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		}
	    }
	  else if ((flags & SR_USE_LARGEST)
		   && wi::ltu_p (max + 1, maxsize))
	    {
	      /* When USE_LARGEST is set and the larger of the two subranges
		 is a valid size, use it...  */
	      min = max + 1;
	      max = maxsize;
	    }
	  else
	    {
	      /* ...otherwise use the smaller subrange.  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}

bool
get_size_range (tree exp, tree range[2], int flags /* = 0 */)
{
  return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
}
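
/* Example of the anti-range handling above: for an unsigned size_t
   argument known to be in the anti-range ~[1, PTRDIFF_MAX], i.e. either
   zero or larger than PTRDIFF_MAX, the function returns
   [PTRDIFF_MAX + 1, SIZE_MAX] by default, or [0, 0] when SR_ALLOW_ZERO
   is set, matching the ALLOW_ZERO description in the comment above.  */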

/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  FN is null when EXP is a call via a function pointer.
   When ARGS[0] is null the function does nothing.  ARGS[1] may be null
   for functions like malloc, and non-null for those like calloc that
   are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
	{
	  argrange[i][0] = args[i];
	  argrange[i][1] = args[i];

	  if (tree_int_cst_lt (args[i], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE is negative",
				   exp, idx[i] + 1, args[i]);
	    }
	  else if (integer_zerop (args[i]))
	    {
	      /* Avoid issuing -Walloc-zero for allocation functions other
		 than __builtin_alloca that are declared with attribute
		 returns_nonnull because there's no portability risk.  This
		 avoids warning for such calls to libiberty's xmalloc and
		 friends.
		 Also avoid issuing the warning for calls to function named
		 "alloca".  */
	      if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
		  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
		  : !lookup_attribute ("returns_nonnull",
				       TYPE_ATTRIBUTES (fntype)))
		warned = warning_at (loc, OPT_Walloc_zero,
				     "%Kargument %i value is zero",
				     exp, idx[i] + 1);
	    }
	  else if (tree_int_cst_lt (maxobjsize, args[i]))
	    {
	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
		 mode and with -fno-exceptions as a way to indicate array
		 size overflow.  There's no good way to detect C++98 here
		 so avoid diagnosing these calls for all C++ modes.  */
	      if (i == 0
		  && fn
		  && !args[1]
		  && lang_GNU_CXX ()
		  && DECL_IS_OPERATOR_NEW_P (fn)
		  && integer_all_onesp (args[i]))
		continue;

	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1, args[i], maxobjsize);
	    }
	}
      else if (TREE_CODE (args[i]) == SSA_NAME
	       && get_size_range (args[i], argrange[i]))
	{
	  /* Verify that the argument's range is not negative (including
	     upper bound of zero).  */
	  if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
	      && tree_int_cst_le (argrange[i][1], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] is negative",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1]);
	    }
	  else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1],
				   maxobjsize);
	    }
	}
    }

  if (!argrange[0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
	 attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds %<SIZE_MAX%>",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds maximum object size %E",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1,
			     maxobjsize);

      if (warned)
	{
	  /* Print the full range of each of the two arguments to make
	     it clear when it is, in fact, in a range and not constant.  */
	  if (argrange[0][0] != argrange [0][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[0] + 1, argrange[0][0], argrange[0][1]);
	  if (argrange[1][0] != argrange [1][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[1] + 1, argrange[1][0], argrange[1][1]);
	}
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_UNDECLARED_BUILTIN (fn))
	inform (loc,
		"in a call to built-in allocation function %qD", fn);
      else
	inform (fnloc,
		"in a call to allocation function %qD declared here", fn);
    }
}
1560
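/* Illustrative sketch (not from the GCC sources): a declaration and
   call that exercise the checks above.  The function and values are
   hypothetical.

     __attribute__ ((alloc_size (1, 2)))
     void *my_calloc (size_t n, size_t sz);

     void *p = my_calloc ((size_t)-1 / 2, 4);

   The product of the two arguments overflows size_t, so the call is
   diagnosed by the code above under -Walloc-size-larger-than= as
   exceeding SIZE_MAX.  */
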
/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise returns
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == ADDR_EXPR
              || code == COMPONENT_REF
              || code == VAR_DECL)
            decl = gimple_assign_rhs1 (def);
        }
      else
        var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}

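/* Illustrative sketch (not from the GCC sources): the attribute this
   helper looks for marks a character array that need not be
   nul-terminated:

     __attribute__ ((nonstring)) char id[8];

     strncpy (id, src, sizeof id);   // not diagnosed: bounded access
     strlen (id);                    // diagnosed by -Wstringop-overread

   For the argument of either call, get_attr_nonstring_decl returns
   the VAR_DECL for ID.  */
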
/* Warn about passing a non-string array/pointer to a built-in function
   that expects a nul-terminated string argument.  Returns true if
   a warning has been issued.  */

bool
maybe_warn_nonstring_arg (tree fndecl, tree exp)
{
  if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return false;

  if (TREE_NO_WARNING (exp) || !warn_stringop_overread)
    return false;

  /* Avoid clearly invalid calls (more checking done below).  */
  unsigned nargs = call_expr_nargs (exp);
  if (!nargs)
    return false;

  /* The bound argument to a bounded string function like strncpy.  */
  tree bound = NULL_TREE;

  /* The longest known or possible string argument to one of the comparison
     functions.  If the length is less than the bound it is used instead.
     Since the length is only used for warning and not for code generation,
     disable strict mode in the calls to get_range_strlen below.  */
  tree maxlen = NULL_TREE;

  /* It's safe to call "bounded" string functions with a non-string
     argument since the functions provide an explicit bound for this
     purpose.  The exception is strncat where the bound may refer to
     either the destination or the source.  */
  int fncode = DECL_FUNCTION_CODE (fndecl);
  switch (fncode)
    {
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      {
        /* For these, if one argument refers to one or more of a set
           of string constants or arrays of known size, determine
           the range of their known or possible lengths and use it
           conservatively as the bound for the unbounded function,
           and to adjust the range of the bound of the bounded ones.  */
        for (unsigned argno = 0;
             argno < MIN (nargs, 2)
             && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
          {
            tree arg = CALL_EXPR_ARG (exp, argno);
            if (!get_attr_nonstring_decl (arg))
              {
                c_strlen_data lendata = { };
                /* Set MAXBOUND to an arbitrary non-null non-integer
                   node as a request to have it set to the length of
                   the longest string in a PHI.  */
                lendata.maxbound = arg;
                get_range_strlen (arg, &lendata, /* eltsize = */ 1);
                maxlen = lendata.maxbound;
              }
          }
      }
      /* Fall through.  */

    case BUILT_IN_STRNCAT:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRNCPY:
      if (nargs > 2)
        bound = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNDUP:
      if (nargs > 1)
        bound = CALL_EXPR_ARG (exp, 1);
      break;

    case BUILT_IN_STRNLEN:
      {
        tree arg = CALL_EXPR_ARG (exp, 0);
        if (!get_attr_nonstring_decl (arg))
          {
            c_strlen_data lendata = { };
            /* Set MAXBOUND to an arbitrary non-null non-integer
               node as a request to have it set to the length of
               the longest string in a PHI.  */
            lendata.maxbound = arg;
            get_range_strlen (arg, &lendata, /* eltsize = */ 1);
            maxlen = lendata.maxbound;
          }
        if (nargs > 1)
          bound = CALL_EXPR_ARG (exp, 1);
        break;
      }

    default:
      break;
    }

  /* Determine the range of the bound argument (if specified).  */
  tree bndrng[2] = { NULL_TREE, NULL_TREE };
  if (bound)
    {
      STRIP_NOPS (bound);
      get_size_range (bound, bndrng);
    }

  location_t loc = EXPR_LOCATION (exp);

  if (bndrng[0])
    {
      /* Diagnose excessive bound prior to the adjustment below and
         regardless of attribute nonstring.  */
      tree maxobjsize = max_object_size ();
      if (tree_int_cst_lt (maxobjsize, bndrng[0]))
        {
          bool warned = false;
          if (tree_int_cst_equal (bndrng[0], bndrng[1]))
            warned = warning_at (loc, OPT_Wstringop_overread,
                                 "%K%qD specified bound %E "
                                 "exceeds maximum object size %E",
                                 exp, fndecl, bndrng[0], maxobjsize);
          else
            warned = warning_at (loc, OPT_Wstringop_overread,
                                 "%K%qD specified bound [%E, %E] "
                                 "exceeds maximum object size %E",
                                 exp, fndecl, bndrng[0], bndrng[1],
                                 maxobjsize);
          if (warned)
            TREE_NO_WARNING (exp) = true;

          return warned;
        }
    }

  if (maxlen && !integer_all_onesp (maxlen))
    {
      /* Add one for the nul.  */
      maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
                            size_one_node);

      if (!bndrng[0])
        {
          /* Conservatively use the upper bound of the lengths for
             both the lower and the upper bound of the operation.  */
          bndrng[0] = maxlen;
          bndrng[1] = maxlen;
          bound = void_type_node;
        }
      else if (maxlen)
        {
          /* Replace the bound on the operation with the upper bound
             of the length of the string if the latter is smaller.  */
          if (tree_int_cst_lt (maxlen, bndrng[0]))
            bndrng[0] = maxlen;
          else if (tree_int_cst_lt (maxlen, bndrng[1]))
            bndrng[1] = maxlen;
        }
    }

  bool any_arg_warned = false;
  /* Iterate over the built-in function's formal arguments and check
     each const char* against the actual argument.  If the actual
     argument is declared attribute non-string, issue a warning unless
     the argument's maximum length is bounded.  */
  function_args_iterator it;
  function_args_iter_init (&it, TREE_TYPE (fndecl));

  for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
    {
      /* Avoid iterating past the declared argument in a call
         to a function declared without a prototype.  */
      if (argno >= nargs)
        break;

      tree argtype = function_args_iter_cond (&it);
      if (!argtype)
        break;

      if (TREE_CODE (argtype) != POINTER_TYPE)
        continue;

      argtype = TREE_TYPE (argtype);

      if (TREE_CODE (argtype) != INTEGER_TYPE
          || !TYPE_READONLY (argtype))
        continue;

      argtype = TYPE_MAIN_VARIANT (argtype);
      if (argtype != char_type_node)
        continue;

      tree callarg = CALL_EXPR_ARG (exp, argno);
      if (TREE_CODE (callarg) == ADDR_EXPR)
        callarg = TREE_OPERAND (callarg, 0);

      /* See if the destination is declared with attribute "nonstring".  */
      tree decl = get_attr_nonstring_decl (callarg);
      if (!decl)
        continue;

      /* The maximum number of array elements accessed.  */
      offset_int wibnd = 0;

      if (argno && fncode == BUILT_IN_STRNCAT)
        {
          /* See if the bound in strncat is derived from the strlen
             of the destination (as it's expected to be).  If so,
             reset BOUND and FNCODE to trigger a warning.  */
          tree dstarg = CALL_EXPR_ARG (exp, 0);
          if (is_strlen_related_p (dstarg, bound))
            {
              /* The bound applies to the destination, not to the source,
                 so reset these to trigger a warning without mentioning
                 the bound.  */
              bound = NULL;
              fncode = 0;
            }
          else if (bndrng[1])
            /* Use the upper bound of the range for strncat.  */
            wibnd = wi::to_offset (bndrng[1]);
        }
      else if (bndrng[0])
        /* Use the lower bound of the range for functions other than
           strncat.  */
        wibnd = wi::to_offset (bndrng[0]);

      /* Determine the size of the argument array if it is one.  */
      offset_int asize = wibnd;
      bool known_size = false;
      tree type = TREE_TYPE (decl);

      /* Determine the array size.  For arrays of unknown bound and
         for pointers, reset BOUND to trigger the appropriate warning.  */
      if (TREE_CODE (type) == ARRAY_TYPE)
        {
          if (tree arrbnd = TYPE_DOMAIN (type))
            {
              if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
                {
                  asize = wi::to_offset (arrbnd) + 1;
                  known_size = true;
                }
            }
          else if (bound == void_type_node)
            bound = NULL_TREE;
        }
      else if (bound == void_type_node)
        bound = NULL_TREE;

      /* In a call to strncat with a bound in a range whose lower but
         not upper bound is less than the array size, reset ASIZE to
         be the same as the bound and the other variable to trigger
         the appropriate warning below.  */
      if (fncode == BUILT_IN_STRNCAT
          && bndrng[0] != bndrng[1]
          && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
          && (!known_size
              || wi::ltu_p (asize, wibnd)))
        {
          asize = wibnd;
          bound = NULL_TREE;
          fncode = 0;
        }

      bool warned = false;

      auto_diagnostic_group d;
      if (wi::ltu_p (asize, wibnd))
        {
          if (bndrng[0] == bndrng[1])
            warned = warning_at (loc, OPT_Wstringop_overread,
                                 "%qD argument %i declared attribute "
                                 "%<nonstring%> is smaller than the specified "
                                 "bound %wu",
                                 fndecl, argno + 1, wibnd.to_uhwi ());
          else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
            warned = warning_at (loc, OPT_Wstringop_overread,
                                 "%qD argument %i declared attribute "
                                 "%<nonstring%> is smaller than "
                                 "the specified bound [%E, %E]",
                                 fndecl, argno + 1, bndrng[0], bndrng[1]);
          else
            warned = warning_at (loc, OPT_Wstringop_overread,
                                 "%qD argument %i declared attribute "
                                 "%<nonstring%> may be smaller than "
                                 "the specified bound [%E, %E]",
                                 fndecl, argno + 1, bndrng[0], bndrng[1]);
        }
      else if (fncode == BUILT_IN_STRNCAT)
        ; /* Avoid warning for calls to strncat() when the bound
             is equal to the size of the non-string argument.  */
      else if (!bound)
        warned = warning_at (loc, OPT_Wstringop_overread,
                             "%qD argument %i declared attribute %<nonstring%>",
                             fndecl, argno + 1);

      if (warned)
        {
          inform (DECL_SOURCE_LOCATION (decl),
                  "argument %qD declared here", decl);
          any_arg_warned = true;
        }
    }

  if (any_arg_warned)
    TREE_NO_WARNING (exp) = true;

  return any_arg_warned;
}

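/* Illustrative sketch (not from the GCC sources): why strncat is
   special-cased above.  Given a hypothetical declaration

     __attribute__ ((nonstring)) char d[8];

   a call like strncat (d, s, sizeof d - strlen (d) - 1) computes its
   bound from strlen of the destination, which itself assumes D is
   nul-terminated; is_strlen_related_p detects this and the call is
   diagnosed even though an explicit bound is present.  */
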
/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization.  */

void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
  gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
  if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
    return;

  error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
}

/* Returns the type of the argument ARGNO to a function with type FNTYPE
   or null when the type cannot be determined or no such argument exists.  */

static tree
fntype_argno_type (tree fntype, unsigned argno)
{
  if (!prototype_p (fntype))
    return NULL_TREE;

  tree argtype;
  function_args_iterator it;
  FOREACH_FUNCTION_ARGS (fntype, argtype, it)
    if (argno-- == 0)
      return argtype;

  return NULL_TREE;
}

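/* Illustrative sketch (not from the GCC sources): for a prototype
   such as

     void f (char *p, size_t n);

   fntype_argno_type (fntype, 0) yields the type node for char * and
   fntype_argno_type (fntype, 1) the node for size_t, while for an
   unprototyped function it yields NULL_TREE.  */
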
/* Helper to append the "human readable" attribute access specification
   described by ACCESS to the array ATTRSTR with size STRSIZE.  Used in
   diagnostics.  */

static inline void
append_attrname (const std::pair<int, attr_access> &access,
                 char *attrstr, size_t strsize)
{
  if (access.second.internal_p)
    return;

  tree str = access.second.to_external_string ();
  gcc_assert (strsize >= (size_t) TREE_STRING_LENGTH (str));
  strcpy (attrstr, TREE_STRING_POINTER (str));
}

/* Iterate over attribute access read-only, read-write, and write-only
   arguments and diagnose past-the-end accesses and related problems
   in the function call EXP.  */

static void
maybe_warn_rdwr_sizes (rdwr_map *rwm, tree fndecl, tree fntype, tree exp)
{
  auto_diagnostic_group adg;

  /* Set if a warning has been issued for any argument (used to decide
     whether to emit an informational note at the end).  */
  bool any_warned = false;

  /* A string describing the attributes that the warnings issued by this
     function apply to.  Used to print one informational note per function
     call, rather than one per warning.  That reduces clutter.  */
  char attrstr[80];
  attrstr[0] = 0;

  for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
    {
      std::pair<int, attr_access> access = *it;

      /* Get the function call arguments corresponding to the attribute's
         positional arguments.  When both arguments have been specified
         there will be two entries in *RWM, one for each.  They are
         cross-referenced by their respective argument numbers in
         ACCESS.PTRARG and ACCESS.SIZARG.  */
      const int ptridx = access.second.ptrarg;
      const int sizidx = access.second.sizarg;

      gcc_assert (ptridx != -1);
      gcc_assert (access.first == ptridx || access.first == sizidx);

      /* The pointer is set to null for the entry corresponding to
         the size argument.  Skip it.  It's handled when the entry
         corresponding to the pointer argument comes up.  */
      if (!access.second.ptr)
        continue;

      tree ptrtype = fntype_argno_type (fntype, ptridx);
      tree argtype = TREE_TYPE (ptrtype);

      /* The size of the access by the call.  */
      tree access_size;
      if (sizidx == -1)
        {
          /* If only the pointer attribute operand was specified and
             not size, set SIZE to the greater of MINSIZE or size of
             one element of the pointed to type to detect smaller
             objects (null pointers are diagnosed in this case only
             if the pointer is also declared with attribute nonnull).  */
          if (access.second.minsize
              && access.second.minsize != HOST_WIDE_INT_M1U)
            access_size = build_int_cstu (sizetype, access.second.minsize);
          else
            access_size = size_one_node;
        }
      else
        access_size = rwm->get (sizidx)->size;

      /* Format the value or range to avoid an explosion of messages.  */
      char sizstr[80];
      tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
      if (get_size_range (access_size, sizrng, true))
        {
          char *s0 = print_generic_expr_to_str (sizrng[0]);
          if (tree_int_cst_equal (sizrng[0], sizrng[1]))
            {
              gcc_checking_assert (strlen (s0) < sizeof sizstr);
              strcpy (sizstr, s0);
            }
          else
            {
              char *s1 = print_generic_expr_to_str (sizrng[1]);
              gcc_checking_assert (strlen (s0) + strlen (s1)
                                   < sizeof sizstr - 4);
              sprintf (sizstr, "[%s, %s]", s0, s1);
              free (s1);
            }
          free (s0);
        }
      else
        *sizstr = '\0';

      /* Set if a warning has been issued for the current argument.  */
      bool arg_warned = false;
      location_t loc = EXPR_LOCATION (exp);
      tree ptr = access.second.ptr;
      if (*sizstr
          && tree_int_cst_sgn (sizrng[0]) < 0
          && tree_int_cst_sgn (sizrng[1]) < 0)
        {
          /* Warn about negative sizes.  */
          if (access.second.internal_p)
            {
              const std::string argtypestr
                = access.second.array_as_string (ptrtype);

              arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
                                       "%Kbound argument %i value %s is "
                                       "negative for a variable length array "
                                       "argument %i of type %s",
                                       exp, sizidx + 1, sizstr,
                                       ptridx + 1, argtypestr.c_str ());
            }
          else
            arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
                                     "%Kargument %i value %s is negative",
                                     exp, sizidx + 1, sizstr);

          if (arg_warned)
            {
              append_attrname (access, attrstr, sizeof attrstr);
              /* Remember a warning has been issued and avoid warning
                 again below for the same attribute.  */
              any_warned = true;
              continue;
            }
        }

      if (tree_int_cst_sgn (sizrng[0]) >= 0)
        {
          if (COMPLETE_TYPE_P (argtype))
            {
              /* Multiply ACCESS_SIZE by the size of the type the pointer
                 argument points to.  If it's incomplete the size is used
                 as is.  */
              if (tree argsize = TYPE_SIZE_UNIT (argtype))
                if (TREE_CODE (argsize) == INTEGER_CST)
                  {
                    const int prec = TYPE_PRECISION (sizetype);
                    wide_int minsize = wi::to_wide (sizrng[0], prec);
                    minsize *= wi::to_wide (argsize, prec);
                    access_size = wide_int_to_tree (sizetype, minsize);
                  }
            }
        }
      else
        access_size = NULL_TREE;

      if (integer_zerop (ptr))
        {
          if (sizidx >= 0 && tree_int_cst_sgn (sizrng[0]) > 0)
            {
              /* Warn about null pointers with positive sizes.  This is
                 different from also declaring the pointer argument with
                 attribute nonnull when the function accepts null pointers
                 only when the corresponding size is zero.  */
              if (access.second.internal_p)
                {
                  const std::string argtypestr
                    = access.second.array_as_string (ptrtype);

                  arg_warned = warning_at (loc, OPT_Wnonnull,
                                           "%Kargument %i of variable length "
                                           "array %s is null but "
                                           "the corresponding bound argument "
                                           "%i value is %s",
                                           exp, sizidx + 1, argtypestr.c_str (),
                                           ptridx + 1, sizstr);
                }
              else
                arg_warned = warning_at (loc, OPT_Wnonnull,
                                         "%Kargument %i is null but "
                                         "the corresponding size argument "
                                         "%i value is %s",
                                         exp, ptridx + 1, sizidx + 1,
                                         sizstr);
            }
          else if (access_size && access.second.static_p)
            {
              /* Warn about null pointers for [static N] array arguments
                 but do not warn for ordinary (i.e., nonstatic) arrays.  */
              arg_warned = warning_at (loc, OPT_Wnonnull,
                                       "%Kargument %i to %<%T[static %E]%> "
                                       "is null where non-null expected",
                                       exp, ptridx + 1, argtype,
                                       access_size);
            }

          if (arg_warned)
            {
              append_attrname (access, attrstr, sizeof attrstr);
              /* Remember a warning has been issued and avoid warning
                 again below for the same attribute.  */
              any_warned = true;
              continue;
            }
        }

      access_data data (ptr, access.second.mode, NULL_TREE, false,
                        NULL_TREE, false);
      access_ref* const pobj = (access.second.mode == access_write_only
                                ? &data.dst : &data.src);
      tree objsize = compute_objsize (ptr, 1, pobj);

      /* The size of the destination or source object.  */
      tree dstsize = NULL_TREE, srcsize = NULL_TREE;
      if (access.second.mode == access_read_only
          || access.second.mode == access_none)
        {
          /* For a read-only argument there is no destination.  For
             no access, set the source as well and differentiate via
             the access flag below.  */
          srcsize = objsize;
          if (access.second.mode == access_read_only
              || access.second.mode == access_none)
            {
              /* For a read-only attribute there is no destination so
                 clear OBJSIZE.  This emits "reading N bytes" kind of
                 diagnostics instead of the "writing N bytes" kind,
                 unless MODE is none.  */
              objsize = NULL_TREE;
            }
        }
      else
        dstsize = objsize;

      /* Clear the no-warning bit in case it was set by check_access
         in a prior iteration so that accesses via different arguments
         are diagnosed.  */
      TREE_NO_WARNING (exp) = false;
      access_mode mode = data.mode;
      if (mode == access_deferred)
        mode = TYPE_READONLY (argtype) ? access_read_only : access_read_write;
      check_access (exp, access_size, /*maxread=*/ NULL_TREE, srcsize,
                    dstsize, mode, &data);

      if (TREE_NO_WARNING (exp))
        {
          any_warned = true;

          if (access.second.internal_p)
            inform (loc, "referencing argument %u of type %qT",
                    ptridx + 1, ptrtype);
          else
            /* If check_access issued a warning above, append the relevant
               attribute to the string.  */
            append_attrname (access, attrstr, sizeof attrstr);
        }
    }

  if (*attrstr)
    {
      if (fndecl)
        inform (DECL_SOURCE_LOCATION (fndecl),
                "in a call to function %qD declared with attribute %qs",
                fndecl, attrstr);
      else
        inform (EXPR_LOCATION (exp),
                "in a call with type %qT and attribute %qs",
                fntype, attrstr);
    }
  else if (any_warned)
    {
      if (fndecl)
        inform (DECL_SOURCE_LOCATION (fndecl),
                "in a call to function %qD", fndecl);
      else
        inform (EXPR_LOCATION (exp),
                "in a call with type %qT", fntype);
    }

  /* Set the bit in case it was cleared and not set above.  */
  TREE_NO_WARNING (exp) = true;
}

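/* Illustrative sketch (not from the GCC sources): a declaration using
   attribute access of the sort diagnosed above.  The function is
   hypothetical.

     __attribute__ ((access (write_only, 1, 2)))
     void fill (char *dst, size_t n);

     char buf[4];
     fill (buf, 8);   // -Wstringop-overflow: writing 8 bytes into
                      // an object of size 4
     fill (0, 1);     // -Wnonnull: null DST with a nonzero bound

   The rdwr_map entries for the call cross-reference the pointer
   argument and its bound the way PTRARG and SIZARG describe.  */
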
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 cumulative_args_t args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level,
                                 poly_int64_pod *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  bitmap_obstack_initialize (NULL);

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the back.  */

  i = num_actuals - 1;
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;
    bitmap slots = NULL;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j--;
      }
    argpos = 0;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);

        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j--;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j--;
        argpos++;
      }

    if (slots)
      BITMAP_FREE (slots);
  }

  bitmap_obstack_release (NULL);

  tree fntypeattrs = TYPE_ATTRIBUTES (fntype);
  /* Extract attribute alloc_size from the type of the called expression
     (which could be a function or a function pointer) and if set, store
     the indices of the corresponding arguments in ALLOC_IDX, and then
     the actual argument(s) at those indices in ALLOC_ARGS.  */
  int alloc_idx[2] = { -1, -1 };
  if (tree alloc_size = lookup_attribute ("alloc_size", fntypeattrs))
    {
      tree args = TREE_VALUE (alloc_size);
      alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
      if (TREE_CHAIN (args))
        alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
    }

  /* Array for up to the two attribute alloc_size arguments.  */
  tree alloc_args[] = { NULL_TREE, NULL_TREE };

  /* Map of attribute access specifications for function arguments.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, fntypeattrs);

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i--, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      function_arg_info arg (type, argpos < n_named_args);
      if (pass_by_reference (args_so_far_pnt, arg))
        {
          const bool callee_copies
            = reference_callee_copied (args_so_far_pnt, arg);
          tree base;

          /* If we're compiling a thunk, pass directly the address of an object
             already in memory, instead of making a copy.  Likewise if we want
             to make the copy in the callee instead of the caller.  */
          if ((call_from_thunk_p || callee_copies)
              && (base = get_base_address (args[i].tree_value))
              && TREE_CODE (base) != SSA_NAME
              && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
            {
              /* We may have turned the parameter value into an SSA name.
                 Go back to the original parameter so we can take the
                 address.  */
              if (TREE_CODE (args[i].tree_value) == SSA_NAME)
                {
                  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
                  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
                  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
                }
              /* Argument setup code may have copied the value to register.  We
                 revert that optimization now because the tail call code must
                 use the original location.  */
              if (TREE_CODE (args[i].tree_value) == PARM_DECL
                  && !MEM_P (DECL_RTL (args[i].tree_value))
                  && DECL_INCOMING_RTL (args[i].tree_value)
                  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
                set_decl_rtl (args[i].tree_value,
                              DECL_INCOMING_RTL (args[i].tree_value));

              mark_addressable (args[i].tree_value);

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                {
                  *may_tailcall = false;
                  maybe_complain_about_tail_call (exp,
                                                  "a callee-copied argument is"
                                                  " stored in the current"
                                                  " function's frame");
                }

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       max_int_size_in_bytes
                                                       (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
              maybe_complain_about_tail_call (exp,
                                              "argument must be passed"
                                              " by copying");
            }
          arg.pass_by_reference = true;
        }

      unsignedp = TYPE_UNSIGNED (type);
      arg.type = type;
      arg.mode
        = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                 fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = arg.mode;

      targetm.calls.warn_parameter_passing_abi (args_so_far, type);

      args[i].reg = targetm.calls.function_arg (args_so_far, arg);

      if (args[i].reg && CONST_INT_P (args[i].reg))
        args[i].reg = NULL;

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, arg);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             reg_parm_stack_space,
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (arg.mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      /* ??? Traditionally we've passed TYPE_MODE here, instead of the
         promoted_mode used for function_arg above.  However, the
         corresponding handling of incoming arguments in function.c
         does pass the promoted mode.  */
      arg.mode = TYPE_MODE (type);
      targetm.calls.function_arg_advance (args_so_far, arg);

      /* Store argument values for functions decorated with attribute
         alloc_size.  */
      if (argpos == alloc_idx[0])
        alloc_args[0] = args[i].tree_value;
      else if (argpos == alloc_idx[1])
        alloc_args[1] = args[i].tree_value;

      /* Save the actual argument that corresponds to the access attribute
         operand for later processing.  */
      if (attr_access *access = rdwr_idx.get (argpos))
        {
          if (POINTER_TYPE_P (type))
            {
              access->ptr = args[i].tree_value;
              /* A nonnull ACCESS->SIZE contains VLA bounds.  */
            }
          else
            {
              access->size = args[i].tree_value;
              gcc_assert (access->ptr == NULL_TREE);
            }
        }
    }

  if (alloc_args[0])
    {
      /* Check the arguments of functions decorated with attribute
         alloc_size.  */
      maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
    }

  /* Detect passing non-string arguments to functions expecting
     nul-terminated strings.  */
  maybe_warn_nonstring_arg (fndecl, exp);

  /* Check attribute access arguments.  */
  maybe_warn_rdwr_sizes (&rdwr_idx, fndecl, fntype, exp);

  /* Check calls to operator new for mismatched forms and attempts
     to deallocate unallocated objects.  */
  maybe_emit_free_warning (exp);
}

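/* Illustrative sketch (not from the GCC sources) of the complex
   argument splitting above: when the target's split_complex_arg hook
   accepts the type, a call such as

     void f (_Complex double z);
     f (z);

   is laid out as if it had been written

     void f (double z_re, double z_im);
     f (__real__ z, __imag__ z);

   with ARGS receiving one entry for each part.  */
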
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static poly_int64
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  poly_int64 unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will already be aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (multiple_p (stack_pointer_delta,
                                  preferred_stack_boundary));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (aligned_upper_bound (args_size->constant
                                                  + stack_pointer_delta,
                                                  preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = upper_bound (args_size->constant,
                                         reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}

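/* Illustrative arithmetic (not from the GCC sources): ignoring
   REG_PARM_STACK_SPACE, with a constant argument size of 20 bytes, a
   stack_pointer_delta of 4 and a preferred boundary of 16 bytes, the
   constant branch above computes

     aligned_upper_bound (20 + 4, 16) - 4 = 32 - 4 = 28

   so the block plus the outstanding stack-pointer delta together
   remain a multiple of the 16-byte boundary.  */
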
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        {
          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
            }
        }
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      poly_int64 copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (maybe_ne (args_size->constant, 0)
          && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
        must_preallocate = 1;
    }
  return must_preallocate;
}

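/* Illustrative arithmetic (not from the GCC sources): if a single
   BLKmode argument is computed by a nested call and occupies 40 bytes
   while the whole argument block takes 64 bytes, the final test above
   sees 40 * 2 = 80 >= 64 and so chooses to preallocate the block
   rather than push arguments incrementally.  */
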
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validated when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i;
      poly_int64 arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        {
          arg_reg = XEXP (argblock, 0);
          arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
        }

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          poly_uint64 units_on_stack = 0;
          machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
            continue;

          addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
          addr = plus_constant (Pmode, addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
              partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, units_on_stack);
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          poly_int64 offset_val;
          if (args[i].locate.where_pad != PAD_DOWNWARD)
            align = boundary;
          else if (poly_int_rtx_p (offset, &offset_val))
            {
              align = least_bit_hwi (boundary);
              unsigned int offset_align
                = known_alignment (offset_val) * BITS_PER_UNIT;
              if (offset_align != 0)
                align = MIN (align, offset_align);
            }
          set_mem_align (args[i].stack, align);

          addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
          addr = plus_constant (Pmode, addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, units_on_stack);
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        TREE_USED (fndecl) = 1;

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();        /* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return the static chain for this function, if any.  */

rtx
rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
{
  if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
    return NULL;

  return targetm.calls.static_chain (fndecl_or_type, incoming_p);
}

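/* Illustrative sketch (not from the GCC sources): a static chain is
   needed when calling a nested function that refers to its enclosing
   frame, as with the GNU C extension

     int outer (int x)
     {
       int inner (int y) { return x + y; }
       return inner (1);
     }

   The call to INNER passes a pointer to OUTER's frame in the
   target's static-chain location.  */
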
/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx_insn *scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  vec<rtx> cache;
} internal_arg_pointer_exp_state;

static rtx internal_arg_pointer_based_exp (const_rtx, bool);

/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with the first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   pointer it has.  */

static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;

  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
        {
          rtx val = NULL_RTX;
          unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
          /* Punt on pseudos set multiple times.  */
          if (idx < internal_arg_pointer_exp_state.cache.length ()
              && (internal_arg_pointer_exp_state.cache[idx]
                  != NULL_RTX))
            val = pc_rtx;
          else
            val = internal_arg_pointer_based_exp (SET_SRC (set), false);
          if (val != NULL_RTX)
            {
              if (idx >= internal_arg_pointer_exp_state.cache.length ())
                internal_arg_pointer_exp_state.cache
                  .safe_grow_cleared (idx + 1, true);
              internal_arg_pointer_exp_state.cache[idx] = val;
            }
        }
      if (NEXT_INSN (insn) == NULL_RTX)
        scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}

5275901c
JJ
3038/* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
3039 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
3040 it with fixed offset, or PC if this is with variable or unknown offset.
3041 TOPLEVEL is true if the function is invoked at the topmost level. */
3042
3043static rtx
e9f56944 3044internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
5275901c
JJ
3045{
3046 if (CONSTANT_P (rtl))
3047 return NULL_RTX;
3048
3049 if (rtl == crtl->args.internal_arg_pointer)
3050 return const0_rtx;
3051
3052 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
3053 return NULL_RTX;
3054
a20c5714
RS
3055 poly_int64 offset;
3056 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
5275901c
JJ
3057 {
3058 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
3059 if (val == NULL_RTX || val == pc_rtx)
3060 return val;
a20c5714 3061 return plus_constant (Pmode, val, offset);
5275901c
JJ
3062 }
3063
3064 /* When called at the topmost level, scan pseudo assignments in between the
3065 last scanned instruction in the tail call sequence and the latest insn
3066 in that sequence. */
3067 if (toplevel)
3068 internal_arg_pointer_based_exp_scan ();
3069
3070 if (REG_P (rtl))
3071 {
3072 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
9771b263
DN
3073 if (idx < internal_arg_pointer_exp_state.cache.length ())
3074 return internal_arg_pointer_exp_state.cache[idx];
5275901c
JJ
3075
3076 return NULL_RTX;
3077 }
3078
e9f56944
RS
3079 subrtx_iterator::array_type array;
3080 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
3081 {
3082 const_rtx x = *iter;
3083 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
3084 return pc_rtx;
3085 if (MEM_P (x))
3086 iter.skip_subrtxes ();
3087 }
5275901c
JJ
3088
3089 return NULL_RTX;
3090}
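
/* Added example for illustration (hypothetical RTL, not from the original
   file).  With pseudo 82 cached as (const_int 16) as in the sketch above:

     internal_arg_pointer_based_exp (argp, true)
       returns const0_rtx;
     internal_arg_pointer_based_exp ((plus (reg 82) (const_int -8)), true)
       returns (const_int 8);
     internal_arg_pointer_based_exp ((mult (reg 82) (reg 90)), true)
       returns pc_rtx.

   That is, a fixed offset propagates through PLUS, while any other use of
   an argp-based pseudo degrades to the "variable or unknown offset"
   answer.  */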

/* Return true if SIZE bytes starting from address ADDR might overlap an
   already-clobbered argument area.  This function is used to determine
   if we should give up a sibcall.  */

static bool
mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
{
  poly_int64 i;
  unsigned HOST_WIDE_INT start, end;
  rtx val;

  if (bitmap_empty_p (stored_args_map)
      && stored_args_watermark == HOST_WIDE_INT_M1U)
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    return false;
  else if (!poly_int_rtx_p (val, &i))
    return true;

  if (known_eq (size, 0U))
    return false;

  if (STACK_GROWS_DOWNWARD)
    i -= crtl->args.pretend_args_size;
  else
    i += crtl->args.pretend_args_size;

  if (ARGS_GROW_DOWNWARD)
    i = -i - size;

  /* We can ignore any references to the function's pretend args,
     which at this point would manifest as negative values of I.  */
  if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
    return false;

  start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
  if (!(i + size).is_constant (&end))
    end = HOST_WIDE_INT_M1U;

  if (end > stored_args_watermark)
    return true;

  end = MIN (end, SBITMAP_SIZE (stored_args_map));
  for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
    if (bitmap_bit_p (stored_args_map, k))
      return true;

  return false;
}
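
/* Worked example (editorial addition; the numbers are hypothetical).  With
   ARGS_GROW_DOWNWARD false and pretend_args_size zero, storing an 8-byte
   tail call argument at argp+16 marks bits 16..23 of stored_args_map.  A
   later load of 4 bytes from argp+20 reaches this function with I == 20 and
   END == 24, finds bit 20 set, and returns true, making the caller give up
   on the sibcall.  */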

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          poly_int64 size = 0;
          HOST_WIDE_INT const_size = 0;
          rtx_insn *before_arg = get_last_insn ();
          tree type = TREE_TYPE (args[i].tree_value);
          if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
            type = TREE_TYPE (first_field (type));
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (type) == BLKmode)
            {
              /* Variable-sized parameters should be described by a
                 PARALLEL instead.  */
              const_size = int_size_in_bytes (type);
              gcc_assert (const_size >= 0);
              nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
              size = const_size;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb, e.g. a QImode value when we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (args[i].locate.where_pad
                  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
                {
                  gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
                  if (maybe_lt (size, UNITS_PER_WORD))
                    {
                      rtx x;
                      poly_int64 shift
                        = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                      /* Assigning REG here rather than a temp makes
                         CALL_FUSAGE report the whole reg as used.
                         Strictly speaking, the call only uses SIZE
                         bytes at the msb end, but it doesn't seem worth
                         generating rtl to say that.  */
                      reg = gen_rtx_REG (word_mode, REGNO (reg));
                      x = expand_shift (LSHIFT_EXPR, word_mode,
                                        reg, shift, reg, 1);
                      if (x != reg)
                        emit_move_insn (reg, x);
                    }
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              /* SIZE and CONST_SIZE are 0 for partial arguments and
                 the size of a BLKmode type otherwise.  */
              gcc_checking_assert (known_eq (size, const_size));
              rtx mem = validize_mem (copy_rtx (args[i].value));

              /* Check for overlap with already clobbered argument area,
                 providing that this has non-zero size.  */
              if (is_sibcall
                  && const_size != 0
                  && (mem_might_overlap_already_clobbered_arg_p
                      (XEXP (args[i].value, 0), const_size)))
                *sibcall_failure = 1;

              if (const_size % UNITS_PER_WORD == 0
                  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
              else
                {
                  if (nregs > 1)
                    move_block_to_reg (REGNO (reg), mem, nregs - 1,
                                       args[i].mode);
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
                  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
                  unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
                  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
                                             word_mode, word_mode, false,
                                             NULL);
                  if (BYTES_BIG_ENDIAN)
                    x = expand_shift (LSHIFT_EXPR, word_mode, x,
                                      BITS_PER_WORD - bitsize, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && const_size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == PAD_DOWNWARD
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
                  int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
                  enum tree_code dir = (BYTES_BIG_ENDIAN
                                        ? RSHIFT_EXPR : LSHIFT_EXPR);
                  rtx x;

                  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg_mode (call_fusage, reg, TYPE_MODE (type));
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}
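
/* Added example (editorial; the sizes are hypothetical).  Consider a 6-byte
   BLKmode struct passed in registers on a 32-bit target (UNITS_PER_WORD
   == 4).  Then nregs == 2; if the memory copy is not word-aligned, the
   first word is copied with move_block_to_reg and the remaining 2 bytes
   (bitsize == 16 at bitoff == 32) are fetched with extract_bit_field into
   the second register, with an extra shift on big-endian targets so the
   payload lands at the end of the word the ABI expects.  */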

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we try to compute an adjustment to the stack pointer for
   an amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.

   Return true if this optimization is possible, storing the adjustment
   in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
   bytes that should be popped after the call.  */

static bool
combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
                                           poly_int64 unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  poly_int64 adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
                           preferred_unit_stack_boundary,
                           &unadjusted_alignment))
    return false;

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unsigned HOST_WIDE_INT tmp_misalignment;
  if (!known_misalignment (pending_stack_adjust,
                           preferred_unit_stack_boundary,
                           &tmp_misalignment))
    return false;
  unadjusted_alignment -= tmp_misalignment;
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
    adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;

  /* We need to know whether the adjusted argument size
     (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
     or a deallocation.  */
  if (!ordered_p (adjustment, unadjusted_args_size))
    return false;

  /* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  *adjustment_out = adjustment;
  return true;
}
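
/* Worked example (editorial addition; the values are hypothetical).
   Assume PREFERRED_UNIT_STACK_BOUNDARY == 16, stack_pointer_delta == 0,
   UNADJUSTED_ARGS_SIZE == 8 and pending_stack_adjust == 16.  Then
   UNADJUSTED_ALIGNMENT == 8 and the pending pop is already a multiple of
   16, so ADJUSTMENT becomes 16 - (16 - 8) == 8: popping only 8 bytes now
   leaves the stack 8 bytes "under-aligned", and pushing the 8 bytes of
   arguments restores 16-byte alignment at the call.  ARGS_SIZE->CONSTANT
   becomes 16 - 8 + 8 == 16, the amount to pop after the call.  */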

/* Scan expression X to check whether it dereferences any argument slots
   we have already clobbered with tail call arguments (as noted in the
   stored_args_map bitmap).
   Return nonzero if X dereferences such an argument slot,
   zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  /* We need not check the operands of the CALL expression itself.  */
  if (code == CALL)
    return 0;

  if (code == MEM)
    return (mem_might_overlap_already_clobbered_arg_p
            (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

/* Scan the sequence after INSN to check whether it dereferences any
   argument slots we have already clobbered with tail call arguments (as
   noted in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP,
   afterwards add the stack slots for ARG to the stored_args_map bitmap
   (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).  Return
   nonzero if the sequence after INSN dereferences such argument slots,
   zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
                                int mark_stored_args_map)
{
  poly_uint64 low, high;
  unsigned HOST_WIDE_INT const_low, const_high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
      if (ARGS_GROW_DOWNWARD)
        low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
      else
        low = arg->locate.slot_offset.constant;
      high = low + arg->locate.size.constant;

      const_low = constant_lower_bound (low);
      if (high.is_constant (&const_high))
        for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
          bitmap_set_bit (stored_args_map, i);
      else
        stored_args_watermark = MIN (stored_args_watermark, const_low);
    }
  return insn != NULL_RTX;
}
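
/* Added note (editorial example with hypothetical numbers): for an
   argument with a constant slot_offset of 8 and size 8, with arguments
   growing upward, the loop above sets bits 8..15 of stored_args_map.  A
   slot whose extent is not compile-time constant instead lowers
   stored_args_watermark, so every later reference ending at or above the
   lowest possible offset is conservatively treated as an overlap.  */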

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (machine_mode mode, bool left_p, rtx value)
{
  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  machine_mode value_mode = GET_MODE (value);
  poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);

  if (known_eq (shift, 0))
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
                           value, gen_int_shift_amount (value_mode, shift),
                           value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}
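
/* Added example (editorial): if the ABI returns an SImode value in the
   most significant half of a DImode hard register, SHIFT here is 32.
   With LEFT_P false an arithmetic right shift moves the value down to
   the least significant end (sign-extending, which is what e.g. the
   MIPS port expects); with LEFT_P true the converse shift is emitted
   before returning.  */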

/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}
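
/* Added note (editorial): a typical use is for a value sitting in a hard
   register whose class the target reports through
   targetm.class_likely_spilled_p (small classes such as a dedicated
   return register).  Copying it into a fresh pseudo right away keeps the
   register allocator from having to keep that hard register live across
   the code that follows.  */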

/* Helper function for expand_call.
   Return false if EXP is not implementable as a sibling call.  */

static bool
can_implement_as_sibling_call_p (tree exp,
                                 rtx structure_value_addr,
                                 tree funtype,
                                 tree fndecl,
                                 int flags,
                                 tree addr,
                                 const args_size &args_size)
{
  if (!targetm.have_sibcall_epilogue ())
    {
      maybe_complain_about_tail_call
        (exp,
         "machine description does not have"
         " a sibcall_epilogue instruction pattern");
      return false;
    }

  /* Doing sibling call optimization needs some work, since
     structure_value_addr can be allocated on the stack.
     It does not seem worth the effort since few optimizable
     sibling calls will return a structure.  */
  if (structure_value_addr != NULL_RTX)
    {
      maybe_complain_about_tail_call (exp, "callee returns a structure");
      return false;
    }

  /* Check whether the target is able to optimize the call
     into a sibcall.  */
  if (!targetm.function_ok_for_sibcall (fndecl, exp))
    {
      maybe_complain_about_tail_call (exp,
                                      "target is not able to optimize the"
                                      " call into a sibling call");
      return false;
    }

  /* Functions that do not return exactly once may not be sibcall
     optimized.  */
  if (flags & ECF_RETURNS_TWICE)
    {
      maybe_complain_about_tail_call (exp, "callee returns twice");
      return false;
    }
  if (flags & ECF_NORETURN)
    {
      maybe_complain_about_tail_call (exp, "callee does not return");
      return false;
    }

  if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
    {
      maybe_complain_about_tail_call (exp, "volatile function type");
      return false;
    }

  /* If the called function is nested in the current one, it might access
     some of the caller's arguments, but could clobber them beforehand if
     the argument areas are shared.  */
  if (fndecl && decl_function_context (fndecl) == current_function_decl)
    {
      maybe_complain_about_tail_call (exp, "nested function");
      return false;
    }

  /* If this function requires more stack slots than the current
     function, we cannot change it into a sibling call.
     crtl->args.pretend_args_size is not part of the
     stack allocated by our caller.  */
  if (maybe_gt (args_size.constant,
                crtl->args.size - crtl->args.pretend_args_size))
    {
      maybe_complain_about_tail_call (exp,
                                      "callee required more stack slots"
                                      " than the caller");
      return false;
    }

  /* If the callee pops its own arguments, then it must pop exactly
     the same number of arguments as the current function.  */
  if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
                                                args_size.constant),
                targetm.calls.return_pops_args (current_function_decl,
                                                TREE_TYPE
                                                (current_function_decl),
                                                crtl->args.size)))
    {
      maybe_complain_about_tail_call (exp,
                                      "inconsistent number of"
                                      " popped arguments");
      return false;
    }

  if (!lang_hooks.decls.ok_for_sibcall (fndecl))
    {
      maybe_complain_about_tail_call (exp, "frontend does not support"
                                      " sibling call");
      return false;
    }

  /* All checks passed.  */
  return true;
}
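
/* Added example (editorial): a call to a function declared with
   __attribute__((returns_twice)), such as setjmp, carries
   ECF_RETURNS_TWICE and is rejected above.  When the call is marked
   CALL_EXPR_MUST_TAIL_CALL, maybe_complain_about_tail_call turns each of
   these bail-outs into a diagnostic explaining why the tail call cannot
   be emitted.  */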

/* Update stack alignment when the parameter is passed on the stack,
   since the outgoing parameter requires extra alignment on the calling
   function side.  */

static void
update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
{
  if (crtl->stack_alignment_needed < locate->boundary)
    crtl->stack_alignment_needed = locate->boundary;
  if (crtl->preferred_stack_boundary < locate->boundary)
    crtl->preferred_stack_boundary = locate->boundary;
}
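
/* Added example (editorial, with hypothetical numbers): if a stack-passed
   argument has a locate.boundary of 256 bits (say a 32-byte vector on a
   target whose default boundary is 128), both crtl->stack_alignment_needed
   and crtl->preferred_stack_boundary are raised to 256 so the prologue
   establishes sufficient alignment for the outgoing argument area.  */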

/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx_insn *normal_call_insns = NULL;
  /* Sequence of insns to perform a tail "call".  */
  rtx_insn *tail_call_insns = NULL;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  tree rettype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  poly_int64 struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  poly_int64 unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ and ERF_ flags.  */
  int flags = 0;
  int return_flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
  char *stack_usage_map_buf = NULL;

  poly_int64 old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  poly_int64 old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  poly_int64 old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
      return_flags |= decl_return_flags (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
      if (CALL_EXPR_BY_DESCRIPTOR (exp))
        flags |= ECF_BY_DESCRIPTOR;
    }
  rettype = TREE_TYPE (exp);

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (rettype))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a non-looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (!(flags & ECF_LOOPING_CONST_OR_PURE))
      && (flags & ECF_NOTHROW)
      && (ignore || target == const0_rtx
          || TYPE_MODE (rettype) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
#endif

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
      && reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fntype))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
	  struct_value_size = -1;

	/* Even if it is semantically safe to use the target as the return
	   slot, it may not be sufficiently aligned for the return type.  */
	if (CALL_EXPR_RETURN_SLOT_OPT (exp)
	    && target
	    && MEM_P (target)
	    /* If rettype is addressable, we may not create a temporary.
	       If target is properly aligned at runtime and the compiler
	       just doesn't know about it, it will work fine, otherwise it
	       will be UB.  */
	    && (TREE_ADDRESSABLE (rettype)
		|| !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
		     && targetm.slow_unaligned_access (TYPE_MODE (rettype),
						       MEM_ALIGN (target)))))
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */
	    rtx d = assign_temp (rettype, 1, 1);
	    structure_value_addr = XEXP (d, 0);
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

3882 }
3883#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
3884 }
3885
099e9712 3886 /* Figure out the amount to which the stack should be aligned. */
099e9712 3887 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
b255a036
JH
3888 if (fndecl)
3889 {
3dafb85c 3890 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
17b29c0a
L
3891 /* Without automatic stack alignment, we can't increase preferred
3892 stack boundary. With automatic stack alignment, it is
3893 unnecessary since unless we can guarantee that all callers will
3894 align the outgoing stack properly, callee has to align its
3895 stack anyway. */
3896 if (i
3897 && i->preferred_incoming_stack_boundary
3898 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
b255a036
JH
3899 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3900 }
099e9712
JH
3901
3902 /* Operand 0 is a pointer-to-function; get the type of the function. */
09e2bf48 3903 funtype = TREE_TYPE (addr);
366de0ce 3904 gcc_assert (POINTER_TYPE_P (funtype));
099e9712
JH
3905 funtype = TREE_TYPE (funtype);
3906
078a18a4
SL
3907 /* Count whether there are actual complex arguments that need to be split
3908 into their real and imaginary parts. Munge the type_arg_types
3909 appropriately here as well. */
42ba5130 3910 if (targetm.calls.split_complex_arg)
ded9bf77 3911 {
078a18a4
SL
3912 call_expr_arg_iterator iter;
3913 tree arg;
3914 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3915 {
3916 tree type = TREE_TYPE (arg);
3917 if (type && TREE_CODE (type) == COMPLEX_TYPE
3918 && targetm.calls.split_complex_arg (type))
3919 num_complex_actuals++;
3920 }
ded9bf77 3921 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
ded9bf77
AH
3922 }
3923 else
3924 type_arg_types = TYPE_ARG_TYPES (funtype);
3925
099e9712 3926 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 3927 cfun->calls_alloca = 1;
099e9712
JH
3928
3929 /* If struct_value_rtx is 0, it means pass the address
078a18a4
SL
3930 as if it were an extra parameter. Put the argument expression
3931 in structure_value_addr_value. */
61f71b34 3932 if (structure_value_addr && struct_value == 0)
099e9712
JH
3933 {
3934 /* If structure_value_addr is a REG other than
3935 virtual_outgoing_args_rtx, we can use always use it. If it
3936 is not a REG, we must always copy it into a register.
3937 If it is virtual_outgoing_args_rtx, we must copy it to another
3938 register in some cases. */
f8cfc6aa 3939 rtx temp = (!REG_P (structure_value_addr)
099e9712
JH
3940 || (ACCUMULATE_OUTGOING_ARGS
3941 && stack_arg_under_construction
3942 && structure_value_addr == virtual_outgoing_args_rtx)
7ae4ad28 3943 ? copy_addr_to_reg (convert_memory_address
57782ad8 3944 (Pmode, structure_value_addr))
099e9712
JH
3945 : structure_value_addr);
3946
078a18a4
SL
3947 structure_value_addr_value =
3948 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
31db0fe0 3949 structure_value_addr_parm = 1;
099e9712
JH
3950 }
3951
3952 /* Count the arguments and set NUM_ACTUALS. */
078a18a4
SL
3953 num_actuals =
3954 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
099e9712
JH
3955
3956 /* Compute number of named args.
3a4d587b
AM
3957 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3958
3959 if (type_arg_types != 0)
3960 n_named_args
3961 = (list_length (type_arg_types)
3962 /* Count the struct value address, if it is passed as a parm. */
3963 + structure_value_addr_parm);
3964 else
3965 /* If we know nothing, treat all args as named. */
3966 n_named_args = num_actuals;
3967
3968 /* Start updating where the next arg would go.
3969
3970 On some machines (such as the PA) indirect calls have a different
3971 calling convention than normal calls. The fourth argument in
3972 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3973 or not. */
d5cc9181
JR
3974 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3975 args_so_far = pack_cumulative_args (&args_so_far_v);
3a4d587b
AM
3976
3977 /* Now possibly adjust the number of named args.
099e9712 3978 Normally, don't include the last named arg if anonymous args follow.
3a179764
KH
3979 We do include the last named arg if
3980 targetm.calls.strict_argument_naming() returns nonzero.
099e9712
JH
3981 (If no anonymous args follow, the result of list_length is actually
3982 one too large. This is harmless.)
3983
4ac8340c 3984 If targetm.calls.pretend_outgoing_varargs_named() returns
3a179764
KH
3985 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3986 this machine will be able to place unnamed args that were passed
3987 in registers into the stack. So treat all args as named. This
3988 allows the insns emitting for a specific argument list to be
3989 independent of the function declaration.
4ac8340c
KH
3990
3991 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3992 we do not have any reliable way to pass unnamed args in
3993 registers, so we must force them into memory. */
099e9712 3994
3a4d587b 3995 if (type_arg_types != 0
d5cc9181 3996 && targetm.calls.strict_argument_naming (args_so_far))
3a4d587b
AM
3997 ;
3998 else if (type_arg_types != 0
d5cc9181 3999 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3a4d587b
AM
4000 /* Don't include the last named arg. */
4001 --n_named_args;
099e9712 4002 else
3a4d587b 4003 /* Treat all args as named. */
099e9712
JH
4004 n_named_args = num_actuals;
4005
099e9712 4006 /* Make a vector to hold all the information about each arg. */
765fc0f7 4007 args = XCNEWVEC (struct arg_data, num_actuals);
099e9712 4008
d80d2d2a
KH
4009 /* Build up entries in the ARGS array, compute the size of the
4010 arguments into ARGS_SIZE, etc. */
099e9712 4011 initialize_argument_information (num_actuals, args, &args_size,
078a18a4 4012 n_named_args, exp,
45769134 4013 structure_value_addr_value, fndecl, fntype,
d5cc9181 4014 args_so_far, reg_parm_stack_space,
099e9712 4015 &old_stack_level, &old_pending_adj,
dd292d0a 4016 &must_preallocate, &flags,
6de9cd9a 4017 &try_tail_call, CALL_FROM_THUNK_P (exp));
099e9712
JH
4018
4019 if (args_size.var)
84b8030f 4020 must_preallocate = 1;
099e9712
JH
4021
4022 /* Now make final decision about preallocating stack space. */
4023 must_preallocate = finalize_must_preallocate (must_preallocate,
4024 num_actuals, args,
4025 &args_size);
4026
4027 /* If the structure value address will reference the stack pointer, we
4028 must stabilize it. We don't need to do this if we know that we are
4029 not going to adjust the stack pointer in processing this call. */
4030
4031 if (structure_value_addr
4032 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
4033 || reg_mentioned_p (virtual_outgoing_args_rtx,
4034 structure_value_addr))
4035 && (args_size.var
a20c5714
RS
4036 || (!ACCUMULATE_OUTGOING_ARGS
4037 && maybe_ne (args_size.constant, 0))))
099e9712 4038 structure_value_addr = copy_to_reg (structure_value_addr);
0a1c58a2 4039
7ae4ad28 4040 /* Tail calls can make things harder to debug, and we've traditionally
194c7c45 4041 pushed these optimizations into -O2. Don't try if we're already
fb158467 4042 expanding a call, as that means we're an argument. Don't try if
3fbd86b1 4043 there's cleanups, as we know there's code to follow the call. */
099e9712 4044 if (currently_expanding_call++ != 0
44662f68 4045 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
6de9cd9a 4046 || args_size.var
6fb5fa3c 4047 || dbg_cnt (tail_call) == false)
6de9cd9a 4048 try_tail_call = 0;
099e9712 4049
4b8e35f1
JJ
4050 /* Workaround buggy C/C++ wrappers around Fortran routines with
4051 character(len=constant) arguments if the hidden string length arguments
4052 are passed on the stack; if the callers forget to pass those arguments,
4053 attempting to tail call in such routines leads to stack corruption.
4054 Avoid tail calls in functions where at least one such hidden string
4055 length argument is passed (partially or fully) on the stack in the
4056 caller and the callee needs to pass any arguments on the stack.
4057 See PR90329. */
4058 if (try_tail_call && maybe_ne (args_size.constant, 0))
4059 for (tree arg = DECL_ARGUMENTS (current_function_decl);
4060 arg; arg = DECL_CHAIN (arg))
4061 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
4062 {
4063 subrtx_iterator::array_type array;
4064 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
4065 if (MEM_P (*iter))
4066 {
4067 try_tail_call = 0;
4068 break;
4069 }
4070 }
4071
9a385c2d
DM
4072 /* If the user has marked the function as requiring tail-call
4073 optimization, attempt it. */
4074 if (must_tail_call)
4075 try_tail_call = 1;
4076
099e9712 4077 /* Rest of purposes for tail call optimizations to fail. */
b40d90e6 4078 if (try_tail_call)
9a385c2d
DM
4079 try_tail_call = can_implement_as_sibling_call_p (exp,
4080 structure_value_addr,
4081 funtype,
9a385c2d 4082 fndecl,
b40d90e6 4083 flags, addr, args_size);
497eb8c3 4084
c69cd1f5
JJ
4085 /* Check if caller and callee disagree in promotion of function
4086 return value. */
4087 if (try_tail_call)
4088 {
ef4bddc2
RS
4089 machine_mode caller_mode, caller_promoted_mode;
4090 machine_mode callee_mode, callee_promoted_mode;
c69cd1f5
JJ
4091 int caller_unsignedp, callee_unsignedp;
4092 tree caller_res = DECL_RESULT (current_function_decl);
4093
4094 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
cde0f3fd 4095 caller_mode = DECL_MODE (caller_res);
c69cd1f5 4096 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
cde0f3fd
PB
4097 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
4098 caller_promoted_mode
4099 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
4100 &caller_unsignedp,
4101 TREE_TYPE (current_function_decl), 1);
4102 callee_promoted_mode
666e3ceb 4103 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
cde0f3fd 4104 &callee_unsignedp,
666e3ceb 4105 funtype, 1);
c69cd1f5
JJ
4106 if (caller_mode != VOIDmode
4107 && (caller_promoted_mode != callee_promoted_mode
4108 || ((caller_mode != caller_promoted_mode
4109 || callee_mode != callee_promoted_mode)
4110 && (caller_unsignedp != callee_unsignedp
bd4288c0 4111 || partial_subreg_p (caller_mode, callee_mode)))))
9a385c2d
DM
4112 {
4113 try_tail_call = 0;
4114 maybe_complain_about_tail_call (exp,
4115 "caller and callee disagree in"
4116 " promotion of function"
4117 " return value");
4118 }
c69cd1f5
JJ
4119 }
4120
01973e26
L
4121 /* Ensure current function's preferred stack boundary is at least
4122 what we need. Stack alignment may also increase preferred stack
4123 boundary. */
957ed738
L
4124 for (i = 0; i < num_actuals; i++)
4125 if (reg_parm_stack_space > 0
4126 || args[i].reg == 0
4127 || args[i].partial != 0
4128 || args[i].pass_on_stack)
4129 update_stack_alignment_for_call (&args[i].locate);
b5f772ce 4130 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
cb91fab0 4131 crtl->preferred_stack_boundary = preferred_stack_boundary;
01973e26
L
4132 else
4133 preferred_stack_boundary = crtl->preferred_stack_boundary;
c2f8b491 4134
099e9712 4135 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
497eb8c3 4136
3cf3da88
EB
4137 if (flag_callgraph_info)
4138 record_final_call (fndecl, EXPR_LOCATION (exp));
4139
0a1c58a2
JL
4140 /* We want to make two insn chains; one for a sibling call, the other
4141 for a normal call. We will select one of the two chains after
4142 initial RTL generation is complete. */
b820d2b8 4143 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
0a1c58a2
JL
4144 {
4145 int sibcall_failure = 0;
f5143c46 4146 /* We want to emit any pending stack adjustments before the tail
0a1c58a2 4147 recursion "call". That way we know any adjustment after the tail
7ae4ad28 4148 recursion call can be ignored if we indeed use the tail
0a1c58a2 4149 call expansion. */
7f2f0a01 4150 saved_pending_stack_adjust save;
48810515
DM
4151 rtx_insn *insns, *before_call, *after_args;
4152 rtx next_arg_reg;
39842893 4153
0a1c58a2
JL
4154 if (pass == 0)
4155 {
0a1c58a2
JL
4156 /* State variables we need to save and restore between
4157 iterations. */
7f2f0a01 4158 save_pending_stack_adjust (&save);
0a1c58a2 4159 }
f2d33f13
JH
4160 if (pass)
4161 flags &= ~ECF_SIBCALL;
4162 else
4163 flags |= ECF_SIBCALL;
51bbfa0c 4164
0a1c58a2 4165 /* Other state variables that we must reinitialize each time
f2d33f13 4166 through the loop (that are not initialized by the loop itself). */
0a1c58a2
JL
4167 argblock = 0;
4168 call_fusage = 0;
fa76d9e0 4169
f725a3ec 4170 /* Start a new sequence for the normal call case.
51bbfa0c 4171
0a1c58a2
JL
4172 From this point on, if the sibling call fails, we want to set
4173 sibcall_failure instead of continuing the loop. */
4174 start_sequence ();
eecb6f50 4175
0a1c58a2
JL
4176 /* Don't let pending stack adjusts add up to too much.
4177 Also, do all pending adjustments now if there is any chance
4178 this might be a call to alloca or if we are expanding a sibling
9dd9bf80 4179 call sequence.
63579539
DJ
4180 Also do the adjustments before a throwing call, otherwise
4181 exception handling can fail; PR 19225. */
a20c5714
RS
4182 if (maybe_ge (pending_stack_adjust, 32)
4183 || (maybe_ne (pending_stack_adjust, 0)
9dd9bf80 4184 && (flags & ECF_MAY_BE_ALLOCA))
a20c5714 4185 || (maybe_ne (pending_stack_adjust, 0)
63579539 4186 && flag_exceptions && !(flags & ECF_NOTHROW))
0a1c58a2
JL
4187 || pass == 0)
4188 do_pending_stack_adjust ();
51bbfa0c 4189
0a1c58a2 4190 /* Precompute any arguments as needed. */
f8a097cd 4191 if (pass)
84b8030f 4192 precompute_arguments (num_actuals, args);
51bbfa0c 4193
0a1c58a2
JL
4194 /* Now we are about to start emitting insns that can be deleted
4195 if a libcall is deleted. */
84b8030f 4196 if (pass && (flags & ECF_MALLOC))
0a1c58a2 4197 start_sequence ();
51bbfa0c 4198
87a5dc2d
JW
4199 if (pass == 0
4200 && crtl->stack_protect_guard
4201 && targetm.stack_protect_runtime_enabled_p ())
b755446c
RH
4202 stack_protect_epilogue ();
4203
099e9712 4204 adjusted_args_size = args_size;
ce48579b
RH
4205 /* Compute the actual size of the argument block required. The variable
4206 and constant sizes must be combined, the size may have to be rounded,
4207 and there may be a minimum required size. When generating a sibcall
4208 pattern, do not round up, since we'll be re-using whatever space our
4209 caller provided. */
4210 unadjusted_args_size
f725a3ec
KH
4211 = compute_argument_block_size (reg_parm_stack_space,
4212 &adjusted_args_size,
5d059ed9 4213 fndecl, fntype,
ce48579b
RH
4214 (pass == 0 ? 0
4215 : preferred_stack_boundary));
4216
f725a3ec 4217 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
ce48579b 4218
f8a097cd 4219 /* The argument block when performing a sibling call is the
c22cacf3 4220 incoming argument block. */
f8a097cd 4221 if (pass == 0)
c67846f2 4222 {
2e3f842f 4223 argblock = crtl->args.internal_arg_pointer;
76e048a8
KT
4224 if (STACK_GROWS_DOWNWARD)
4225 argblock
4226 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4227 else
4228 argblock
4229 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4230
a20c5714
RS
4231 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4232 stored_args_map = sbitmap_alloc (map_size);
f61e445a 4233 bitmap_clear (stored_args_map);
a20c5714 4234 stored_args_watermark = HOST_WIDE_INT_M1U;
c67846f2 4235 }
ce48579b 4236
0a1c58a2
JL
4237 /* If we have no actual push instructions, or shouldn't use them,
4238 make space for all args right now. */
099e9712 4239 else if (adjusted_args_size.var != 0)
51bbfa0c 4240 {
0a1c58a2
JL
4241 if (old_stack_level == 0)
4242 {
9eac0f2a 4243 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f 4244 old_stack_pointer_delta = stack_pointer_delta;
0a1c58a2
JL
4245 old_pending_adj = pending_stack_adjust;
4246 pending_stack_adjust = 0;
0a1c58a2
JL
4247 /* stack_arg_under_construction says whether a stack arg is
4248 being constructed at the old stack level. Pushing the stack
4249 gets a clean outgoing argument block. */
4250 old_stack_arg_under_construction = stack_arg_under_construction;
4251 stack_arg_under_construction = 0;
0a1c58a2 4252 }
099e9712 4253 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
a11e0df4 4254 if (flag_stack_usage_info)
d3c12306 4255 current_function_has_unbounded_dynamic_stack_size = 1;
51bbfa0c 4256 }
0a1c58a2
JL
4257 else
4258 {
4259 /* Note that we must go through the motions of allocating an argument
4260 block even if the size is zero because we may be storing args
4261 in the area reserved for register arguments, which may be part of
4262 the stack frame. */
26a258fe 4263
a20c5714 4264 poly_int64 needed = adjusted_args_size.constant;
51bbfa0c 4265
0a1c58a2
JL
4266 /* Store the maximum argument space used. It will be pushed by
4267 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4268 checking). */
51bbfa0c 4269
a20c5714
RS
4270 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4271 needed);
51bbfa0c 4272
0a1c58a2
JL
4273 if (must_preallocate)
4274 {
f73ad30e
JH
4275 if (ACCUMULATE_OUTGOING_ARGS)
4276 {
f8a097cd
JH
4277 /* Since the stack pointer will never be pushed, it is
4278 possible for the evaluation of a parm to clobber
4279 something we have already written to the stack.
4280 Since most function calls on RISC machines do not use
4281 the stack, this is uncommon, but must work correctly.
26a258fe 4282
f73ad30e 4283 Therefore, we save any area of the stack that was already
f8a097cd
JH
4284 written and that we are using. Here we set up to do this
4285 by making a new stack usage map from the old one. The
f725a3ec 4286 actual save will be done by store_one_arg.
26a258fe 4287
f73ad30e
JH
4288 Another approach might be to try to reorder the argument
4289 evaluations to avoid this conflicting stack usage. */
26a258fe 4290
f8a097cd
JH
4291 /* Since we will be writing into the entire argument area,
4292 the map must be allocated for its entire size, not just
4293 the part that is the responsibility of the caller. */
5d059ed9 4294 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 4295 needed += reg_parm_stack_space;
51bbfa0c 4296
a20c5714 4297 poly_int64 limit = needed;
6dad9361 4298 if (ARGS_GROW_DOWNWARD)
a20c5714
RS
4299 limit += 1;
4300
4301 /* For polynomial sizes, this is the maximum possible
4302 size needed for arguments with a constant size
4303 and offset. */
4304 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4305 highest_outgoing_arg_in_use
4306 = MAX (initial_highest_arg_in_use, const_limit);
6dad9361 4307
04695783 4308 free (stack_usage_map_buf);
5ed6ace5 4309 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 4310 stack_usage_map = stack_usage_map_buf;
51bbfa0c 4311
f73ad30e 4312 if (initial_highest_arg_in_use)
2e09e75a
JM
4313 memcpy (stack_usage_map, initial_stack_usage_map,
4314 initial_highest_arg_in_use);
2f4aa534 4315
f73ad30e 4316 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 4317 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
4318 (highest_outgoing_arg_in_use
4319 - initial_highest_arg_in_use));
4320 needed = 0;
2f4aa534 4321
f8a097cd
JH
4322 /* The address of the outgoing argument list must not be
4323 copied to a register here, because argblock would be left
4324 pointing to the wrong place after the call to
f725a3ec 4325 allocate_dynamic_stack_space below. */
2f4aa534 4326
f73ad30e 4327 argblock = virtual_outgoing_args_rtx;
f725a3ec 4328 }
f73ad30e 4329 else
26a258fe 4330 {
a20c5714
RS
4331 /* Try to reuse some or all of the pending_stack_adjust
4332 to get this space. */
4333 if (inhibit_defer_pop == 0
4334 && (combine_pending_stack_adjustment_and_call
4335 (&needed,
4336 unadjusted_args_size,
4337 &adjusted_args_size,
4338 preferred_unit_stack_boundary)))
0a1c58a2 4339 {
ce48579b
RH
4340 /* combine_pending_stack_adjustment_and_call computes
4341 an adjustment before the arguments are allocated.
4342 Account for them and see whether or not the stack
4343 needs to go up or down. */
4344 needed = unadjusted_args_size - needed;
4345
a20c5714
RS
4346 /* Checked by
4347 combine_pending_stack_adjustment_and_call. */
4348 gcc_checking_assert (ordered_p (needed, 0));
4349 if (maybe_lt (needed, 0))
f73ad30e 4350 {
ce48579b
RH
4351 /* We're releasing stack space. */
4352 /* ??? We can avoid any adjustment at all if we're
4353 already aligned. FIXME. */
4354 pending_stack_adjust = -needed;
4355 do_pending_stack_adjust ();
f73ad30e
JH
4356 needed = 0;
4357 }
f725a3ec 4358 else
ce48579b
RH
4359 /* We need to allocate space. We'll do that in
4360 push_block below. */
4361 pending_stack_adjust = 0;
0a1c58a2 4362 }
ce48579b
RH
4363
4364 /* Special case this because overhead of `push_block' in
4365 this case is non-trivial. */
a20c5714 4366 if (known_eq (needed, 0))
f73ad30e 4367 argblock = virtual_outgoing_args_rtx;
0a1c58a2 4368 else
d892f288 4369 {
a20c5714
RS
4370 rtx needed_rtx = gen_int_mode (needed, Pmode);
4371 argblock = push_block (needed_rtx, 0, 0);
6dad9361
TS
4372 if (ARGS_GROW_DOWNWARD)
4373 argblock = plus_constant (Pmode, argblock, needed);
d892f288 4374 }
f73ad30e 4375
f8a097cd
JH
4376 /* We only really need to call `copy_to_reg' in the case
4377 where push insns are going to be used to pass ARGBLOCK
4378 to a function call in ARGS. In that case, the stack
4379 pointer changes value from the allocation point to the
4380 call point, and hence the value of
4381 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4382 as well always do it. */
f73ad30e 4383 argblock = copy_to_reg (argblock);
38afb23f
OH
4384 }
4385 }
4386 }

      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* The save/restore code in store_one_arg handles all
	     cases except one: a constructor call (including a C
	     function returning a BLKmode struct) to initialize
	     an argument.  */
	  if (stack_arg_under_construction)
	    {
	      rtx push_size
		= (gen_int_mode
		   (adjusted_args_size.constant
		    + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
						      : TREE_TYPE (fndecl))
		       ? 0 : reg_parm_stack_space), Pmode));
	      if (old_stack_level == 0)
		{
		  emit_stack_save (SAVE_BLOCK, &old_stack_level);
		  old_stack_pointer_delta = stack_pointer_delta;
		  old_pending_adj = pending_stack_adjust;
		  pending_stack_adjust = 0;
		  /* stack_arg_under_construction says whether a stack
		     arg is being constructed at the old stack level.
		     Pushing the stack gets a clean outgoing argument
		     block.  */
		  old_stack_arg_under_construction
		    = stack_arg_under_construction;
		  stack_arg_under_construction = 0;
		  /* Make a new map for the new argument list.  */
		  free (stack_usage_map_buf);
		  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
		  stack_usage_map = stack_usage_map_buf;
		  highest_outgoing_arg_in_use = 0;
		  stack_usage_watermark = HOST_WIDE_INT_M1U;
		}
	      /* We can pass TRUE as the 4th argument because we just
		 saved the stack pointer and will restore it right after
		 the call.  */
	      allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
					    -1, true);
	    }

	  /* If argument evaluation might modify the stack pointer,
	     copy the address of the argument list to a register.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].pass_on_stack)
	      {
		argblock = copy_addr_to_reg (argblock);
		break;
	      }
	}

      compute_argument_addresses (args, argblock, num_actuals);

      /* Stack is properly aligned, pops can't safely be deferred during
	 the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Precompute all register parameters.  It isn't safe to compute
	 anything once we have started filling any specific hard regs.
	 TLS symbols sometimes need a call to resolve.  Precompute
	 register parameters before any stack pointer manipulation
	 to avoid unaligned stack in the called function.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      OK_DEFER_POP;

      /* Perform stack alignment before the first push (the last arg).  */
      if (argblock == 0
	  && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
	  && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
	{
	  /* When the stack adjustment is pending, we get better code
	     by combining the adjustments.  */
	  if (maybe_ne (pending_stack_adjust, 0)
	      && ! inhibit_defer_pop
	      && (combine_pending_stack_adjustment_and_call
		  (&pending_stack_adjust,
		   unadjusted_args_size,
		   &adjusted_args_size,
		   preferred_unit_stack_boundary)))
	    do_pending_stack_adjust ();
	  else if (argblock == 0)
	    anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
					     - unadjusted_args_size,
					     Pmode));
	}
      /* Now that the stack is properly aligned, pops can't safely
	 be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

d3c12306
EB
4478 /* Record the maximum pushed stack space size. We need to delay
4479 doing it this far to take into account the optimization done
4480 by combine_pending_stack_adjustment_and_call. */
a11e0df4 4481 if (flag_stack_usage_info
d3c12306
EB
4482 && !ACCUMULATE_OUTGOING_ARGS
4483 && pass
4484 && adjusted_args_size.var == 0)
4485 {
a20c5714
RS
4486 poly_int64 pushed = (adjusted_args_size.constant
4487 + pending_stack_adjust);
4488 current_function_pushed_stack_size
4489 = upper_bound (current_function_pushed_stack_size, pushed);
d3c12306
EB
4490 }
4491
09e2bf48 4492 funexp = rtx_for_function_call (fndecl, addr);
51bbfa0c 4493
5039610b
SL
4494 if (CALL_EXPR_STATIC_CHAIN (exp))
4495 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
6de9cd9a
DN
4496 else
4497 static_chain_value = 0;
4498
f73ad30e 4499#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
4500 /* Save the fixed argument area if it's part of the caller's frame and
4501 is clobbered by argument setup for this call. */
f8a097cd 4502 if (ACCUMULATE_OUTGOING_ARGS && pass)
f73ad30e
JH
4503 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4504 &low_to_save, &high_to_save);
b94301c2 4505#endif
51bbfa0c 4506
0a1c58a2
JL
4507 /* Now store (and compute if necessary) all non-register parms.
4508 These come before register parms, since they can require block-moves,
4509 which could clobber the registers used for register parms.
4510 Parms which have partial registers are not stored here,
4511 but we do preallocate space here if they want that. */
51bbfa0c 4512
0a1c58a2 4513 for (i = 0; i < num_actuals; i++)
0196c95e 4514 {
31db0fe0 4515 if (args[i].reg == 0 || args[i].pass_on_stack)
0196c95e 4516 {
48810515 4517 rtx_insn *before_arg = get_last_insn ();
0196c95e 4518
ddc923b5
MP
4519 /* We don't allow passing huge (> 2^30 B) arguments
4520 by value. It would cause an overflow later on. */
a20c5714 4521 if (constant_lower_bound (adjusted_args_size.constant)
ddc923b5
MP
4522 >= (1 << (HOST_BITS_PER_INT - 2)))
4523 {
4524 sorry ("passing too large argument on stack");
4525 continue;
4526 }
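 /* Worked bound (assuming the common 32-bit HOST_BITS_PER_INT):
    1 << (32 - 2) is 2^30 bytes, i.e. 1 GiB, so any by-value argument
    of that size or larger is rejected here instead of overflowing
    the size arithmetic later. */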
4527
0196c95e
JJ
4528 if (store_one_arg (&args[i], argblock, flags,
4529 adjusted_args_size.var != 0,
4530 reg_parm_stack_space)
4531 || (pass == 0
4532 && check_sibcall_argument_overlap (before_arg,
4533 &args[i], 1)))
4534 sibcall_failure = 1;
4535 }
4536
2b1c5433 4537 if (args[i].stack)
7d810276
JJ
4538 call_fusage
4539 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4540 gen_rtx_USE (VOIDmode, args[i].stack),
4541 call_fusage);
0196c95e 4542 }
0a1c58a2
JL
4543
4544 /* If we have a parm that is passed in registers but not in memory
4545 and whose alignment does not permit a direct copy into registers,
4546 make a group of pseudos that correspond to each register that we
4547 will later fill. */
4548 if (STRICT_ALIGNMENT)
4549 store_unaligned_arguments_into_pseudos (args, num_actuals);
4550
4551 /* Now store any partially-in-registers parm.
4552 This is the last place a block-move can happen. */
4553 if (reg_parm_seen)
4554 for (i = 0; i < num_actuals; i++)
4555 if (args[i].partial != 0 && ! args[i].pass_on_stack)
c67846f2 4556 {
48810515 4557 rtx_insn *before_arg = get_last_insn ();
c67846f2 4558
99206968
KT
4559 /* On targets with weird calling conventions (e.g. PA) it's
4560 hard to ensure that all cases of argument overlap between
4561 stack and registers work. Play it safe and bail out. */
4562 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4563 {
4564 sibcall_failure = 1;
4565 break;
4566 }
4567
4c6b3b2a
JJ
4568 if (store_one_arg (&args[i], argblock, flags,
4569 adjusted_args_size.var != 0,
4570 reg_parm_stack_space)
4571 || (pass == 0
4572 && check_sibcall_argument_overlap (before_arg,
0cdca92b 4573 &args[i], 1)))
c67846f2
JJ
4574 sibcall_failure = 1;
4575 }
51bbfa0c 4576
2f21e1ba
BS
4577 bool any_regs = false;
4578 for (i = 0; i < num_actuals; i++)
4579 if (args[i].reg != NULL_RTX)
4580 {
4581 any_regs = true;
4582 targetm.calls.call_args (args[i].reg, funtype);
4583 }
4584 if (!any_regs)
4585 targetm.calls.call_args (pc_rtx, funtype);
4586
4587 /* Figure out the register where the value, if any, will come back. */
4588 valreg = 0;
2f21e1ba
BS
4589 if (TYPE_MODE (rettype) != VOIDmode
4590 && ! structure_value_addr)
4591 {
4592 if (pcc_struct_value)
31db0fe0
ML
4593 valreg = hard_function_value (build_pointer_type (rettype),
4594 fndecl, NULL, (pass == 0));
2f21e1ba 4595 else
31db0fe0
ML
4596 valreg = hard_function_value (rettype, fndecl, fntype,
4597 (pass == 0));
2f21e1ba
BS
4598
4599 /* If VALREG is a PARALLEL whose first member has a zero
4600 offset, use that. This is for targets such as m68k that
4601 return the same value in multiple places. */
4602 if (GET_CODE (valreg) == PARALLEL)
4603 {
4604 rtx elem = XVECEXP (valreg, 0, 0);
4605 rtx where = XEXP (elem, 0);
4606 rtx offset = XEXP (elem, 1);
4607 if (offset == const0_rtx
4608 && GET_MODE (where) == GET_MODE (valreg))
4609 valreg = where;
4610 }
4611 }
4612
0a1c58a2
JL
4613 /* If register arguments require space on the stack and stack space
4614 was not preallocated, allocate stack space here for arguments
4615 passed in registers. */
5d059ed9 4616 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
81464b2c 4617 && !ACCUMULATE_OUTGOING_ARGS
f725a3ec 4618 && must_preallocate == 0 && reg_parm_stack_space > 0)
0a1c58a2 4619 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12 4620
0a1c58a2
JL
4621 /* Pass the function the address in which to return a
4622 structure value. */
4623 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4624 {
7ae4ad28 4625 structure_value_addr
5ae6cd0d 4626 = convert_memory_address (Pmode, structure_value_addr);
61f71b34 4627 emit_move_insn (struct_value,
0a1c58a2
JL
4628 force_reg (Pmode,
4629 force_operand (structure_value_addr,
4630 NULL_RTX)));
4631
f8cfc6aa 4632 if (REG_P (struct_value))
61f71b34 4633 use_reg (&call_fusage, struct_value);
0a1c58a2 4634 }
c2939b57 4635
05e6ee93 4636 after_args = get_last_insn ();
78bcf3dc
EB
4637 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4638 static_chain_value, &call_fusage,
4639 reg_parm_seen, flags);
6b8805cf 4640
0cdca92b
DJ
4641 load_register_parameters (args, num_actuals, &call_fusage, flags,
4642 pass == 0, &sibcall_failure);
f725a3ec 4643
0a1c58a2
JL
4644 /* Save a pointer to the last insn before the call, so that we can
4645 later safely search backwards to find the CALL_INSN. */
4646 before_call = get_last_insn ();
51bbfa0c 4647
7d167afd
JJ
4648 /* Set up next argument register. For sibling calls on machines
4649 with register windows this should be the incoming register. */
7d167afd 4650 if (pass == 0)
6783fdb7
RS
4651 next_arg_reg = targetm.calls.function_incoming_arg
4652 (args_so_far, function_arg_info::end_marker ());
7d167afd 4653 else
6783fdb7
RS
4654 next_arg_reg = targetm.calls.function_arg
4655 (args_so_far, function_arg_info::end_marker ());
7d167afd 4656
e384e6b5
BS
4657 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4658 {
4659 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3d9684ae 4660 arg_nr = num_actuals - arg_nr - 1;
b3681f13
TV
4661 if (arg_nr >= 0
4662 && arg_nr < num_actuals
4663 && args[arg_nr].reg
e384e6b5
BS
4664 && valreg
4665 && REG_P (valreg)
4666 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4667 call_fusage
4668 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
f7df4a84 4669 gen_rtx_SET (valreg, args[arg_nr].reg),
e384e6b5
BS
4670 call_fusage);
4671 }
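 /* Concrete instance (illustrative): memcpy returns its first
    argument, so for such a call the (set valreg arg-reg) note added
    above records that the return register holds the same value as
    that argument's register. */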
0a1c58a2
JL
4672 /* All arguments and registers used for the call must be set up by
4673 now! */
4674
ce48579b 4675 /* Stack must be properly aligned now. */
366de0ce 4676 gcc_assert (!pass
a20c5714
RS
4677 || multiple_p (stack_pointer_delta,
4678 preferred_unit_stack_boundary));
ebcd0b57 4679
0a1c58a2 4680 /* Generate the actual call instruction. */
6de9cd9a 4681 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
099e9712 4682 adjusted_args_size.constant, struct_value_size,
7d167afd 4683 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
d5cc9181 4684 flags, args_so_far);
0a1c58a2 4685
1e288103 4686 if (flag_ipa_ra)
4f660b15 4687 {
48810515
DM
4688 rtx_call_insn *last;
4689 rtx datum = NULL_RTX;
4f660b15
RO
4690 if (fndecl != NULL_TREE)
4691 {
4692 datum = XEXP (DECL_RTL (fndecl), 0);
4693 gcc_assert (datum != NULL_RTX
4694 && GET_CODE (datum) == SYMBOL_REF);
4695 }
4696 last = last_call_insn ();
4697 add_reg_note (last, REG_CALL_DECL, datum);
4698 }
4699
05e6ee93
MM
4700 /* If the call setup or the call itself overlaps with anything
4701 of the argument setup we probably clobbered our call address.
4702 In that case we can't do sibcalls. */
4703 if (pass == 0
4704 && check_sibcall_argument_overlap (after_args, 0, 0))
4705 sibcall_failure = 1;
4706
bef5d8b6
RS
4707 /* If a non-BLKmode value is returned at the most significant end
4708 of a register, shift the register right by the appropriate amount
4709 and update VALREG accordingly. BLKmode values are handled by the
4710 group load/store machinery below. */
4711 if (!structure_value_addr
4712 && !pcc_struct_value
66de4d7c 4713 && TYPE_MODE (rettype) != VOIDmode
28ed065e 4714 && TYPE_MODE (rettype) != BLKmode
66de4d7c 4715 && REG_P (valreg)
28ed065e 4716 && targetm.calls.return_in_msb (rettype))
bef5d8b6 4717 {
28ed065e 4718 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
bef5d8b6 4719 sibcall_failure = 1;
28ed065e 4720 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
bef5d8b6
RS
4721 }
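 /* Worked example (illustrative): with return_in_msb on a 32-bit
    big-endian target, a QImode result sits in the top byte of the
    return register, so shift_return_value shifts it right by
    32 - 8 = 24 bits before the low part is taken below. */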
4722
84b8030f 4723 if (pass && (flags & ECF_MALLOC))
0a1c58a2
JL
4724 {
4725 rtx temp = gen_reg_rtx (GET_MODE (valreg));
48810515 4726 rtx_insn *last, *insns;
0a1c58a2 4727
f725a3ec 4728 /* The return value from a malloc-like function is a pointer. */
28ed065e 4729 if (TREE_CODE (rettype) == POINTER_TYPE)
d154bfa2 4730 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
0a1c58a2
JL
4731
4732 emit_move_insn (temp, valreg);
4733
67914693 4734 /* The return value from a malloc-like function cannot alias
0a1c58a2
JL
4735 anything else. */
4736 last = get_last_insn ();
65c5f2a6 4737 add_reg_note (last, REG_NOALIAS, temp);
0a1c58a2
JL
4738
4739 /* Write out the sequence. */
4740 insns = get_insns ();
4741 end_sequence ();
2f937369 4742 emit_insn (insns);
0a1c58a2
JL
4743 valreg = temp;
4744 }
51bbfa0c 4745
6fb5fa3c
DB
4746 /* For calls to `setjmp', etc., inform
4747 function.c:setjmp_warnings that it should complain if
4748 nonvolatile values are live. For functions that cannot
4749 return, inform flow that control does not fall through. */
51bbfa0c 4750
6e14af16 4751 if ((flags & ECF_NORETURN) || pass == 0)
c2939b57 4752 {
570a98eb 4753 /* The barrier must be emitted
0a1c58a2
JL
4754 immediately after the CALL_INSN. Some ports emit more
4755 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 4756
48810515 4757 rtx_insn *last = get_last_insn ();
4b4bf941 4758 while (!CALL_P (last))
0a1c58a2
JL
4759 {
4760 last = PREV_INSN (last);
4761 /* There was no CALL_INSN? */
366de0ce 4762 gcc_assert (last != before_call);
0a1c58a2 4763 }
51bbfa0c 4764
570a98eb 4765 emit_barrier_after (last);
8af61113 4766
f451eeef
JS
4767 /* Stack adjustments after a noreturn call are dead code.
4768 However when NO_DEFER_POP is in effect, we must preserve
4769 stack_pointer_delta. */
4770 if (inhibit_defer_pop == 0)
4771 {
4772 stack_pointer_delta = old_stack_allocated;
4773 pending_stack_adjust = 0;
4774 }
0a1c58a2 4775 }
51bbfa0c 4776
0a1c58a2 4777 /* If value type not void, return an rtx for the value. */
51bbfa0c 4778
28ed065e 4779 if (TYPE_MODE (rettype) == VOIDmode
0a1c58a2 4780 || ignore)
b5cd4ed4 4781 target = const0_rtx;
0a1c58a2
JL
4782 else if (structure_value_addr)
4783 {
3c0cb5de 4784 if (target == 0 || !MEM_P (target))
0a1c58a2 4785 {
3bdf5ad1 4786 target
28ed065e
MM
4787 = gen_rtx_MEM (TYPE_MODE (rettype),
4788 memory_address (TYPE_MODE (rettype),
3bdf5ad1 4789 structure_value_addr));
28ed065e 4790 set_mem_attributes (target, rettype, 1);
0a1c58a2
JL
4791 }
4792 }
4793 else if (pcc_struct_value)
cacbd532 4794 {
0a1c58a2
JL
4795 /* This is the special C++ case where we need to
4796 know what the true target was. We take care to
4797 never use this value more than once in one expression. */
28ed065e 4798 target = gen_rtx_MEM (TYPE_MODE (rettype),
0a1c58a2 4799 copy_to_reg (valreg));
28ed065e 4800 set_mem_attributes (target, rettype, 1);
cacbd532 4801 }
0a1c58a2
JL
4802 /* Handle calls that return values in multiple non-contiguous locations.
4803 The Irix 6 ABI has examples of this. */
4804 else if (GET_CODE (valreg) == PARALLEL)
4805 {
6de9cd9a 4806 if (target == 0)
5ef0b50d 4807 target = emit_group_move_into_temps (valreg);
1d1b7dc4
RS
4808 else if (rtx_equal_p (target, valreg))
4809 ;
4810 else if (GET_CODE (target) == PARALLEL)
4811 /* Handle the result of an emit_group_move_into_temps
4812 call in the previous pass. */
4813 emit_group_move (target, valreg);
4814 else
28ed065e
MM
4815 emit_group_store (target, valreg, rettype,
4816 int_size_in_bytes (rettype));
0a1c58a2
JL
4817 }
4818 else if (target
28ed065e 4819 && GET_MODE (target) == TYPE_MODE (rettype)
0a1c58a2
JL
4820 && GET_MODE (target) == GET_MODE (valreg))
4821 {
51caaefe
EB
4822 bool may_overlap = false;
4823
f2d18690
KK
4824 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4825 reg to a plain register. */
3fb30019
RS
4826 if (!REG_P (target) || HARD_REGISTER_P (target))
4827 valreg = avoid_likely_spilled_reg (valreg);
f2d18690 4828
51caaefe
EB
4829 /* If TARGET is a MEM in the argument area, and we have
4830 saved part of the argument area, then we can't store
4831 directly into TARGET as it may get overwritten when we
4832 restore the argument save area below. Don't work too
4833 hard though and simply force TARGET to a register if it
4834 is a MEM; the optimizer is quite likely to sort it out. */
4835 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4836 for (i = 0; i < num_actuals; i++)
4837 if (args[i].save_area)
4838 {
4839 may_overlap = true;
4840 break;
4841 }
0219237c 4842
51caaefe
EB
4843 if (may_overlap)
4844 target = copy_to_reg (valreg);
4845 else
4846 {
4847 /* TARGET and VALREG cannot be equal at this point
4848 because the latter would not have
4849 REG_FUNCTION_VALUE_P true, while the former would if
4850 it were referring to the same register.
4851
4852 If they refer to the same register, this move will be
4853 a no-op, except when function inlining is being
4854 done. */
4855 emit_move_insn (target, valreg);
4856
4857 /* If we are setting a MEM, this code must be executed.
4858 Since it is emitted after the call insn, sibcall
4859 optimization cannot be performed in that case. */
4860 if (MEM_P (target))
4861 sibcall_failure = 1;
4862 }
0a1c58a2 4863 }
0a1c58a2 4864 else
3fb30019 4865 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
51bbfa0c 4866
cde0f3fd
PB
4867 /* If we promoted this return value, make the proper SUBREG.
4868 TARGET might be const0_rtx here, so be careful. */
4869 if (REG_P (target)
28ed065e
MM
4870 && TYPE_MODE (rettype) != BLKmode
4871 && GET_MODE (target) != TYPE_MODE (rettype))
61f71b34 4872 {
28ed065e 4873 tree type = rettype;
cde0f3fd 4874 int unsignedp = TYPE_UNSIGNED (type);
ef4bddc2 4875 machine_mode pmode;
cde0f3fd
PB
4876
4877 /* Ensure we promote as expected, and get the new unsignedness. */
4878 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4879 funtype, 1);
4880 gcc_assert (GET_MODE (target) == pmode);
4881
91914e56
RS
4882 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4883 GET_MODE (target));
cde0f3fd
PB
4884 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4885 SUBREG_PROMOTED_VAR_P (target) = 1;
362d42dc 4886 SUBREG_PROMOTED_SET (target, unsignedp);
61f71b34 4887 }
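 /* For instance (illustrative): a signed char result promoted to
    SImode by the ABI becomes (subreg:QI (reg:SI ...) ...) here, with
    SUBREG_PROMOTED_VAR_P set so later passes know the upper bits
    are already sign-extended. */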
84b55618 4888
0a1c58a2
JL
4889 /* If size of args is variable or this was a constructor call for a stack
4890 argument, restore saved stack-pointer value. */
51bbfa0c 4891
9dd9bf80 4892 if (old_stack_level)
0a1c58a2 4893 {
48810515 4894 rtx_insn *prev = get_last_insn ();
9a08d230 4895
9eac0f2a 4896 emit_stack_restore (SAVE_BLOCK, old_stack_level);
38afb23f 4897 stack_pointer_delta = old_stack_pointer_delta;
9a08d230 4898
faf7a23d 4899 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
9a08d230 4900
0a1c58a2 4901 pending_stack_adjust = old_pending_adj;
d25cee4d 4902 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
0a1c58a2
JL
4903 stack_arg_under_construction = old_stack_arg_under_construction;
4904 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4905 stack_usage_map = initial_stack_usage_map;
a20c5714 4906 stack_usage_watermark = initial_stack_usage_watermark;
0a1c58a2
JL
4907 sibcall_failure = 1;
4908 }
f8a097cd 4909 else if (ACCUMULATE_OUTGOING_ARGS && pass)
0a1c58a2 4910 {
51bbfa0c 4911#ifdef REG_PARM_STACK_SPACE
0a1c58a2 4912 if (save_area)
b820d2b8
AM
4913 restore_fixed_argument_area (save_area, argblock,
4914 high_to_save, low_to_save);
b94301c2 4915#endif
51bbfa0c 4916
0a1c58a2
JL
4917 /* If we saved any argument areas, restore them. */
4918 for (i = 0; i < num_actuals; i++)
4919 if (args[i].save_area)
4920 {
ef4bddc2 4921 machine_mode save_mode = GET_MODE (args[i].save_area);
0a1c58a2
JL
4922 rtx stack_area
4923 = gen_rtx_MEM (save_mode,
4924 memory_address (save_mode,
4925 XEXP (args[i].stack_slot, 0)));
4926
4927 if (save_mode != BLKmode)
4928 emit_move_insn (stack_area, args[i].save_area);
4929 else
44bb111a 4930 emit_block_move (stack_area, args[i].save_area,
a20c5714
RS
4931 (gen_int_mode
4932 (args[i].locate.size.constant, Pmode)),
44bb111a 4933 BLOCK_OP_CALL_PARM);
0a1c58a2 4934 }
51bbfa0c 4935
0a1c58a2
JL
4936 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4937 stack_usage_map = initial_stack_usage_map;
a20c5714 4938 stack_usage_watermark = initial_stack_usage_watermark;
0a1c58a2 4939 }
51bbfa0c 4940
d33606c3
EB
4941 /* If this was alloca, record the new stack level. */
4942 if (flags & ECF_MAY_BE_ALLOCA)
4943 record_new_stack_level ();
51bbfa0c 4944
0a1c58a2
JL
4945 /* Free up storage we no longer need. */
4946 for (i = 0; i < num_actuals; ++i)
04695783 4947 free (args[i].aligned_regs);
0a1c58a2 4948
2f21e1ba
BS
4949 targetm.calls.end_call_args ();
4950
0a1c58a2
JL
4951 insns = get_insns ();
4952 end_sequence ();
4953
4954 if (pass == 0)
4955 {
4956 tail_call_insns = insns;
4957
0a1c58a2
JL
4958 /* Restore the pending stack adjustment now that we have
4959 finished generating the sibling call sequence. */
1503a7ec 4960
7f2f0a01 4961 restore_pending_stack_adjust (&save);
099e9712
JH
4962
4963 /* Prepare arg structure for next iteration. */
f725a3ec 4964 for (i = 0; i < num_actuals; i++)
099e9712
JH
4965 {
4966 args[i].value = 0;
4967 args[i].aligned_regs = 0;
4968 args[i].stack = 0;
4969 }
c67846f2
JJ
4970
4971 sbitmap_free (stored_args_map);
48810515 4972 internal_arg_pointer_exp_state.scan_start = NULL;
9771b263 4973 internal_arg_pointer_exp_state.cache.release ();
0a1c58a2
JL
4974 }
4975 else
38afb23f
OH
4976 {
4977 normal_call_insns = insns;
4978
4979 /* Verify that we've deallocated all the stack we used. */
6e14af16 4980 gcc_assert ((flags & ECF_NORETURN)
a20c5714
RS
4981 || known_eq (old_stack_allocated,
4982 stack_pointer_delta
4983 - pending_stack_adjust));
38afb23f 4984 }
fadb729c
JJ
4985
4986 /* If something prevents making this a sibling call,
4987 zero out the sequence. */
4988 if (sibcall_failure)
48810515 4989 tail_call_insns = NULL;
6de9cd9a
DN
4990 else
4991 break;
0a1c58a2
JL
4992 }
4993
1ea7e6ad 4994 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
6de9cd9a
DN
4995 arguments too, as the argument area is now clobbered by the call. */
4996 if (tail_call_insns)
0a1c58a2 4997 {
6de9cd9a 4998 emit_insn (tail_call_insns);
e3b5732b 4999 crtl->tail_call_emit = true;
0a1c58a2
JL
5000 }
5001 else
9a385c2d
DM
5002 {
5003 emit_insn (normal_call_insns);
5004 if (try_tail_call)
5005 /* Ideally we'd emit a message for all of the ways that it could
5006 have failed. */
5007 maybe_complain_about_tail_call (exp, "tail call production failed");
5008 }
51bbfa0c 5009
0a1c58a2 5010 currently_expanding_call--;
8e6a59fe 5011
04695783 5012 free (stack_usage_map_buf);
765fc0f7 5013 free (args);
51bbfa0c
RS
5014 return target;
5015}
ded9bf77 5016
6de9cd9a
DN
5017/* A sibling call sequence invalidates any REG_EQUIV notes made for
5018 this function's incoming arguments.
5019
5020 At the start of RTL generation we know the only REG_EQUIV notes
29d51cdb
SB
5021 in the rtl chain are those for incoming arguments, so we can look
5022 for REG_EQUIV notes between the start of the function and the
5023 NOTE_INSN_FUNCTION_BEG.
6de9cd9a
DN
5024
5025 This is (slight) overkill. We could keep track of the highest
5026 argument we clobber and be more selective in removing notes, but it
5027 does not seem to be worth the effort. */
29d51cdb 5028
6de9cd9a
DN
5029void
5030fixup_tail_calls (void)
5031{
48810515 5032 rtx_insn *insn;
29d51cdb
SB
5033
5034 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5035 {
a31830a7
SB
5036 rtx note;
5037
29d51cdb
SB
5038 /* There are never REG_EQUIV notes for the incoming arguments
5039 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
5040 if (NOTE_P (insn)
a38e7aa5 5041 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
29d51cdb
SB
5042 break;
5043
a31830a7
SB
5044 note = find_reg_note (insn, REG_EQUIV, 0);
5045 if (note)
5046 remove_note (insn, note);
5047 note = find_reg_note (insn, REG_EQUIV, 0);
5048 gcc_assert (!note);
29d51cdb 5049 }
6de9cd9a
DN
5050}
5051
ded9bf77
AH
5052/* Traverse a list of TYPES and expand all complex types into their
5053 components. */
2f2b4a02 5054static tree
ded9bf77
AH
5055split_complex_types (tree types)
5056{
5057 tree p;
5058
42ba5130
RH
5059 /* Before allocating memory, check for the common case of no complex. */
5060 for (p = types; p; p = TREE_CHAIN (p))
5061 {
5062 tree type = TREE_VALUE (p);
5063 if (TREE_CODE (type) == COMPLEX_TYPE
5064 && targetm.calls.split_complex_arg (type))
c22cacf3 5065 goto found;
42ba5130
RH
5066 }
5067 return types;
5068
5069 found:
ded9bf77
AH
5070 types = copy_list (types);
5071
5072 for (p = types; p; p = TREE_CHAIN (p))
5073 {
5074 tree complex_type = TREE_VALUE (p);
5075
42ba5130
RH
5076 if (TREE_CODE (complex_type) == COMPLEX_TYPE
5077 && targetm.calls.split_complex_arg (complex_type))
ded9bf77
AH
5078 {
5079 tree next, imag;
5080
5081 /* Rewrite complex type with component type. */
5082 TREE_VALUE (p) = TREE_TYPE (complex_type);
5083 next = TREE_CHAIN (p);
5084
5085 /* Add another component type for the imaginary part. */
5086 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
5087 TREE_CHAIN (p) = imag;
5088 TREE_CHAIN (imag) = next;
5089
5090 /* Skip the newly created node. */
5091 p = TREE_CHAIN (p);
5092 }
5093 }
5094
5095 return types;
5096}
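/* Illustration (hypothetical argument list, not from a caller in this
   file): if the target's split_complex_arg hook accepts complex double,
   split_complex_types rewrites the type list

     (complex double, int)

   into

     (double, double, int)

   replacing the complex node with its component type and splicing in a
   second node for the imaginary part. */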
51bbfa0c 5097\f
db69559b
RS
5098/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
5099 for a value of mode OUTMODE,
5100 with NARGS different arguments, passed as ARGS.
5101 Store the return value if RETVAL is nonzero: store it in VALUE if
5102 VALUE is nonnull, otherwise pick a convenient location. In either
5103 case return the location of the stored value.
8ac61af7 5104
db69559b
RS
5105 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5106 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
5107 other types of library calls. */
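/* Callers normally reach this function through the variadic
   emit_library_call and emit_library_call_value wrappers; an
   illustrative call (hypothetical operands) for a two-argument SImode
   libcall would be

     rtx res = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
					SImode, op0, SImode, op1, SImode);

   where each value rtx is followed by its machine mode. */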
5108
5109rtx
d329e058
AJ
5110emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
5111 enum libcall_type fn_type,
db69559b 5112 machine_mode outmode, int nargs, rtx_mode_t *args)
43bc5f13 5113{
3c0fca12
RH
5114 /* Total size in bytes of all the stack-parms scanned so far. */
5115 struct args_size args_size;
5116 /* Size of arguments before any adjustments (such as rounding). */
5117 struct args_size original_args_size;
b3694847 5118 int argnum;
3c0fca12 5119 rtx fun;
81464b2c
KT
5120 /* TODO: choose the correct decl type of orgfun. Sadly this information
5121 isn't present here, so we default to the native calling ABI. */
033df0b9 5122 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
5d059ed9 5123 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
3c0fca12 5124 int count;
3c0fca12 5125 rtx argblock = 0;
d5cc9181
JR
5126 CUMULATIVE_ARGS args_so_far_v;
5127 cumulative_args_t args_so_far;
f725a3ec
KH
5128 struct arg
5129 {
5130 rtx value;
ef4bddc2 5131 machine_mode mode;
f725a3ec
KH
5132 rtx reg;
5133 int partial;
e7949876 5134 struct locate_and_pad_arg_data locate;
f725a3ec
KH
5135 rtx save_area;
5136 };
3c0fca12
RH
5137 struct arg *argvec;
5138 int old_inhibit_defer_pop = inhibit_defer_pop;
5139 rtx call_fusage = 0;
5140 rtx mem_value = 0;
5591ee6f 5141 rtx valreg;
3c0fca12 5142 int pcc_struct_value = 0;
cf098191 5143 poly_int64 struct_value_size = 0;
52a11cbf 5144 int flags;
3c0fca12 5145 int reg_parm_stack_space = 0;
a20c5714 5146 poly_int64 needed;
48810515 5147 rtx_insn *before_call;
0ed4bf92 5148 bool have_push_fusage;
b0c48229 5149 tree tfom; /* type_for_mode (outmode, 0) */
3c0fca12 5150
f73ad30e 5151#ifdef REG_PARM_STACK_SPACE
3c0fca12
RH
5152 /* Define the boundary of the register parm stack space that needs to be
5153 saved, if any. */
726a989a 5154 int low_to_save = 0, high_to_save = 0;
f725a3ec 5155 rtx save_area = 0; /* Place that it is saved. */
3c0fca12
RH
5156#endif
5157
3c0fca12 5158 /* Size of the stack reserved for parameter registers. */
a20c5714 5159 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3c0fca12 5160 char *initial_stack_usage_map = stack_usage_map;
a20c5714 5161 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
d9725c41 5162 char *stack_usage_map_buf = NULL;
3c0fca12 5163
61f71b34
DD
5164 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
5165
3c0fca12 5166#ifdef REG_PARM_STACK_SPACE
3c0fca12 5167 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3c0fca12
RH
5168#endif
5169
0529235d 5170 /* By default, library functions cannot throw. */
52a11cbf
RH
5171 flags = ECF_NOTHROW;
5172
9555a122
RH
5173 switch (fn_type)
5174 {
5175 case LCT_NORMAL:
53d4257f 5176 break;
9555a122 5177 case LCT_CONST:
53d4257f
JH
5178 flags |= ECF_CONST;
5179 break;
9555a122 5180 case LCT_PURE:
53d4257f 5181 flags |= ECF_PURE;
9555a122 5182 break;
9555a122
RH
5183 case LCT_NORETURN:
5184 flags |= ECF_NORETURN;
5185 break;
5186 case LCT_THROW:
0529235d 5187 flags &= ~ECF_NOTHROW;
9555a122 5188 break;
9defc9b7
RH
5189 case LCT_RETURNS_TWICE:
5190 flags = ECF_RETURNS_TWICE;
5191 break;
9555a122 5192 }
3c0fca12
RH
5193 fun = orgfun;
5194
3c0fca12
RH
5195 /* Ensure current function's preferred stack boundary is at least
5196 what we need. */
cb91fab0
JH
5197 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
5198 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3c0fca12
RH
5199
5200 /* If this kind of value comes back in memory,
5201 decide where in memory it should come back. */
b0c48229 5202 if (outmode != VOIDmode)
3c0fca12 5203 {
ae2bcd98 5204 tfom = lang_hooks.types.type_for_mode (outmode, 0);
61f71b34 5205 if (aggregate_value_p (tfom, 0))
b0c48229 5206 {
3c0fca12 5207#ifdef PCC_STATIC_STRUCT_RETURN
b0c48229 5208 rtx pointer_reg
1d636cc6 5209 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
b0c48229
NB
5210 mem_value = gen_rtx_MEM (outmode, pointer_reg);
5211 pcc_struct_value = 1;
5212 if (value == 0)
5213 value = gen_reg_rtx (outmode);
3c0fca12 5214#else /* not PCC_STATIC_STRUCT_RETURN */
b0c48229 5215 struct_value_size = GET_MODE_SIZE (outmode);
3c0cb5de 5216 if (value != 0 && MEM_P (value))
b0c48229
NB
5217 mem_value = value;
5218 else
9474e8ab 5219 mem_value = assign_temp (tfom, 1, 1);
3c0fca12 5220#endif
b0c48229 5221 /* This call returns a big structure. */
84b8030f 5222 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
b0c48229 5223 }
3c0fca12 5224 }
b0c48229
NB
5225 else
5226 tfom = void_type_node;
3c0fca12
RH
5227
5228 /* ??? Unfinished: must pass the memory address as an argument. */
5229
5230 /* Copy all the libcall-arguments out of the varargs data
5231 and into a vector ARGVEC.
5232
5233 Compute how to pass each argument. We only support a very small subset
5234 of the full argument passing conventions to limit complexity here since
5235 library functions shouldn't have many args. */
5236
f883e0a7 5237 argvec = XALLOCAVEC (struct arg, nargs + 1);
703ad42b 5238 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3c0fca12 5239
97fc4caf 5240#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
d5cc9181 5241 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
97fc4caf 5242#else
d5cc9181 5243 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
97fc4caf 5244#endif
d5cc9181 5245 args_so_far = pack_cumulative_args (&args_so_far_v);
3c0fca12
RH
5246
5247 args_size.constant = 0;
5248 args_size.var = 0;
5249
5250 count = 0;
5251
5252 push_temp_slots ();
5253
5254 /* If there's a structure value address to be passed,
5255 either pass it in the special place, or pass it as an extra argument. */
61f71b34 5256 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3c0fca12
RH
5257 {
5258 rtx addr = XEXP (mem_value, 0);
c22cacf3 5259
3c0fca12
RH
5260 nargs++;
5261
ee88d9aa
MK
5262 /* Make sure it is a reasonable operand for a move or push insn. */
5263 if (!REG_P (addr) && !MEM_P (addr)
1a627b35
RS
5264 && !(CONSTANT_P (addr)
5265 && targetm.legitimate_constant_p (Pmode, addr)))
ee88d9aa
MK
5266 addr = force_operand (addr, NULL_RTX);
5267
3c0fca12
RH
5268 argvec[count].value = addr;
5269 argvec[count].mode = Pmode;
5270 argvec[count].partial = 0;
5271
a7c81bc1 5272 function_arg_info ptr_arg (Pmode, /*named=*/true);
6783fdb7 5273 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
a7c81bc1 5274 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
3c0fca12
RH
5275
5276 locate_and_pad_parm (Pmode, NULL_TREE,
a4d5044f 5277#ifdef STACK_PARMS_IN_REG_PARM_AREA
c22cacf3 5278 1,
a4d5044f
CM
5279#else
5280 argvec[count].reg != 0,
5281#endif
2e4ceca5
UW
5282 reg_parm_stack_space, 0,
5283 NULL_TREE, &args_size, &argvec[count].locate);
3c0fca12 5284
3c0fca12
RH
5285 if (argvec[count].reg == 0 || argvec[count].partial != 0
5286 || reg_parm_stack_space > 0)
e7949876 5287 args_size.constant += argvec[count].locate.size.constant;
3c0fca12 5288
6930c98c 5289 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
3c0fca12
RH
5290
5291 count++;
5292 }
5293
db69559b 5294 for (unsigned int i = 0; count < nargs; i++, count++)
3c0fca12 5295 {
db69559b 5296 rtx val = args[i].first;
cf0d189e 5297 function_arg_info arg (args[i].second, /*named=*/true);
5e617be8 5298 int unsigned_p = 0;
3c0fca12
RH
5299
5300 /* We cannot convert the arg value to the mode the library wants here;
5301 must do it earlier where we know the signedness of the arg. */
cf0d189e
RS
5302 gcc_assert (arg.mode != BLKmode
5303 && (GET_MODE (val) == arg.mode
5304 || GET_MODE (val) == VOIDmode));
3c0fca12 5305
ee88d9aa
MK
5306 /* Make sure it is a reasonable operand for a move or push insn. */
5307 if (!REG_P (val) && !MEM_P (val)
cf0d189e
RS
5308 && !(CONSTANT_P (val)
5309 && targetm.legitimate_constant_p (arg.mode, val)))
ee88d9aa
MK
5310 val = force_operand (val, NULL_RTX);
5311
cf0d189e 5312 if (pass_by_reference (&args_so_far_v, arg))
3c0fca12 5313 {
f474c6f8 5314 rtx slot;
cf0d189e 5315 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
f474c6f8 5316
becfd6e5
KZ
5317 /* If this was a CONST function, it is now PURE since it now
5318 reads memory. */
99a32567
DM
5319 if (flags & ECF_CONST)
5320 {
5321 flags &= ~ECF_CONST;
5322 flags |= ECF_PURE;
5323 }
5324
e0c68ce9 5325 if (MEM_P (val) && !must_copy)
c4b9a87e
ER
5326 {
5327 tree val_expr = MEM_EXPR (val);
5328 if (val_expr)
5329 mark_addressable (val_expr);
5330 slot = val;
5331 }
9969aaf6 5332 else
f474c6f8 5333 {
cf0d189e 5334 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
9474e8ab 5335 1, 1);
f474c6f8
AO
5336 emit_move_insn (slot, val);
5337 }
1da68f56 5338
6b5273c3
AO
5339 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5340 gen_rtx_USE (VOIDmode, slot),
5341 call_fusage);
f474c6f8
AO
5342 if (must_copy)
5343 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5344 gen_rtx_CLOBBER (VOIDmode,
5345 slot),
5346 call_fusage);
5347
cf0d189e 5348 arg.mode = Pmode;
257caa55 5349 arg.pass_by_reference = true;
f474c6f8 5350 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12 5351 }
3c0fca12 5352
cf0d189e
RS
5353 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5354 NULL_TREE, 0);
5355 argvec[count].mode = arg.mode;
5356 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5357 unsigned_p);
6783fdb7 5358 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
3c0fca12 5359
3c0fca12 5360 argvec[count].partial
a7c81bc1 5361 = targetm.calls.arg_partial_bytes (args_so_far, arg);
3c0fca12 5362
3576f984
RS
5363 if (argvec[count].reg == 0
5364 || argvec[count].partial != 0
5365 || reg_parm_stack_space > 0)
5366 {
cf0d189e 5367 locate_and_pad_parm (arg.mode, NULL_TREE,
a4d5044f 5368#ifdef STACK_PARMS_IN_REG_PARM_AREA
3576f984 5369 1,
a4d5044f 5370#else
3576f984
RS
5371 argvec[count].reg != 0,
5372#endif
2e4ceca5 5373 reg_parm_stack_space, argvec[count].partial,
3576f984
RS
5374 NULL_TREE, &args_size, &argvec[count].locate);
5375 args_size.constant += argvec[count].locate.size.constant;
5376 gcc_assert (!argvec[count].locate.size.var);
5377 }
5378#ifdef BLOCK_REG_PADDING
5379 else
5380 /* The argument is passed entirely in registers. See at which
5381 end it should be padded. */
5382 argvec[count].locate.where_pad =
cf0d189e
RS
5383 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5384 known_le (GET_MODE_SIZE (arg.mode),
5385 UNITS_PER_WORD));
a4d5044f 5386#endif
3c0fca12 5387
6930c98c 5388 targetm.calls.function_arg_advance (args_so_far, arg);
3c0fca12 5389 }
3c0fca12 5390
957ed738
L
5391 for (int i = 0; i < nargs; i++)
5392 if (reg_parm_stack_space > 0
5393 || argvec[i].reg == 0
5394 || argvec[i].partial != 0)
5395 update_stack_alignment_for_call (&argvec[i].locate);
5396
3c0fca12
RH
5397 /* If this machine requires an external definition for library
5398 functions, write one out. */
5399 assemble_external_libcall (fun);
5400
5401 original_args_size = args_size;
a20c5714
RS
5402 args_size.constant = (aligned_upper_bound (args_size.constant
5403 + stack_pointer_delta,
5404 STACK_BYTES)
5405 - stack_pointer_delta);
3c0fca12 5406
a20c5714
RS
5407 args_size.constant = upper_bound (args_size.constant,
5408 reg_parm_stack_space);
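 /* Worked example (illustrative): with STACK_BYTES == 16,
    stack_pointer_delta == 8 and 20 bytes of arguments,
    aligned_upper_bound (28, 16) == 32, so args_size.constant becomes
    32 - 8 == 24 and the stack pointer is 16-byte aligned at the call. */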
3c0fca12 5409
5d059ed9 5410 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 5411 args_size.constant -= reg_parm_stack_space;
3c0fca12 5412
a20c5714
RS
5413 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5414 args_size.constant);
3c0fca12 5415
a11e0df4 5416 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
d3c12306 5417 {
a20c5714
RS
5418 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5419 current_function_pushed_stack_size
5420 = upper_bound (current_function_pushed_stack_size, pushed);
d3c12306
EB
5421 }
5422
f73ad30e
JH
5423 if (ACCUMULATE_OUTGOING_ARGS)
5424 {
5425 /* Since the stack pointer will never be pushed, it is possible for
5426 the evaluation of a parm to clobber something we have already
5427 written to the stack. Since most function calls on RISC machines
5428 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 5429
f73ad30e
JH
5430 Therefore, we save any area of the stack that was already written
5431 and that we are using. Here we set up to do this by making a new
5432 stack usage map from the old one.
3c0fca12 5433
f73ad30e
JH
5434 Another approach might be to try to reorder the argument
5435 evaluations to avoid this conflicting stack usage. */
3c0fca12 5436
f73ad30e 5437 needed = args_size.constant;
3c0fca12 5438
f73ad30e
JH
5439 /* Since we will be writing into the entire argument area, the
5440 map must be allocated for its entire size, not just the part that
5441 is the responsibility of the caller. */
5d059ed9 5442 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 5443 needed += reg_parm_stack_space;
3c0fca12 5444
a20c5714 5445 poly_int64 limit = needed;
6dad9361 5446 if (ARGS_GROW_DOWNWARD)
a20c5714
RS
5447 limit += 1;
5448
5449 /* For polynomial sizes, this is the maximum possible size needed
5450 for arguments with a constant size and offset. */
5451 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5452 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5453 const_limit);
6dad9361 5454
5ed6ace5 5455 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 5456 stack_usage_map = stack_usage_map_buf;
3c0fca12 5457
f73ad30e 5458 if (initial_highest_arg_in_use)
2e09e75a
JM
5459 memcpy (stack_usage_map, initial_stack_usage_map,
5460 initial_highest_arg_in_use);
3c0fca12 5461
f73ad30e 5462 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 5463 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
5464 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5465 needed = 0;
3c0fca12 5466
c39ada04 5467 /* We must be careful to use virtual regs before they're instantiated,
c22cacf3 5468 and real regs afterwards. Loop optimization, for example, can create
c39ada04
DD
5469 new libcalls after we've instantiated the virtual regs, and if we
5470 use virtuals anyway, they won't match the rtl patterns. */
3c0fca12 5471
c39ada04 5472 if (virtuals_instantiated)
0a81f074
RS
5473 argblock = plus_constant (Pmode, stack_pointer_rtx,
5474 STACK_POINTER_OFFSET);
c39ada04
DD
5475 else
5476 argblock = virtual_outgoing_args_rtx;
f73ad30e
JH
5477 }
5478 else
5479 {
5480 if (!PUSH_ARGS)
a20c5714 5481 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
f73ad30e 5482 }
3c0fca12 5483
3d9684ae 5484 /* We push args individually in reverse order, perform stack alignment
3c0fca12 5485 before the first push (the last arg). */
3d9684ae 5486 if (argblock == 0)
a20c5714
RS
5487 anti_adjust_stack (gen_int_mode (args_size.constant
5488 - original_args_size.constant,
5489 Pmode));
3c0fca12 5490
3d9684ae 5491 argnum = nargs - 1;
3c0fca12 5492
f73ad30e
JH
5493#ifdef REG_PARM_STACK_SPACE
5494 if (ACCUMULATE_OUTGOING_ARGS)
5495 {
5496 /* The argument list is the property of the called routine and it
5497 may clobber it. If the fixed area has been used for previous
b820d2b8
AM
5498 parameters, we must save and restore it. */
5499 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5500 &low_to_save, &high_to_save);
3c0fca12
RH
5501 }
5502#endif
f725a3ec 5503
2f21e1ba
BS
5504 /* When expanding a normal call, args are stored in push order,
5505 which is the reverse of what we have here. */
5506 bool any_regs = false;
5507 for (int i = nargs; i-- > 0; )
5508 if (argvec[i].reg != NULL_RTX)
5509 {
5510 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5511 any_regs = true;
5512 }
5513 if (!any_regs)
5514 targetm.calls.call_args (pc_rtx, NULL_TREE);
5515
3c0fca12
RH
5516 /* Push the args that need to be pushed. */
5517
0ed4bf92
BS
5518 have_push_fusage = false;
5519
3c0fca12
RH
5520 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5521 are to be pushed. */
3d9684ae 5522 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 5523 {
ef4bddc2 5524 machine_mode mode = argvec[argnum].mode;
b3694847 5525 rtx val = argvec[argnum].value;
3c0fca12
RH
5526 rtx reg = argvec[argnum].reg;
5527 int partial = argvec[argnum].partial;
6bdf8c2e 5528 unsigned int parm_align = argvec[argnum].locate.boundary;
a20c5714 5529 poly_int64 lower_bound = 0, upper_bound = 0;
3c0fca12
RH
5530
5531 if (! (reg != 0 && partial == 0))
5532 {
2b1c5433
JJ
5533 rtx use;
5534
f73ad30e
JH
5535 if (ACCUMULATE_OUTGOING_ARGS)
5536 {
f8a097cd
JH
5537 /* If this is being stored into a pre-allocated, fixed-size,
5538 stack area, save any previous data at that location. */
3c0fca12 5539
6dad9361
TS
5540 if (ARGS_GROW_DOWNWARD)
5541 {
5542 /* stack_slot is negative, but we want to index stack_usage_map
5543 with positive values. */
5544 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5545 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5546 }
5547 else
5548 {
5549 lower_bound = argvec[argnum].locate.slot_offset.constant;
5550 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5551 }
3c0fca12 5552
a20c5714
RS
5553 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5554 reg_parm_stack_space))
f73ad30e 5555 {
e7949876 5556 /* We need to make a save area. */
a20c5714 5557 poly_uint64 size
e7949876 5558 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
ef4bddc2 5559 machine_mode save_mode
f4b31647 5560 = int_mode_for_size (size, 1).else_blk ();
e7949876 5561 rtx adr
0a81f074 5562 = plus_constant (Pmode, argblock,
e7949876 5563 argvec[argnum].locate.offset.constant);
f73ad30e 5564 rtx stack_area
e7949876 5565 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
f73ad30e 5566
9778f2f8
JH
5567 if (save_mode == BLKmode)
5568 {
5569 argvec[argnum].save_area
5570 = assign_stack_temp (BLKmode,
9474e8ab
MM
5571 argvec[argnum].locate.size.constant
5572 );
9778f2f8 5573
1a8cb155
RS
5574 emit_block_move (validize_mem
5575 (copy_rtx (argvec[argnum].save_area)),
c22cacf3 5576 stack_area,
a20c5714
RS
5577 (gen_int_mode
5578 (argvec[argnum].locate.size.constant,
5579 Pmode)),
9778f2f8
JH
5580 BLOCK_OP_CALL_PARM);
5581 }
5582 else
5583 {
5584 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5585
5586 emit_move_insn (argvec[argnum].save_area, stack_area);
5587 }
f73ad30e 5588 }
3c0fca12 5589 }
19caa751 5590
6bdf8c2e 5591 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
44bb111a 5592 partial, reg, 0, argblock,
a20c5714
RS
5593 (gen_int_mode
5594 (argvec[argnum].locate.offset.constant, Pmode)),
e7949876 5595 reg_parm_stack_space,
99206968 5596 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
3c0fca12 5597
3c0fca12 5598 /* Now mark the segment we just used. */
f73ad30e 5599 if (ACCUMULATE_OUTGOING_ARGS)
a20c5714 5600 mark_stack_region_used (lower_bound, upper_bound);
3c0fca12
RH
5601
5602 NO_DEFER_POP;
475a3eef 5603
2b1c5433
JJ
5604 /* Indicate argument access so that alias.c knows that these
5605 values are live. */
5606 if (argblock)
0a81f074 5607 use = plus_constant (Pmode, argblock,
2b1c5433 5608 argvec[argnum].locate.offset.constant);
0ed4bf92
BS
5609 else if (have_push_fusage)
5610 continue;
2b1c5433 5611 else
0ed4bf92
BS
5612 {
5613 /* When arguments are pushed, trying to tell alias.c where
5614 exactly this argument is won't work, because the
5615 auto-increment causes confusion. So we merely indicate
5616 that we access something with a known mode somewhere on
5617 the stack. */
5618 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5619 gen_rtx_SCRATCH (Pmode));
5620 have_push_fusage = true;
5621 }
2b1c5433
JJ
5622 use = gen_rtx_MEM (argvec[argnum].mode, use);
5623 use = gen_rtx_USE (VOIDmode, use);
5624 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3c0fca12
RH
5625 }
5626 }
5627
3d9684ae 5628 argnum = nargs - 1;
3c0fca12 5629
531ca746 5630 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3c0fca12
RH
5631
5632 /* Now load any reg parms into their regs. */
5633
5634 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5635 are to be pushed. */
3d9684ae 5636 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 5637 {
ef4bddc2 5638 machine_mode mode = argvec[argnum].mode;
b3694847 5639 rtx val = argvec[argnum].value;
3c0fca12
RH
5640 rtx reg = argvec[argnum].reg;
5641 int partial = argvec[argnum].partial;
460b171d 5642
3c0fca12
RH
5643 /* Handle calls that pass values in multiple non-contiguous
5644 locations. The PA64 has examples of this for library calls. */
5645 if (reg != 0 && GET_CODE (reg) == PARALLEL)
ff15c351 5646 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3c0fca12 5647 else if (reg != 0 && partial == 0)
460b171d
JB
5648 {
5649 emit_move_insn (reg, val);
5650#ifdef BLOCK_REG_PADDING
cf098191 5651 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
460b171d
JB
5652
5653 /* Copied from load_register_parameters. */
5654
5655 /* Handle the case where we have a value that needs shifting
5656 up to the MSB, e.g. a QImode value when we're padding
5657 upward on a BYTES_BIG_ENDIAN machine. */
cf098191 5658 if (known_lt (size, UNITS_PER_WORD)
460b171d 5659 && (argvec[argnum].locate.where_pad
76b0cbf8 5660 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
460b171d
JB
5661 {
5662 rtx x;
cf098191 5663 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
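 /* Numeric illustration (assuming an 8-byte UNITS_PER_WORD): a
    1-byte value belongs in the most significant byte, so
    shift == (8 - 1) * 8 == 56 bits. */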
460b171d
JB
5664
5665 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5666 report the whole reg as used. Strictly speaking, the
5667 call only uses SIZE bytes at the msb end, but it doesn't
5668 seem worth generating rtl to say that. */
5669 reg = gen_rtx_REG (word_mode, REGNO (reg));
5670 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5671 if (x != reg)
5672 emit_move_insn (reg, x);
5673 }
5674#endif
5675 }
3c0fca12
RH
5676
5677 NO_DEFER_POP;
5678 }
5679
3c0fca12
RH
5680 /* Any regs containing parms remain in use through the call. */
5681 for (count = 0; count < nargs; count++)
5682 {
5683 rtx reg = argvec[count].reg;
5684 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5685 use_group_regs (&call_fusage, reg);
5686 else if (reg != 0)
3b1bf459
BS
5687 {
5688 int partial = argvec[count].partial;
5689 if (partial)
5690 {
5691 int nregs;
5692 gcc_assert (partial % UNITS_PER_WORD == 0);
5693 nregs = partial / UNITS_PER_WORD;
5694 use_regs (&call_fusage, REGNO (reg), nregs);
5695 }
5696 else
5697 use_reg (&call_fusage, reg);
5698 }
3c0fca12
RH
5699 }
5700
5701 /* Pass the function the address in which to return a structure value. */
61f71b34 5702 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3c0fca12 5703 {
61f71b34 5704 emit_move_insn (struct_value,
3c0fca12
RH
5705 force_reg (Pmode,
5706 force_operand (XEXP (mem_value, 0),
5707 NULL_RTX)));
f8cfc6aa 5708 if (REG_P (struct_value))
61f71b34 5709 use_reg (&call_fusage, struct_value);
3c0fca12
RH
5710 }
5711
5712 /* Don't allow popping to be deferred, since then
5713 cse'ing of library calls could delete a call and leave the pop. */
5714 NO_DEFER_POP;
5591ee6f 5715 valreg = (mem_value == 0 && outmode != VOIDmode
390b17c2 5716 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3c0fca12 5717
ce48579b 5718 /* Stack must be properly aligned now. */
a20c5714
RS
5719 gcc_assert (multiple_p (stack_pointer_delta,
5720 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
ebcd0b57 5721
695ee791
RH
5722 before_call = get_last_insn ();
5723
3cf3da88
EB
5724 if (flag_callgraph_info)
5725 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5726
3c0fca12
RH
5727 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5728 will set inhibit_defer_pop to that value. */
de76b467
JH
5729 /* The return type is needed to decide how many bytes the function pops.
5730 Signedness plays no role in that, so for simplicity, we pretend it's
5731 always signed. We also assume that the list of arguments passed has
5732 no impact, so we pretend it is unknown. */
3c0fca12 5733
6de9cd9a 5734 emit_call_1 (fun, NULL,
f725a3ec 5735 get_identifier (XSTR (orgfun, 0)),
b0c48229 5736 build_function_type (tfom, NULL_TREE),
f725a3ec 5737 original_args_size.constant, args_size.constant,
3c0fca12 5738 struct_value_size,
d5cc9181 5739 targetm.calls.function_arg (args_so_far,
6783fdb7 5740 function_arg_info::end_marker ()),
5591ee6f 5741 valreg,
d5cc9181 5742 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
3c0fca12 5743
1e288103 5744 if (flag_ipa_ra)
4f660b15 5745 {
e67d1102 5746 rtx datum = orgfun;
4f660b15 5747 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
e67d1102 5748 rtx_call_insn *last = last_call_insn ();
4f660b15
RO
5749 add_reg_note (last, REG_CALL_DECL, datum);
5750 }
5751
460b171d
JB
5752 /* Right-shift returned value if necessary. */
5753 if (!pcc_struct_value
5754 && TYPE_MODE (tfom) != BLKmode
5755 && targetm.calls.return_in_msb (tfom))
5756 {
5757 shift_return_value (TYPE_MODE (tfom), false, valreg);
5758 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5759 }
5760
2f21e1ba
BS
5761 targetm.calls.end_call_args ();
5762
6fb5fa3c
DB
5763 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5764 that it should complain if nonvolatile values are live. For
5765 functions that cannot return, inform flow that control does not
5766 fall through. */
6e14af16 5767 if (flags & ECF_NORETURN)
695ee791 5768 {
570a98eb 5769 /* The barrier must be emitted
695ee791
RH
5770 immediately after the CALL_INSN. Some ports emit more than
5771 just a CALL_INSN above, so we must search for it here. */
48810515 5772 rtx_insn *last = get_last_insn ();
4b4bf941 5773 while (!CALL_P (last))
695ee791
RH
5774 {
5775 last = PREV_INSN (last);
5776 /* There was no CALL_INSN? */
366de0ce 5777 gcc_assert (last != before_call);
695ee791
RH
5778 }
5779
570a98eb 5780 emit_barrier_after (last);
695ee791
RH
5781 }
5782
85da11a6
EB
5783 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5784 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5785 if (flags & ECF_NOTHROW)
5786 {
48810515 5787 rtx_insn *last = get_last_insn ();
85da11a6
EB
5788 while (!CALL_P (last))
5789 {
5790 last = PREV_INSN (last);
5791 /* There was no CALL_INSN? */
5792 gcc_assert (last != before_call);
5793 }
5794
5795 make_reg_eh_region_note_nothrow_nononlocal (last);
5796 }
5797
3c0fca12
RH
5798 /* Now restore inhibit_defer_pop to its actual original value. */
5799 OK_DEFER_POP;
5800
5801 pop_temp_slots ();
5802
5803 /* Copy the value to the right place. */
de76b467 5804 if (outmode != VOIDmode && retval)
3c0fca12
RH
5805 {
5806 if (mem_value)
5807 {
5808 if (value == 0)
5809 value = mem_value;
5810 if (value != mem_value)
5811 emit_move_insn (value, mem_value);
5812 }
c3297561
AO
5813 else if (GET_CODE (valreg) == PARALLEL)
5814 {
5815 if (value == 0)
5816 value = gen_reg_rtx (outmode);
643642eb 5817 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
c3297561 5818 }
3c0fca12 5819 else
7ab0aca2 5820 {
cde0f3fd 5821 /* Convert to the proper mode if a promotion has been active. */
7ab0aca2
RH
5822 if (GET_MODE (valreg) != outmode)
5823 {
5824 int unsignedp = TYPE_UNSIGNED (tfom);
5825
cde0f3fd
PB
5826 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5827 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
7ab0aca2 5828 == GET_MODE (valreg));
7ab0aca2
RH
5829 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5830 }
5831
5832 if (value != 0)
5833 emit_move_insn (value, valreg);
5834 else
5835 value = valreg;
5836 }
3c0fca12
RH
5837 }
5838
f73ad30e 5839 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 5840 {
f73ad30e
JH
5841#ifdef REG_PARM_STACK_SPACE
5842 if (save_area)
b820d2b8
AM
5843 restore_fixed_argument_area (save_area, argblock,
5844 high_to_save, low_to_save);
3c0fca12 5845#endif
f725a3ec 5846
f73ad30e
JH
5847 /* If we saved any argument areas, restore them. */
5848 for (count = 0; count < nargs; count++)
5849 if (argvec[count].save_area)
5850 {
ef4bddc2 5851 machine_mode save_mode = GET_MODE (argvec[count].save_area);
0a81f074 5852 rtx adr = plus_constant (Pmode, argblock,
e7949876
AM
5853 argvec[count].locate.offset.constant);
5854 rtx stack_area = gen_rtx_MEM (save_mode,
5855 memory_address (save_mode, adr));
f73ad30e 5856
9778f2f8
JH
5857 if (save_mode == BLKmode)
5858 emit_block_move (stack_area,
1a8cb155
RS
5859 validize_mem
5860 (copy_rtx (argvec[count].save_area)),
a20c5714
RS
5861 (gen_int_mode
5862 (argvec[count].locate.size.constant, Pmode)),
9778f2f8
JH
5863 BLOCK_OP_CALL_PARM);
5864 else
5865 emit_move_insn (stack_area, argvec[count].save_area);
f73ad30e 5866 }
3c0fca12 5867
f73ad30e
JH
5868 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5869 stack_usage_map = initial_stack_usage_map;
a20c5714 5870 stack_usage_watermark = initial_stack_usage_watermark;
f73ad30e 5871 }
43bc5f13 5872
04695783 5873 free (stack_usage_map_buf);
d9725c41 5874
de76b467
JH
5875 return value;
5876
5877}
5878\f
d5e254e1 5879
51bbfa0c
RS
5880/* Store a single argument for a function call
5881 into the register or memory area where it must be passed.
5882 *ARG describes the argument value and where to pass it.
5883
5884 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 5885 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
5886
5887 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
f725a3ec 5888 so we must be careful about how the stack is used.
51bbfa0c
RS
5889
5890 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5891 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
5892 that we need not worry about saving and restoring the stack.
5893
4c6b3b2a 5894 FNDECL is the declaration of the function we are calling.
f725a3ec 5895
da7d8304 5896 Return nonzero if this arg should cause sibcall failure,
4c6b3b2a 5897 zero otherwise. */
51bbfa0c 5898
4c6b3b2a 5899static int
d329e058
AJ
5900store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5901 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
51bbfa0c 5902{
b3694847 5903 tree pval = arg->tree_value;
51bbfa0c
RS
5904 rtx reg = 0;
5905 int partial = 0;
a20c5714
RS
5906 poly_int64 used = 0;
5907 poly_int64 lower_bound = 0, upper_bound = 0;
4c6b3b2a 5908 int sibcall_failure = 0;
51bbfa0c
RS
5909
5910 if (TREE_CODE (pval) == ERROR_MARK)
4c6b3b2a 5911 return 1;
51bbfa0c 5912
cc79451b
RK
5913 /* Push a new temporary level for any temporaries we make for
5914 this argument. */
5915 push_temp_slots ();
5916
f8a097cd 5917 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
51bbfa0c 5918 {
f73ad30e
JH
5919 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5920 save any previous data at that location. */
5921 if (argblock && ! variable_size && arg->stack)
5922 {
6dad9361
TS
5923 if (ARGS_GROW_DOWNWARD)
5924 {
5925 /* stack_slot is negative, but we want to index stack_usage_map
5926 with positive values. */
5927 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
5928 {
5929 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5930 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5931 }
6dad9361
TS
5932 else
5933 upper_bound = 0;
51bbfa0c 5934
6dad9361
TS
5935 lower_bound = upper_bound - arg->locate.size.constant;
5936 }
f73ad30e 5937 else
6dad9361
TS
5938 {
5939 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
5940 {
5941 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5942 lower_bound = rtx_to_poly_int64 (offset);
5943 }
6dad9361
TS
5944 else
5945 lower_bound = 0;
51bbfa0c 5946
6dad9361
TS
5947 upper_bound = lower_bound + arg->locate.size.constant;
5948 }
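 /* Worked example (illustrative, ARGS_GROW_DOWNWARD case): a slot at
    offset -16 holding 8 bytes gives upper_bound == 17 and
    lower_bound == 9, mapping the bytes to the positive
    stack_usage_map indices 9 through 16. */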
51bbfa0c 5949
a20c5714
RS
5950 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5951 reg_parm_stack_space))
51bbfa0c 5952 {
e7949876 5953 /* We need to make a save area. */
a20c5714 5954 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
f4b31647
RS
5955 machine_mode save_mode
5956 = int_mode_for_size (size, 1).else_blk ();
e7949876
AM
5957 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5958 rtx stack_area = gen_rtx_MEM (save_mode, adr);
f73ad30e
JH
5959
5960 if (save_mode == BLKmode)
5961 {
9ee5337d
EB
5962 arg->save_area
5963 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
f73ad30e 5964 preserve_temp_slots (arg->save_area);
1a8cb155
RS
5965 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5966 stack_area,
a20c5714
RS
5967 (gen_int_mode
5968 (arg->locate.size.constant, Pmode)),
44bb111a 5969 BLOCK_OP_CALL_PARM);
f73ad30e
JH
5970 }
5971 else
5972 {
5973 arg->save_area = gen_reg_rtx (save_mode);
5974 emit_move_insn (arg->save_area, stack_area);
5975 }
51bbfa0c
RS
5976 }
5977 }
5978 }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
         mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }

  /* Check for overlap with the already-clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
                                                    arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on the stack from computation
     of an argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the amount of space
         allocated to it, also bump the stack pointer by the additional
         space.  Note that in C the default argument promotions will
         prevent such mismatches.  */

      poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
                         ? 0 : GET_MODE_SIZE (arg->mode));

      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;
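
      /* Worked example on a hypothetical target: if PUSH_ROUNDING
         rounds to 2-byte halfwords, a 1-byte QImode argument gives
         size = used = 2, since each push advances the stack pointer
         by at least a halfword.  */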

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          != PAD_NONE)
        /* At the moment we don't (need to) support ABIs for which the
           padding isn't known at compile time.  In principle it should
           be easy to add though.  */
        used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          == PAD_DOWNWARD)
        {
          poly_int64 pad = used - size;
          unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
          if (pad_align != 0)
            parm_align = MIN (parm_align, pad_align);
        }
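
      /* Worked example: with size = 2 and used = 4, the downward
         padding is pad = 2 bytes, whose guaranteed alignment is
         2 * BITS_PER_UNIT = 16 bits on a BITS_PER_UNIT == 8 target,
         so parm_align is capped at 16 bits: the datum starts two
         bytes into its PARM_BOUNDARY-aligned slot.  */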

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      if (maybe_ne (used, 0)
          && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
                              NULL_RTX, parm_align, partial, reg, used - size,
                              argblock, ARGS_SIZE_RTX (arg->locate.offset),
                              reg_parm_stack_space,
                              ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
        sibcall_failure = 1;

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      poly_int64 excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - arg_int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype),
                                  EXPAND_NORMAL);
        }
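
      /* Worked example: a 10-byte struct whose stack slot was rounded
         up to 16 bytes, with partial == 0, gives excess = 6; size_rtx
         still expands to the type's own 10-byte size, and the excess
         is handed to emit_push_insn below as extra space to allocate.  */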

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          == PAD_DOWNWARD)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else
            {
              unsigned int excess_align
                = known_alignment (excess) * BITS_PER_UNIT;
              if (excess_align != 0)
                parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          poly_int64 i = 0;

          if (strip_offset (XEXP (x, 0), &i)
              == crtl->args.internal_arg_pointer)
            {
              /* arg.locate doesn't contain the pretend_args_size offset,
                 it's part of argblock.  Ensure we don't count it in I.  */
              if (STACK_GROWS_DOWNWARD)
                i -= crtl->args.pretend_args_size;
              else
                i += crtl->args.pretend_args_size;

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && arg->locate.size.var == 0);
              poly_int64 size_val = rtx_to_poly_int64 (size_rtx);

              if (known_eq (arg->locate.offset.constant, i))
                {
                  /* Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  */
                  if (maybe_ne (arg->locate.size.constant, size_val))
                    sibcall_failure = 1;
                }
              else if (maybe_in_range_p (arg->locate.offset.constant,
                                         i, size_val))
                sibcall_failure = 1;
              /* Use arg->locate.size.constant instead of size_rtx
                 because we only care about the part of the argument
                 on the stack.  */
              else if (maybe_in_range_p (i, arg->locate.offset.constant,
                                         arg->locate.size.constant))
                sibcall_failure = 1;
            }
        }
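
      /* Worked example: if the incoming argument occupies bytes
         [0, 16) relative to the incoming argument pointer and this
         outgoing argument is bound for bytes [8, 24), the regions
         overlap without coinciding, the push could clobber its own
         source, and the tail call is abandoned.  */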

      if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
        emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                        parm_align, partial, reg, excess, argblock,
                        ARGS_SIZE_RTX (arg->locate.offset),
                        reg_parm_stack_space,
                        ARGS_SIZE_RTX (arg->locate.alignment_pad), false);

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    mark_stack_region_used (lower_bound, upper_bound);

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}

/* Return true if we do not know how to pass ARG solely in registers.  */

bool
must_pass_in_stack_var_size (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  return false;
}
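
/* For example, a type whose TYPE_SIZE is not a compile-time constant,
   such as a variably-sized array or record type created by some front
   ends, cannot be assigned a fixed set of registers and so must live
   on the stack.  */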

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   additionally takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
{
  if (!arg.type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (arg.type))
    return true;

  if (TYPE_EMPTY_P (arg.type))
    return false;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (arg.mode == BLKmode
      && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (targetm.calls.function_arg_padding (arg.mode, arg.type)
          == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;

  return false;
}
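
/* Worked example for the final test above: with PARM_BOUNDARY == 32, a
   3-byte BLKmode struct has a size that is not a multiple of 4, so if
   the target pads it in the direction that would leave the bytes in the
   wrong part of a register (PAD_UPWARD on big-endian, PAD_DOWNWARD on
   little-endian), it must be passed on the stack instead.  */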

/* Return true if TYPE must be passed on the stack when passed to
   the "..." arguments of a function.  */

bool
must_pass_va_arg_in_stack (tree type)
{
  function_arg_info arg (type, /*named=*/false);
  return targetm.calls.must_pass_in_stack (arg);
}

/* Return true if FIELD is the C++17 empty base field that should
   be ignored for ABI calling-convention decisions in order to
   maintain ABI compatibility between C++14 and earlier, which doesn't
   add this FIELD to classes with empty bases, and C++17 and later,
   which does.  */

bool
cxx17_empty_base_field_p (const_tree field)
{
  return (DECL_FIELD_ABI_IGNORED (field)
          && DECL_ARTIFICIAL (field)
          && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
          && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
}
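
/* Illustrative C++ (hypothetical types):

     struct E {};
     struct D : E { int i; };

   C++14 lays out D without a field for the empty base E, while C++17
   and later add an artificial FIELD_DECL for it.  ABI code skips that
   field via this predicate so both dialects agree, except when the
   field carries the no_unique_address attribute and is therefore
   significant again.  */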

/* Tell the garbage collector about GTY markers in this source file.  */
#include "gt-calls.h"