/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20#include "config.h"
670ee920 21#include "system.h"
4977bab6 22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5
AM
24#include "target.h"
25#include "rtl.h"
c7131fb2
AM
26#include "tree.h"
27#include "gimple.h"
957060b5 28#include "predict.h"
4d0cdd0c 29#include "memmodel.h"
957060b5
AM
30#include "tm_p.h"
31#include "stringpool.h"
32#include "expmed.h"
33#include "optabs.h"
957060b5
AM
34#include "emit-rtl.h"
35#include "cgraph.h"
36#include "diagnostic-core.h"
40e23961 37#include "fold-const.h"
d8a2d370
DN
38#include "stor-layout.h"
39#include "varasm.h"
2fb9a547 40#include "internal-fn.h"
36566b39
PK
41#include "dojump.h"
42#include "explow.h"
43#include "calls.h"
670ee920 44#include "expr.h"
d6f4ec51 45#include "output.h"
b0c48229 46#include "langhooks.h"
b2dd096b 47#include "except.h"
6fb5fa3c 48#include "dbgcnt.h"
e9f56944 49#include "rtl-iter.h"
d5e254e1 50#include "tree-chkp.h"
8bd9f164
MS
51#include "tree-vrp.h"
52#include "tree-ssanames.h"
d5e254e1 53#include "rtl-chkp.h"
8bd9f164 54#include "intl.h"
314e6352
ML
55#include "stringpool.h"
56#include "attribs.h"
cc8bea0a 57#include "builtins.h"
76e048a8 58
c795bca9
BS
59/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
60#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
51bbfa0c
RS
61
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds the
     number of the special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound
     to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer within this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
			   unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}

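/* For example (a hypothetical illustration, not part of the original
   source): if a region with a non-constant upper bound starting at byte
   16 is marked used, only the watermark drops to 16; a later query such
   as

     stack_region_maybe_used_p (32, 40, 0)

   then returns true, because its constant upper bound (40) exceeds
   stack_usage_watermark (16), even though no byte of stack_usage_map
   was ever set.  */
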
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, -bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer
	 of ptr_mode.  In this case, it should be converted into address
	 mode to be a valid address for the memory rtx pattern.  See
	 PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}

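/* A rough sketch of the descriptor test emitted above, in C-like
   pseudocode (illustrative only; the real sequence is RTL and the tag
   value comes from targetm.calls.custom_function_descriptors):

     if (fp & bit_val)	// tagged: fp points into a descriptor
       {
	 chain = ((void **) (fp - bit_val))[0];	// static chain word
	 fp = ((void **) (fp - bit_val))[1];	// real function address
       }
     // fall through to the call with fp and the chain register loaded  */
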
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     poly_int64 stack_size ATTRIBUTE_UNUSED,
	     poly_int64 rounded_stack_size,
	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations:
	 if an entry gets garbage collected in one compilation, it then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg,
				   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args, force a
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

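/* For example (a hypothetical illustration): on 32-bit x86, a callee
   declared

     int __attribute__ ((stdcall)) f (int a, int b);

   pops its own 8 bytes of arguments, so targetm.calls.return_pops_args
   yields n_popped == 8 and a gen_call_value_pop pattern is used; the
   caller then records the stack adjustment instead of emitting its own
   pop.  */
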
/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

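/* For example (a hypothetical illustration): given a file-scope extern
   declaration

     extern int __sigsetjmp (void *, int);

   special_function_p strips the "__" prefix, matches "sigsetjmp", and
   adds ECF_RETURNS_TWICE to FLAGS; a declaration of the same name local
   to a function would be skipped by the DECL_CONTEXT check.  */
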
/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

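/* For example (a hypothetical illustration): an internal "fn spec"
   attribute string starting with '1', as for a memcpy-like function
   that returns its first argument, yields ERF_RETURNS_ARG | 0, while a
   leading 'm' (malloc-like result that aliases nothing) yields
   ERF_NOALIAS; a leading '.' says nothing about the return value.  */
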
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return gimple_call_num_args (stmt) > 0;
      default:
	break;
      }

  return false;
}

/* Return true when EXP contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}

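/* For example (a hypothetical illustration): for the statement

     p = __builtin_alloca (n);

   gimple_alloca_call_p returns true for the corresponding GIMPLE call,
   while gimple_maybe_alloca_call_p also accepts a plain extern
   declaration of alloca recognized by name via special_function_p.  */
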
/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

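/* For example (a hypothetical illustration): for

     extern int f (int) __attribute__ ((pure, nothrow, leaf));

   flags_from_decl_or_type returns a set including ECF_PURE, ECF_NOTHROW
   and ECF_LEAF; adding noreturn (which sets TREE_THIS_VOLATILE) would
   further add ECF_NORETURN and, because the function is pure,
   ECF_LOOPING_CONST_OR_PURE.  */
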
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}


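/* For example (a hypothetical illustration): a C++ class with a
   user-defined copy constructor is TREE_ADDRESSABLE, so
   pass_by_reference returns true regardless of the target hook, and a
   variable-length array type is likewise forced by reference because
   its TYPE_SIZE is not an INTEGER_CST.  */
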
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}

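/* For example (a hypothetical illustration): a 2-byte packed struct
   passed in a register on a 32-bit big-endian target gives bytes == 2,
   n_aligned_regs == 1 and endian_correction == 16, so the 16-bit field
   is stored into the high-order half of the word_mode pseudo, where the
   callee expects it.  */
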
/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (!alloc_object_size_limit)
    {
      alloc_object_size_limit = max_object_size ();

      if (warn_alloc_size_limit)
	{
	  char *end = NULL;
	  errno = 0;
	  unsigned HOST_WIDE_INT unit = 1;
	  unsigned HOST_WIDE_INT limit
	    = strtoull (warn_alloc_size_limit, &end, 10);

	  if (!errno)
	    {
	      if (end && *end)
		{
		  /* Numeric option arguments are at most INT_MAX.  Make it
		     possible to specify a larger value by accepting common
		     suffixes.  */
		  if (!strcmp (end, "kB"))
		    unit = 1000;
		  else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
		    unit = 1024;
		  else if (!strcmp (end, "MB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000;
		  else if (!strcasecmp (end, "MiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024;
		  else if (!strcasecmp (end, "GB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
		  else if (!strcasecmp (end, "GiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
		  else if (!strcasecmp (end, "TB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
		  else if (!strcasecmp (end, "TiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
		  else if (!strcasecmp (end, "PB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
		  else if (!strcasecmp (end, "PiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
		  else if (!strcasecmp (end, "EB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
			   * 1000;
		  else if (!strcasecmp (end, "EiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
			   * 1024;
		  else
		    unit = 0;
		}

	      if (unit)
		{
		  widest_int w = wi::mul (limit, unit);
		  if (w < wi::to_widest (alloc_object_size_limit))
		    alloc_object_size_limit
		      = wide_int_to_tree (ptrdiff_type_node, w);
		}
	    }
	}
    }
  return alloc_object_size_limit;
}

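/* For example (a hypothetical illustration): with
   -Walloc-size-larger-than=64MiB, strtoull parses limit == 64 and
   leaves end == "MiB", so unit becomes 1024 * 1024 and the limit is
   64 * 1048576 == 67108864 bytes, adopted only if it is smaller than
   the PTRDIFF_MAX-based default.  */
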
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of an object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.  When ALLOW_ZERO is true, allow
   returning a range of [0, 0] for a size in an anti-range [1, N] where
   N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
   allocation functions like malloc but it is a valid argument to
   functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_type range_type;

  if (TREE_CODE (exp) == SSA_NAME && integral)
    range_type = get_range_info (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else if (wi::eq_p (0, min - 1))
	{
	  /* EXP is unsigned and not in the range [1, MAX].  That means
	     it's either zero or greater than MAX.  Even though 0 would
	     normally be detected by -Walloc-zero, unless ALLOW_ZERO
	     is true, set the range to [MAX, TYPE_MAX] so that when MAX
	     is greater than the limit the whole range is diagnosed.  */
	  if (allow_zero)
	    min = max = wi::zero (expprec);
	  else
	    {
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	}
      else
	{
	  max = min - 1;
	  min = wi::zero (expprec);
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}

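/* For example (a hypothetical illustration): if VRP knows a signed size
   N only as the anti-range ~[-7, 7] (i.e. N < -7 || N > 7), the code
   above takes the middle branch (min - 1 == -8 <= 0) and returns the
   range [8, TYPE_MAX]: negative candidates are discarded as invalid
   sizes and the rest of the excluded interval is stepped over.  */
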
1403/* Diagnose a call EXP to function FN decorated with attribute alloc_size
1404 whose argument numbers given by IDX with values given by ARGS exceed
1405 the maximum object size or cause an unsigned oveflow (wrapping) when
1406 multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
1407 may be null for functions like malloc, and non-null for those like
1408 calloc that are decorated with a two-argument attribute alloc_size. */
1409
1410void
1411maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1412{
1413 /* The range each of the (up to) two arguments is known to be in. */
1414 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1415
1416 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1417 tree maxobjsize = alloc_max_size ();
1418
1419 location_t loc = EXPR_LOCATION (exp);
1420
1421 bool warned = false;
1422
1423 /* Validate each argument individually. */
1424 for (unsigned i = 0; i != 2 && args[i]; ++i)
1425 {
1426 if (TREE_CODE (args[i]) == INTEGER_CST)
1427 {
1428 argrange[i][0] = args[i];
1429 argrange[i][1] = args[i];
1430
1431 if (tree_int_cst_lt (args[i], integer_zero_node))
1432 {
1433 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef
MS
1434 "%Kargument %i value %qE is negative",
1435 exp, idx[i] + 1, args[i]);
8bd9f164
MS
1436 }
1437 else if (integer_zerop (args[i]))
1438 {
1439 /* Avoid issuing -Walloc-zero for allocation functions other
1440 than __builtin_alloca that are declared with attribute
1441 returns_nonnull because there's no portability risk. This
1442 avoids warning for such calls to libiberty's xmalloc and
1443 friends.
1444 Also avoid issuing the warning for calls to function named
1445 "alloca". */
1446 if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
1447 && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
1448 || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
1449 && !lookup_attribute ("returns_nonnull",
1450 TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
1451 warned = warning_at (loc, OPT_Walloc_zero,
c16880ef
MS
1452 "%Kargument %i value is zero",
1453 exp, idx[i] + 1);
8bd9f164
MS
1454 }
1455 else if (tree_int_cst_lt (maxobjsize, args[i]))
1456 {
1457 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1458 mode and with -fno-exceptions as a way to indicate array
1459 size overflow. There's no good way to detect C++98 here
1460 so avoid diagnosing these calls for all C++ modes. */
1461 if (i == 0
1462 && !args[1]
1463 && lang_GNU_CXX ()
1464 && DECL_IS_OPERATOR_NEW (fn)
1465 && integer_all_onesp (args[i]))
1466 continue;
1467
1468 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1469 "%Kargument %i value %qE exceeds "
8bd9f164 1470 "maximum object size %E",
c16880ef 1471 exp, idx[i] + 1, args[i], maxobjsize);
8bd9f164
MS
1472 }
1473 }
c16880ef
MS
1474 else if (TREE_CODE (args[i]) == SSA_NAME
1475 && get_size_range (args[i], argrange[i]))
8bd9f164 1476 {
8bd9f164
MS
1477 /* Verify that the argument's range is not negative (including
1478 upper bound of zero). */
1479 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1480 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1481 {
1482 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef
MS
1483 "%Kargument %i range [%E, %E] is negative",
1484 exp, idx[i] + 1,
1485 argrange[i][0], argrange[i][1]);
8bd9f164
MS
1486 }
1487 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1488 {
1489 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1490 "%Kargument %i range [%E, %E] exceeds "
8bd9f164 1491 "maximum object size %E",
c16880ef
MS
1492 exp, idx[i] + 1,
1493 argrange[i][0], argrange[i][1],
8bd9f164
MS
1494 maxobjsize);
1495 }
1496 }
1497 }
1498
1499 if (!argrange[0])
1500 return;
1501
1502 /* For a two-argument alloc_size, validate the product of the two
1503 arguments if both of their values or ranges are known. */
1504 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1505 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1506 && !integer_onep (argrange[0][0])
1507 && !integer_onep (argrange[1][0]))
1508 {
1509 /* Check for overflow in the product of a function decorated with
1510 attribute alloc_size (X, Y). */
1511 unsigned szprec = TYPE_PRECISION (size_type_node);
1512 wide_int x = wi::to_wide (argrange[0][0], szprec);
1513 wide_int y = wi::to_wide (argrange[1][0], szprec);
1514
1515 bool vflow;
1516 wide_int prod = wi::umul (x, y, &vflow);
1517
1518 if (vflow)
1519 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1520 "%Kproduct %<%E * %E%> of arguments %i and %i "
8bd9f164 1521 "exceeds %<SIZE_MAX%>",
c16880ef 1522 exp, argrange[0][0], argrange[1][0],
8bd9f164
MS
1523 idx[0] + 1, idx[1] + 1);
1524 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1525 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1526 "%Kproduct %<%E * %E%> of arguments %i and %i "
8bd9f164 1527 "exceeds maximum object size %E",
c16880ef 1528 exp, argrange[0][0], argrange[1][0],
8bd9f164
MS
1529 idx[0] + 1, idx[1] + 1,
1530 maxobjsize);
1531
1532 if (warned)
1533 {
1534 /* Print the full range of each of the two arguments to make
1535 it clear when it is, in fact, in a range and not constant. */
1536 if (argrange[0][0] != argrange [0][1])
1537 inform (loc, "argument %i in the range [%E, %E]",
1538 idx[0] + 1, argrange[0][0], argrange[0][1]);
1539 if (argrange[1][0] != argrange [1][1])
1540 inform (loc, "argument %i in the range [%E, %E]",
1541 idx[1] + 1, argrange[1][0], argrange[1][1]);
1542 }
1543 }
1544
1545 if (warned)
1546 {
1547 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1548
1549 if (DECL_IS_BUILTIN (fn))
1550 inform (loc,
1551 "in a call to built-in allocation function %qD", fn);
1552 else
1553 inform (fnloc,
1554 "in a call to allocation function %qD declared here", fn);
1555 }
1556}
1557
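/* Illustrative usage (a sketch, not part of this file): MY_ALLOC below is
   a hypothetical allocator showing the diagnostics issued above for a
   function decorated with attribute alloc_size, compiled with -Walloc-zero
   enabled on a 64-bit target:

     #include <stddef.h>

     void *my_alloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

     void *a (void) { return my_alloc (0, 4); }
       // warning: argument 1 value is zero  [-Walloc-zero]

     void *b (void) { return my_alloc (4, (size_t) -1); }
       // warning: argument 2 value exceeds maximum object size
       //   [-Walloc-size-larger-than=]

     void *c (void)
     {
       return my_alloc ((size_t) 1 << 32, (size_t) 1 << 32);
       // warning: product of arguments 1 and 2 exceeds SIZE_MAX
     }
*/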
1558/* If EXPR refers to a character array or pointer declared with
1559 attribute nonstring, return a decl for that array or pointer and
1560 set *REF to the referenced enclosing object or pointer. Otherwise
1561 return null. */
1562
1563tree
1564get_attr_nonstring_decl (tree expr, tree *ref)
1565{
1566 tree decl = expr;
1567 if (TREE_CODE (decl) == SSA_NAME)
1568 {
1569 gimple *def = SSA_NAME_DEF_STMT (decl);
1570
1571 if (is_gimple_assign (def))
1572 {
1573 tree_code code = gimple_assign_rhs_code (def);
1574 if (code == ADDR_EXPR
1575 || code == COMPONENT_REF
1576 || code == VAR_DECL)
1577 decl = gimple_assign_rhs1 (def);
1578 }
1579 else if (tree var = SSA_NAME_VAR (decl))
1580 decl = var;
1581 }
1582
1583 if (TREE_CODE (decl) == ADDR_EXPR)
1584 decl = TREE_OPERAND (decl, 0);
1585
1586 if (ref)
1587 *ref = decl;
1588
1589 if (TREE_CODE (decl) == COMPONENT_REF)
1590 decl = TREE_OPERAND (decl, 1);
1591
1592 if (DECL_P (decl)
1593 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1594 return decl;
1595
1596 return NULL_TREE;
1597}
1598
1599/* Warn about passing a non-string array/pointer to a function that
1600 expects a nul-terminated string argument. */
1601
1602void
1603maybe_warn_nonstring_arg (tree fndecl, tree exp)
1604{
1605 if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
1606 return;
1607
1608 bool with_bounds = CALL_WITH_BOUNDS_P (exp);
1609
1610 unsigned nargs = call_expr_nargs (exp);
1611
1612 /* The bound argument to a bounded string function like strncpy. */
1613 tree bound = NULL_TREE;
1614
1615 /* It's safe to call "bounded" string functions with a non-string
1616 argument since the functions provide an explicit bound for this
1617 purpose. */
1618 switch (DECL_FUNCTION_CODE (fndecl))
1619 {
1620 case BUILT_IN_STPNCPY:
1621 case BUILT_IN_STPNCPY_CHK:
1622 case BUILT_IN_STRNCMP:
1623 case BUILT_IN_STRNCASECMP:
1624 case BUILT_IN_STRNCPY:
1625 case BUILT_IN_STRNCPY_CHK:
1626 {
1627 unsigned argno = with_bounds ? 4 : 2;
1628 if (argno < nargs)
1629 bound = CALL_EXPR_ARG (exp, argno);
1630 break;
1631 }
1632
1633 case BUILT_IN_STRNDUP:
1634 {
1635 unsigned argno = with_bounds ? 2 : 1;
1636 if (argno < nargs)
1637 bound = CALL_EXPR_ARG (exp, argno);
1638 break;
1639 }
1640
1641 default:
1642 break;
1643 }
1644
1645 /* Determine the range of the bound argument (if specified). */
1646 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1647 if (bound)
1648 get_size_range (bound, bndrng);
1649
1650 /* Iterate over the built-in function's formal arguments and check
1651 each const char* against the actual argument. If the actual
1652 argument is declared attribute non-string, issue a warning unless
1653 the argument's maximum length is bounded. */
1654 function_args_iterator it;
1655 function_args_iter_init (&it, TREE_TYPE (fndecl));
1656
1657 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1658 {
1659 /* Avoid iterating past the declared argument in a call
1660 to a function declared without a prototype. */
1661 if (argno >= nargs)
1662 break;
1663
1664 tree argtype = function_args_iter_cond (&it);
1665 if (!argtype)
1666 break;
1667
1668 if (TREE_CODE (argtype) != POINTER_TYPE)
1669 continue;
1670
1671 argtype = TREE_TYPE (argtype);
1672
1673 if (TREE_CODE (argtype) != INTEGER_TYPE
1674 || !TYPE_READONLY (argtype))
1675 continue;
1676
1677 argtype = TYPE_MAIN_VARIANT (argtype);
1678 if (argtype != char_type_node)
1679 continue;
1680
1681 tree callarg = CALL_EXPR_ARG (exp, argno);
1682 if (TREE_CODE (callarg) == ADDR_EXPR)
1683 callarg = TREE_OPERAND (callarg, 0);
1684
1685 /* See if the destination is declared with attribute "nonstring". */
1686 tree decl = get_attr_nonstring_decl (callarg);
1687 if (!decl)
1688 continue;
1689
1690 tree type = TREE_TYPE (decl);
1691
1692 offset_int wibnd = 0;
1693 if (bndrng[0])
1694 wibnd = wi::to_offset (bndrng[0]);
1695
1696 offset_int asize = wibnd;
1697
1698 if (TREE_CODE (type) == ARRAY_TYPE)
1699 if (tree arrbnd = TYPE_DOMAIN (type))
1700 {
1701 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1702 asize = wi::to_offset (arrbnd) + 1;
1703 }
1704
1705 location_t loc = EXPR_LOCATION (exp);
1706
1707 bool warned = false;
1708
1709 if (wi::ltu_p (asize, wibnd))
1710 warned = warning_at (loc, OPT_Wstringop_overflow_,
1711 "%qD argument %i declared attribute %<nonstring%> "
1712 "is smaller than the specified bound %E",
1713 fndecl, argno + 1, bndrng[0]);
1714 else if (!bound)
1715 warned = warning_at (loc, OPT_Wstringop_overflow_,
1716 "%qD argument %i declared attribute %<nonstring%>",
1717 fndecl, argno + 1);
1718
1719 if (warned)
1720 inform (DECL_SOURCE_LOCATION (decl),
1721 "argument %qD declared here", decl);
1722 }
1723}
1724
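/* Illustrative usage (a sketch, not part of this file): ID below is a
   hypothetical buffer showing when the warning above fires.  An unbounded
   strlen call on a nonstring array is diagnosed; a bounded strncpy whose
   bound does not exceed the array size is accepted:

     __attribute__ ((nonstring)) char id[8];   // need not be nul-terminated
     char buf[8];

     unsigned long f (void)
     {
       return __builtin_strlen (id);
       // warning: 'strlen' argument 1 declared attribute 'nonstring'
       //   [-Wstringop-overflow=]
     }

     void g (void)
     {
       __builtin_strncpy (buf, id, sizeof id);   // OK: bound covers ID
     }
*/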
1725/* Issue an error if CALL_EXPR was flagged as requiring
1726 tail-call optimization. */
1727
1728static void
1729maybe_complain_about_tail_call (tree call_expr, const char *reason)
1730{
1731 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1732 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1733 return;
1734
1735 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1736}
1737
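/* A sketch of how this point is reached (an assumption for illustration;
   the C and C++ front ends in this release have no user-level syntax for
   it): once CALL_EXPR_MUST_TAIL_CALL is set on a CALL_EXPR, every bailout
   in the tail-call logic below funnels its reason through this function:

     tree call = build_call_expr (fndecl, 1, arg);  // hypothetical call site
     CALL_EXPR_MUST_TAIL_CALL (call) = 1;           // request the error
*/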
d7cdf113 1738/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
b8698a0f 1739 CALL_EXPR EXP.
1740
1741 NUM_ACTUALS is the total number of parameters.
1742
1743 N_NAMED_ARGS is the total number of named arguments.
1744
1745 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1746 value, or null.
1747
1748 FNDECL is the tree node for the target of this call (if known).
1749
1750 ARGS_SO_FAR holds state needed by the target to know where to place
1751 the next argument.
1752
1753 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1754 for arguments which are passed in registers.
1755
1756 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1757 and may be modified by this routine.
1758
f2d33f13 1759 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
026c3cfd 1760 flags which may be modified by this routine.
dd292d0a 1761
1762 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1763 that requires allocation of stack space.
1764
1765 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1766 the thunked-to function. */
1767
1768static void
1769initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1770 struct arg_data *args,
1771 struct args_size *args_size,
1772 int n_named_args ATTRIBUTE_UNUSED,
078a18a4 1773 tree exp, tree struct_value_addr_value,
45769134 1774 tree fndecl, tree fntype,
d5cc9181 1775 cumulative_args_t args_so_far,
d329e058 1776 int reg_parm_stack_space,
1777 rtx *old_stack_level,
1778 poly_int64_pod *old_pending_adj,
dd292d0a 1779 int *must_preallocate, int *ecf_flags,
6de9cd9a 1780 bool *may_tailcall, bool call_from_thunk_p)
d7cdf113 1781{
d5cc9181 1782 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
db3927fb 1783 location_t loc = EXPR_LOCATION (exp);
1784
1785 /* Count arg position in order args appear. */
1786 int argpos;
1787
1788 int i;
f725a3ec 1789
1790 args_size->constant = 0;
1791 args_size->var = 0;
1792
1793 bitmap_obstack_initialize (NULL);
1794
d7cdf113 1795 /* In this loop, we consider args in the order they are written.
3d9684ae 1796 We fill up ARGS from the back. */
d7cdf113 1797
3d9684ae 1798 i = num_actuals - 1;
078a18a4 1799 {
d5e254e1 1800 int j = i, ptr_arg = -1;
1801 call_expr_arg_iterator iter;
1802 tree arg;
d5e254e1 1803 bitmap slots = NULL;
1804
1805 if (struct_value_addr_value)
1806 {
1807 args[j].tree_value = struct_value_addr_value;
3d9684ae 1808 j--;
1809
1810 /* If we pass a structure address, we need to
1811 create bounds for it. Since creating the bounds is
1812 a call statement, we expand it right here to avoid
1813 fixing all the other places where it might be expanded. */
1814 if (CALL_WITH_BOUNDS_P (exp))
1815 {
1816 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1817 args[j].tree_value
1818 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1819 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1820 EXPAND_NORMAL, 0, false);
1821 args[j].pointer_arg = j + 1;
1822 j--;
1823 }
078a18a4 1824 }
afc610db 1825 argpos = 0;
1826 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1827 {
1828 tree argtype = TREE_TYPE (arg);
1829
1830 /* Remember the last param with a pointer and associate it
1831 with the following pointer bounds. */
1832 if (CALL_WITH_BOUNDS_P (exp)
1833 && chkp_type_has_pointer (argtype))
1834 {
1835 if (slots)
1836 BITMAP_FREE (slots);
1837 ptr_arg = j;
1838 if (!BOUNDED_TYPE_P (argtype))
1839 {
1840 slots = BITMAP_ALLOC (NULL);
1841 chkp_find_bound_slots (argtype, slots);
1842 }
1843 }
1844 else if (CALL_WITH_BOUNDS_P (exp)
1845 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1846 argpos < n_named_args))
1847 {
1848 if (slots)
1849 BITMAP_FREE (slots);
1850 ptr_arg = j;
1851 }
1852 else if (POINTER_BOUNDS_TYPE_P (argtype))
1853 {
1854 /* We expect bounds in instrumented calls only.
1855 Otherwise it is a sign we lost the flag due to some optimization
1856 and may emit call args incorrectly. */
1857 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1858
1859 /* For structures look for the next available pointer. */
1860 if (ptr_arg != -1 && slots)
1861 {
1862 unsigned bnd_no = bitmap_first_set_bit (slots);
1863 args[j].pointer_offset =
1864 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1865
1866 bitmap_clear_bit (slots, bnd_no);
1867
1868 /* Check we have no more pointers in the structure. */
1869 if (bitmap_empty_p (slots))
1870 BITMAP_FREE (slots);
1871 }
1872 args[j].pointer_arg = ptr_arg;
1873
1874 /* Check we covered all pointers in the previous
1875 non-bounds arg. */
1876 if (!slots)
1877 ptr_arg = -1;
1878 }
1879 else
1880 ptr_arg = -1;
1881
1882 if (targetm.calls.split_complex_arg
1883 && argtype
1884 && TREE_CODE (argtype) == COMPLEX_TYPE
1885 && targetm.calls.split_complex_arg (argtype))
1886 {
1887 tree subtype = TREE_TYPE (argtype);
078a18a4 1888 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
3d9684ae 1889 j--;
1890 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1891 }
1892 else
1893 args[j].tree_value = arg;
3d9684ae 1894 j--;
afc610db 1895 argpos++;
078a18a4 1896 }
1897
1898 if (slots)
1899 BITMAP_FREE (slots);
1900 }
1901
1902 bitmap_obstack_release (NULL);
1903
1904 /* Extract attribute alloc_size and, if set, store the indices of
1905 the corresponding arguments in ALLOC_IDX, and then the actual
1906 argument(s) at those indices in ALLOC_ARGS. */
1907 int alloc_idx[2] = { -1, -1 };
1908 if (tree alloc_size
1909 = (fndecl ? lookup_attribute ("alloc_size",
1910 TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
1911 : NULL_TREE))
1912 {
1913 tree args = TREE_VALUE (alloc_size);
1914 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1915 if (TREE_CHAIN (args))
1916 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1917 }
1918
1919 /* Array for up to the two attribute alloc_size arguments. */
1920 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1921
d7cdf113 1922 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
3d9684ae 1923 for (argpos = 0; argpos < num_actuals; i--, argpos++)
d7cdf113 1924 {
078a18a4 1925 tree type = TREE_TYPE (args[i].tree_value);
d7cdf113 1926 int unsignedp;
ef4bddc2 1927 machine_mode mode;
d7cdf113 1928
d7cdf113 1929 /* Replace erroneous argument with constant zero. */
d0f062fb 1930 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1931 args[i].tree_value = integer_zero_node, type = integer_type_node;
1932
1933 /* If TYPE is a transparent union or record, pass things the way
1934 we would pass the first field of the union or record. We have
1935 already verified that the modes are the same. */
1936 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1937 && TYPE_TRANSPARENT_AGGR (type))
1938 type = TREE_TYPE (first_field (type));
1939
1940 /* Decide where to pass this arg.
1941
1942 args[i].reg is nonzero if all or part is passed in registers.
1943
1944 args[i].partial is nonzero if part but not all is passed in registers,
78a52f11 1945 and the exact value says how many bytes are passed in registers.
1946
1947 args[i].pass_on_stack is nonzero if the argument must at least be
1948 computed on the stack. It may then be loaded back into registers
1949 if args[i].reg is nonzero.
1950
1951 These decisions are driven by the FUNCTION_... macros and must agree
1952 with those made by function.c. */
1953
1954 /* See if this argument should be passed by invisible reference. */
d5cc9181 1955 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
0976078c 1956 type, argpos < n_named_args))
d7cdf113 1957 {
9969aaf6 1958 bool callee_copies;
d6e1acf6 1959 tree base = NULL_TREE;
1960
1961 callee_copies
d5cc9181 1962 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
6cdd5672 1963 type, argpos < n_named_args);
1964
1965 /* If we're compiling a thunk, pass through invisible references
1966 instead of making a copy. */
dd292d0a 1967 if (call_from_thunk_p
1968 || (callee_copies
1969 && !TREE_ADDRESSABLE (type)
1970 && (base = get_base_address (args[i].tree_value))
9c3d55b4 1971 && TREE_CODE (base) != SSA_NAME
9969aaf6 1972 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
d7cdf113 1973 {
1974 /* We may have turned the parameter value into an SSA name.
1975 Go back to the original parameter so we can take the
1976 address. */
1977 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1978 {
1979 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1980 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1981 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1982 }
1983 /* Argument setup code may have copied the value to a register. We
1984 revert that optimization now because the tail call code must
1985 use the original location. */
1986 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1987 && !MEM_P (DECL_RTL (args[i].tree_value))
1988 && DECL_INCOMING_RTL (args[i].tree_value)
1989 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1990 set_decl_rtl (args[i].tree_value,
1991 DECL_INCOMING_RTL (args[i].tree_value));
1992
1993 mark_addressable (args[i].tree_value);
1994
1995 /* We can't use sibcalls if a callee-copied argument is
1996 stored in the current function's frame. */
1997 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1998 {
1999 *may_tailcall = false;
2000 maybe_complain_about_tail_call (exp,
2001 "a callee-copied argument is"
2002 " stored in the current "
2003 " function's frame");
2004 }
9fd47435 2005
2006 args[i].tree_value = build_fold_addr_expr_loc (loc,
2007 args[i].tree_value);
2008 type = TREE_TYPE (args[i].tree_value);
2009
2010 if (*ecf_flags & ECF_CONST)
2011 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
f21add07 2012 }
2013 else
2014 {
2015 /* We make a copy of the object and pass the address to the
2016 function being called. */
2017 rtx copy;
2018
d0f062fb 2019 if (!COMPLETE_TYPE_P (type)
2020 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2021 || (flag_stack_check == GENERIC_STACK_CHECK
2022 && compare_tree_int (TYPE_SIZE_UNIT (type),
2023 STACK_CHECK_MAX_VAR_SIZE) > 0))
2024 {
2025 /* This is a variable-sized object. Make space on the stack
2026 for it. */
078a18a4 2027 rtx size_rtx = expr_size (args[i].tree_value);
2028
2029 if (*old_stack_level == 0)
2030 {
9eac0f2a 2031 emit_stack_save (SAVE_BLOCK, old_stack_level);
2032 *old_pending_adj = pending_stack_adjust;
2033 pending_stack_adjust = 0;
2034 }
2035
2036 /* We can pass TRUE as the 4th argument because we just
2037 saved the stack pointer and will restore it right after
2038 the call. */
2039 copy = allocate_dynamic_stack_space (size_rtx,
2040 TYPE_ALIGN (type),
2041 TYPE_ALIGN (type),
2042 max_int_size_in_bytes
2043 (type),
2044 true);
2045 copy = gen_rtx_MEM (BLKmode, copy);
3bdf5ad1 2046 set_mem_attributes (copy, type, 1);
2047 }
2048 else
9474e8ab 2049 copy = assign_temp (type, 1, 0);
d7cdf113 2050
ee45a32d 2051 store_expr (args[i].tree_value, copy, 0, false, false);
d7cdf113 2052
2053 /* Just change the const function to pure and then let
2054 the next test clear the pure based on
2055 callee_copies. */
2056 if (*ecf_flags & ECF_CONST)
2057 {
2058 *ecf_flags &= ~ECF_CONST;
2059 *ecf_flags |= ECF_PURE;
2060 }
2061
2062 if (!callee_copies && *ecf_flags & ECF_PURE)
2063 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2064
2065 args[i].tree_value
db3927fb 2066 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
9969aaf6 2067 type = TREE_TYPE (args[i].tree_value);
6de9cd9a 2068 *may_tailcall = false;
2069 maybe_complain_about_tail_call (exp,
2070 "argument must be passed"
2071 " by copying");
2072 }
2073 }
2074
8df83eae 2075 unsignedp = TYPE_UNSIGNED (type);
2076 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2077 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2078
2079 args[i].unsignedp = unsignedp;
2080 args[i].mode = mode;
7d167afd 2081
2082 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2083
2084 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
2085 argpos < n_named_args);
2086
2087 if (args[i].reg && CONST_INT_P (args[i].reg))
2088 {
2089 args[i].special_slot = args[i].reg;
2090 args[i].reg = NULL;
2091 }
2092
2093 /* If this is a sibling call and the machine has register windows, the
2094 register window has to be unwound before calling the routine, so
2095 arguments have to go into the incoming registers. */
2096 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2097 args[i].tail_call_reg
2098 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
2099 argpos < n_named_args);
2100 else
2101 args[i].tail_call_reg = args[i].reg;
7d167afd 2102
2103 if (args[i].reg)
2104 args[i].partial
2105 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
2106 argpos < n_named_args);
d7cdf113 2107
fe984136 2108 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
2109
2110 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2111 it means that we are to pass this arg in the register(s) designated
2112 by the PARALLEL, but also to pass it in the stack. */
2113 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2114 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2115 args[i].pass_on_stack = 1;
2116
2117 /* If this is an addressable type, we must preallocate the stack
2118 since we must evaluate the object into its final location.
2119
2120 If this is to be passed in both registers and the stack, it is simpler
2121 to preallocate. */
2122 if (TREE_ADDRESSABLE (type)
2123 || (args[i].pass_on_stack && args[i].reg != 0))
2124 *must_preallocate = 1;
2125
2126 /* No stack allocation and padding for bounds. */
2127 if (POINTER_BOUNDS_P (args[i].tree_value))
2128 ;
d7cdf113 2129 /* Compute the stack-size of this argument. */
2130 else if (args[i].reg == 0 || args[i].partial != 0
2131 || reg_parm_stack_space > 0
2132 || args[i].pass_on_stack)
2133 locate_and_pad_parm (mode, type,
2134#ifdef STACK_PARMS_IN_REG_PARM_AREA
2135 1,
2136#else
2137 args[i].reg != 0,
2138#endif
2e4ceca5 2139 reg_parm_stack_space,
2140 args[i].pass_on_stack ? 0 : args[i].partial,
2141 fndecl, args_size, &args[i].locate);
2142#ifdef BLOCK_REG_PADDING
2143 else
2144 /* The argument is passed entirely in registers. See at which
2145 end it should be padded. */
2146 args[i].locate.where_pad =
2147 BLOCK_REG_PADDING (mode, type,
2148 int_size_in_bytes (type) <= UNITS_PER_WORD);
2149#endif
f725a3ec 2150
2151 /* Update ARGS_SIZE, the total stack space for args so far. */
2152
2153 args_size->constant += args[i].locate.size.constant;
2154 if (args[i].locate.size.var)
2155 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2156
2157 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2158 have been used, etc. */
2159
2160 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
2161 type, argpos < n_named_args);
2162
2163 /* Store argument values for functions decorated with attribute
2164 alloc_size. */
2165 if (argpos == alloc_idx[0])
2166 alloc_args[0] = args[i].tree_value;
2167 else if (argpos == alloc_idx[1])
2168 alloc_args[1] = args[i].tree_value;
2169 }
2170
2171 if (alloc_args[0])
2172 {
2173 /* Check the arguments of functions decorated with attribute
2174 alloc_size. */
2175 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
d7cdf113 2176 }
2177
2178 /* Detect passing non-string arguments to functions expecting
2179 nul-terminated strings. */
2180 maybe_warn_nonstring_arg (fndecl, exp);
2181}
2182
2183/* Update ARGS_SIZE to contain the total size for the argument block.
2184 Return the original constant component of the argument block's size.
2185
2186 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2187 for arguments passed in registers. */
2188
a20c5714 2189static poly_int64
2190compute_argument_block_size (int reg_parm_stack_space,
2191 struct args_size *args_size,
033df0b9 2192 tree fndecl ATTRIBUTE_UNUSED,
5d059ed9 2193 tree fntype ATTRIBUTE_UNUSED,
d329e058 2194 int preferred_stack_boundary ATTRIBUTE_UNUSED)
599f37b6 2195{
a20c5714 2196 poly_int64 unadjusted_args_size = args_size->constant;
599f37b6 2197
2198 /* For accumulate outgoing args mode we don't need to align, since the frame
2199 will already be aligned. Align to STACK_BOUNDARY in order to prevent
f5143c46 2200 backends from generating misaligned frame sizes. */
2201 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2202 preferred_stack_boundary = STACK_BOUNDARY;
f73ad30e 2203
2204 /* Compute the actual size of the argument block required. The variable
2205 and constant sizes must be combined, the size may have to be rounded,
2206 and there may be a minimum required size. */
2207
2208 if (args_size->var)
2209 {
2210 args_size->var = ARGS_SIZE_TREE (*args_size);
2211 args_size->constant = 0;
2212
2213 preferred_stack_boundary /= BITS_PER_UNIT;
2214 if (preferred_stack_boundary > 1)
2215 {
2216 /* We don't handle this case yet. To handle it correctly we have
f5143c46 2217 to add the delta, round and subtract the delta.
1503a7ec 2218 Currently no machine description requires this support. */
2219 gcc_assert (multiple_p (stack_pointer_delta,
2220 preferred_stack_boundary));
2221 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2222 }
2223
2224 if (reg_parm_stack_space > 0)
2225 {
2226 args_size->var
2227 = size_binop (MAX_EXPR, args_size->var,
fed3cef0 2228 ssize_int (reg_parm_stack_space));
599f37b6 2229
2230 /* The area corresponding to register parameters is not to be counted in
2231 the size of the block we need. So make the adjustment. */
5d059ed9 2232 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2233 args_size->var
2234 = size_binop (MINUS_EXPR, args_size->var,
2235 ssize_int (reg_parm_stack_space));
2236 }
2237 }
2238 else
2239 {
c2f8b491 2240 preferred_stack_boundary /= BITS_PER_UNIT;
2241 if (preferred_stack_boundary < 1)
2242 preferred_stack_boundary = 1;
2243 args_size->constant = (aligned_upper_bound (args_size->constant
2244 + stack_pointer_delta,
2245 preferred_stack_boundary)
1503a7ec 2246 - stack_pointer_delta);
599f37b6 2247
2248 args_size->constant = upper_bound (args_size->constant,
2249 reg_parm_stack_space);
599f37b6 2250
5d059ed9 2251 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 2252 args_size->constant -= reg_parm_stack_space;
2253 }
2254 return unadjusted_args_size;
2255}
2256
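/* A worked example of the constant case above (illustrative numbers,
   assuming no register parm stack space): with a preferred stack boundary
   of 16 bytes, args_size->constant == 20 and stack_pointer_delta == 8,

     aligned_upper_bound (20 + 8, 16) - 8 == 32 - 8 == 24

   so 24 bytes of outgoing arguments leave the stack pointer 16-byte
   aligned, while the returned unadjusted size stays 20.  */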
19832c77 2257/* Precompute parameters as needed for a function call.
cc0b1adc 2258
f2d33f13 2259 FLAGS is mask of ECF_* constants.
cc0b1adc 2260
2261 NUM_ACTUALS is the number of arguments.
2262
2263 ARGS is an array containing information for each argument; this
2264 routine fills in the INITIAL_VALUE and VALUE fields for each
2265 precomputed argument. */
2266
2267static void
84b8030f 2268precompute_arguments (int num_actuals, struct arg_data *args)
2269{
2270 int i;
2271
3638733b 2272 /* If this is a libcall, then precompute all arguments so that we do not
82c82743 2273 get extraneous instructions emitted as part of the libcall sequence. */
2274
2275 /* If we preallocated the stack space, and some arguments must be passed
2276 on the stack, then we must precompute any parameter which contains a
2277 function call which will store arguments on the stack.
2278 Otherwise, evaluating the parameter may clobber previous parameters
2279 which have already been stored into the stack. (We have code to avoid
2280 such a case by saving the outgoing stack arguments, but it results in
2281 worse code.) */
84b8030f 2282 if (!ACCUMULATE_OUTGOING_ARGS)
82c82743 2283 return;
7ae4ad28 2284
cc0b1adc 2285 for (i = 0; i < num_actuals; i++)
82c82743 2286 {
cde0f3fd 2287 tree type;
ef4bddc2 2288 machine_mode mode;
ddef6bc7 2289
84b8030f 2290 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2291 continue;
2292
82c82743 2293 /* If this is an addressable type, we cannot pre-evaluate it. */
2294 type = TREE_TYPE (args[i].tree_value);
2295 gcc_assert (!TREE_ADDRESSABLE (type));
cc0b1adc 2296
82c82743 2297 args[i].initial_value = args[i].value
84217346 2298 = expand_normal (args[i].tree_value);
cc0b1adc 2299
cde0f3fd 2300 mode = TYPE_MODE (type);
2301 if (mode != args[i].mode)
2302 {
cde0f3fd 2303 int unsignedp = args[i].unsignedp;
2304 args[i].value
2305 = convert_modes (args[i].mode, mode,
2306 args[i].value, args[i].unsignedp);
cde0f3fd 2307
2308 /* CSE will replace this only if it contains the args[i].value
2309 pseudo, so convert it down to the declared mode using
2310 a SUBREG. */
2311 if (REG_P (args[i].value)
2312 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2313 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2314 {
2315 args[i].initial_value
2316 = gen_lowpart_SUBREG (mode, args[i].value);
2317 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
27be0c32 2318 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
82c82743 2319 }
2320 }
2321 }
2322}
2323
2324/* Given the current state of MUST_PREALLOCATE and information about
2325 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2326 compute and return the final value for MUST_PREALLOCATE. */
2327
2328static int
b8698a0f 2329finalize_must_preallocate (int must_preallocate, int num_actuals,
5039610b 2330 struct arg_data *args, struct args_size *args_size)
2331{
2332 /* See if we have or want to preallocate stack space.
2333
2334 If we would have to push a partially-in-regs parm
2335 before other stack parms, preallocate stack space instead.
2336
2337 If the size of some parm is not a multiple of the required stack
2338 alignment, we must preallocate.
2339
2340 If the total size of arguments that would otherwise create a copy in
2341 a temporary (such as a CALL) is more than half the total argument list
2342 size, preallocation is faster.
2343
2344 Another reason to preallocate is if we have a machine (like the m88k)
2345 where stack alignment is required to be maintained between every
2346 pair of insns, not just when the call is made. However, we assume here
2347 that such machines either do not have push insns (and hence preallocation
2348 would occur anyway) or the problem is taken care of with
2349 PUSH_ROUNDING. */
2350
2351 if (! must_preallocate)
2352 {
2353 int partial_seen = 0;
a20c5714 2354 poly_int64 copy_to_evaluate_size = 0;
2355 int i;
2356
2357 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2358 {
2359 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2360 partial_seen = 1;
2361 else if (partial_seen && args[i].reg == 0)
2362 must_preallocate = 1;
2363 /* We preallocate in case there are bounds passed
2364 in the bounds table, so that we have a precomputed
2365 address for bounds association. */
2366 else if (POINTER_BOUNDS_P (args[i].tree_value)
2367 && !args[i].reg)
2368 must_preallocate = 1;
2369
2370 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2371 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2372 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2373 || TREE_CODE (args[i].tree_value) == COND_EXPR
2374 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2375 copy_to_evaluate_size
2376 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2377 }
2378
2379 if (maybe_ne (args_size->constant, 0)
2380 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2381 must_preallocate = 1;
2382 }
2383 return must_preallocate;
2384}
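/* A worked example of the "more than half" heuristic above (illustrative
   numbers): if args_size->constant is 24 bytes and a single BLKmode CALL
   argument of 16 bytes would otherwise be evaluated into a temporary, then
   copy_to_evaluate_size * 2 == 32 >= 24, so MUST_PREALLOCATE is set and
   the argument can be evaluated directly into its stack slot.  */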
599f37b6 2385
2386/* If we preallocated stack space, compute the address of each argument
2387 and store it into the ARGS array.
2388
f725a3ec 2389 We need not ensure it is a valid memory address here; it will be
2390 validized when it is used.
2391
2392 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2393
2394static void
d329e058 2395compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2396{
2397 if (argblock)
2398 {
2399 rtx arg_reg = argblock;
2400 int i;
2401 poly_int64 arg_offset = 0;
2402
2403 if (GET_CODE (argblock) == PLUS)
2404 {
2405 arg_reg = XEXP (argblock, 0);
2406 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2407 }
2408
2409 for (i = 0; i < num_actuals; i++)
2410 {
2411 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2412 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
a45bdd02 2413 rtx addr;
bfc45551 2414 unsigned int align, boundary;
a20c5714 2415 poly_uint64 units_on_stack = 0;
ef4bddc2 2416 machine_mode partial_mode = VOIDmode;
2417
2418 /* Skip this parm if it will not be passed on the stack. */
2419 if (! args[i].pass_on_stack
2420 && args[i].reg != 0
2421 && args[i].partial == 0)
a45bdd02
JL
2422 continue;
2423
5b8b4a88
JJ
2424 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2425 continue;
2426
2427 /* Pointer Bounds are never passed on the stack. */
2428 if (POINTER_BOUNDS_P (args[i].tree_value))
2429 continue;
2430
a708f4b6 2431 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
0a81f074 2432 addr = plus_constant (Pmode, addr, arg_offset);
2433
2434 if (args[i].partial != 0)
2435 {
2436 /* Only part of the parameter is being passed on the stack.
2437 Generate a simple memory reference of the correct size. */
2438 units_on_stack = args[i].locate.size.constant;
a20c5714 2439 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
f4b31647 2440 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
7816b87e 2441 args[i].stack = gen_rtx_MEM (partial_mode, addr);
f5541398 2442 set_mem_size (args[i].stack, units_on_stack);
2443 }
2444 else
2445 {
2446 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2447 set_mem_attributes (args[i].stack,
2448 TREE_TYPE (args[i].tree_value), 1);
2449 }
2450 align = BITS_PER_UNIT;
2451 boundary = args[i].locate.boundary;
a20c5714 2452 poly_int64 offset_val;
76b0cbf8 2453 if (args[i].locate.where_pad != PAD_DOWNWARD)
bfc45551 2454 align = boundary;
a20c5714 2455 else if (poly_int_rtx_p (offset, &offset_val))
bfc45551 2456 {
2457 align = least_bit_hwi (boundary);
2458 unsigned int offset_align
2459 = known_alignment (offset_val) * BITS_PER_UNIT;
2460 if (offset_align != 0)
2461 align = MIN (align, offset_align);
2462 }
2463 set_mem_align (args[i].stack, align);
a45bdd02 2464
a708f4b6 2465 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
0a81f074 2466 addr = plus_constant (Pmode, addr, arg_offset);
2467
2468 if (args[i].partial != 0)
2469 {
2470 /* Only part of the parameter is being passed on the stack.
2471 Generate a simple memory reference of the correct size.
2472 */
2473 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
f5541398 2474 set_mem_size (args[i].stack_slot, units_on_stack);
2475 }
2476 else
2477 {
2478 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2479 set_mem_attributes (args[i].stack_slot,
2480 TREE_TYPE (args[i].tree_value), 1);
2481 }
bfc45551 2482 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2483
2484 /* Function incoming arguments may overlap with sibling call
2485 outgoing arguments and we cannot allow reordering of reads
2486 from function arguments with stores to outgoing arguments
2487 of sibling calls. */
2488 set_mem_alias_set (args[i].stack, 0);
2489 set_mem_alias_set (args[i].stack_slot, 0);
2490 }
2491 }
2492}
f725a3ec 2493
2494/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2495 in a call instruction.
2496
2497 FNDECL is the tree node for the target function. For an indirect call
2498 FNDECL will be NULL_TREE.
2499
09e2bf48 2500 ADDR is the operand 0 of CALL_EXPR for this call. */
2501
2502static rtx
d329e058 2503rtx_for_function_call (tree fndecl, tree addr)
2504{
2505 rtx funexp;
2506
2507 /* Get the function to call, in the form of RTL. */
2508 if (fndecl)
2509 {
ad960f56 2510 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
bbee5843 2511 TREE_USED (fndecl) = 1;
2512
2513 /* Get a SYMBOL_REF rtx for the function address. */
2514 funexp = XEXP (DECL_RTL (fndecl), 0);
2515 }
2516 else
2517 /* Generate an rtx (probably a pseudo-register) for the address. */
2518 {
2519 push_temp_slots ();
84217346 2520 funexp = expand_normal (addr);
f725a3ec 2521 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2522 }
2523 return funexp;
2524}
2525
2526/* Return the static chain for this function, if any. */
2527
2528rtx
2529rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2530{
2531 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2532 return NULL;
2533
2534 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2535}
2536
2537/* Internal state for internal_arg_pointer_based_exp and its helpers. */
2538static struct
2539{
2540 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2541 or NULL_RTX if none has been scanned yet. */
48810515 2542 rtx_insn *scan_start;
2543 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2544 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2545 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2546 with fixed offset, or PC if this is with variable or unknown offset. */
9771b263 2547 vec<rtx> cache;
2548} internal_arg_pointer_exp_state;
2549
e9f56944 2550static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2551
2552/* Helper function for internal_arg_pointer_based_exp. Scan insns in
2553 the tail call sequence, starting with the first insn that hasn't been
2554 scanned yet, and note for each pseudo on the LHS whether it is based
2555 on crtl->args.internal_arg_pointer or not, and what offset from
2556 that pointer it has. */
2557
2558static void
2559internal_arg_pointer_based_exp_scan (void)
2560{
48810515 2561 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2562
2563 if (scan_start == NULL_RTX)
2564 insn = get_insns ();
2565 else
2566 insn = NEXT_INSN (scan_start);
2567
2568 while (insn)
2569 {
2570 rtx set = single_set (insn);
2571 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2572 {
2573 rtx val = NULL_RTX;
2574 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2575 /* Punt on pseudos set multiple times. */
2576 if (idx < internal_arg_pointer_exp_state.cache.length ()
2577 && (internal_arg_pointer_exp_state.cache[idx]
2578 != NULL_RTX))
2579 val = pc_rtx;
2580 else
2581 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2582 if (val != NULL_RTX)
2583 {
9771b263 2584 if (idx >= internal_arg_pointer_exp_state.cache.length ())
2585 internal_arg_pointer_exp_state.cache
2586 .safe_grow_cleared (idx + 1);
9771b263 2587 internal_arg_pointer_exp_state.cache[idx] = val;
2588 }
2589 }
2590 if (NEXT_INSN (insn) == NULL_RTX)
2591 scan_start = insn;
2592 insn = NEXT_INSN (insn);
2593 }
2594
2595 internal_arg_pointer_exp_state.scan_start = scan_start;
2596}
2597
2598/* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2599 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2600 it with fixed offset, or PC if this is with variable or unknown offset.
2601 TOPLEVEL is true if the function is invoked at the topmost level. */
2602
2603static rtx
e9f56944 2604internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2605{
2606 if (CONSTANT_P (rtl))
2607 return NULL_RTX;
2608
2609 if (rtl == crtl->args.internal_arg_pointer)
2610 return const0_rtx;
2611
2612 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2613 return NULL_RTX;
2614
2615 poly_int64 offset;
2616 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2617 {
2618 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2619 if (val == NULL_RTX || val == pc_rtx)
2620 return val;
a20c5714 2621 return plus_constant (Pmode, val, offset);
2622 }
2623
2624 /* When called at the topmost level, scan pseudo assignments in between the
2625 last scanned instruction in the tail call sequence and the latest insn
2626 in that sequence. */
2627 if (toplevel)
2628 internal_arg_pointer_based_exp_scan ();
2629
2630 if (REG_P (rtl))
2631 {
2632 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2633 if (idx < internal_arg_pointer_exp_state.cache.length ())
2634 return internal_arg_pointer_exp_state.cache[idx];
2635
2636 return NULL_RTX;
2637 }
2638
2639 subrtx_iterator::array_type array;
2640 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2641 {
2642 const_rtx x = *iter;
2643 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2644 return pc_rtx;
2645 if (MEM_P (x))
2646 iter.skip_subrtxes ();
2647 }
2648
2649 return NULL_RTX;
2650}
2651
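/* Illustrative return values (a sketch; IAP abbreviates
   crtl->args.internal_arg_pointer):

     IAP                         => const0_rtx   (offset 0)
     (plus IAP (const_int 8))    => (const_int 8)
     (plus IAP (reg R))          => pc_rtx       (variable offset)
     (reg hard-frame-pointer)    => NULL_RTX     (not based on IAP)  */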
2652/* Return true if SIZE bytes starting from address ADDR might overlap an
2653 already-clobbered argument area. This function is used to determine
2654 if we should give up a sibcall. */
2655
2656static bool
a20c5714 2657mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
07eef816 2658{
2659 poly_int64 i;
2660 unsigned HOST_WIDE_INT start, end;
5275901c 2661 rtx val;
07eef816 2662
2663 if (bitmap_empty_p (stored_args_map)
2664 && stored_args_watermark == HOST_WIDE_INT_M1U)
4189fb53 2665 return false;
2666 val = internal_arg_pointer_based_exp (addr, true);
2667 if (val == NULL_RTX)
2668 return false;
a20c5714 2669 else if (!poly_int_rtx_p (val, &i))
6c3cb698 2670 return true;
2671
2672 if (known_eq (size, 0U))
2673 return false;
2674
2675 if (STACK_GROWS_DOWNWARD)
2676 i -= crtl->args.pretend_args_size;
2677 else
2678 i += crtl->args.pretend_args_size;
2679
2680 if (ARGS_GROW_DOWNWARD)
2681 i = -i - size;
2682
2683 /* We can ignore any references to the function's pretend args,
2684 which at this point would manifest as negative values of I. */
2685 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2686 return false;
07eef816 2687
2688 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2689 if (!(i + size).is_constant (&end))
2690 end = HOST_WIDE_INT_M1U;
2691
2692 if (end > stored_args_watermark)
2693 return true;
2694
2695 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2696 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2697 if (bitmap_bit_p (stored_args_map, k))
2698 return true;
2699
2700 return false;
2701}
2702
2703/* Do the register loads required for any wholly-register parms or any
2704 parms which are passed both on the stack and in a register. Their
f725a3ec 2705 expressions were already evaluated.
2706
2707 Mark all register-parms as living through the call, putting these USE
2708 insns in the CALL_INSN_FUNCTION_USAGE field.
2709
40b0345d 2710 When IS_SIBCALL, perform the check_sibcall_argument_overlap
0cdca92b 2711 checking, setting *SIBCALL_FAILURE if appropriate. */
2712
2713static void
2714load_register_parameters (struct arg_data *args, int num_actuals,
2715 rtx *call_fusage, int flags, int is_sibcall,
2716 int *sibcall_failure)
2717{
2718 int i, j;
2719
21a3b983 2720 for (i = 0; i < num_actuals; i++)
21a3b983 2721 {
2722 rtx reg = ((flags & ECF_SIBCALL)
2723 ? args[i].tail_call_reg : args[i].reg);
2724 if (reg)
2725 {
2726 int partial = args[i].partial;
2727 int nregs;
2728 poly_int64 size = 0;
2729 HOST_WIDE_INT const_size = 0;
48810515 2730 rtx_insn *before_arg = get_last_insn ();
2731 /* Set non-negative if we must move a word at a time, even if
2732 just one word (e.g., partial == 4 && mode == DFmode). Set
2733 to -1 if we just use a normal move insn. This value can be
2734 zero if the argument is a zero-size structure. */
6e985040 2735 nregs = -1;
2736 if (GET_CODE (reg) == PARALLEL)
2737 ;
2738 else if (partial)
2739 {
2740 gcc_assert (partial % UNITS_PER_WORD == 0);
2741 nregs = partial / UNITS_PER_WORD;
2742 }
2743 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2744 {
2745 /* Variable-sized parameters should be described by a
2746 PARALLEL instead. */
2747 const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2748 gcc_assert (const_size >= 0);
2749 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2750 size = const_size;
2751 }
2752 else
2753 size = GET_MODE_SIZE (args[i].mode);
2754
2755 /* Handle calls that pass values in multiple non-contiguous
2756 locations. The Irix 6 ABI has examples of this. */
2757
2758 if (GET_CODE (reg) == PARALLEL)
8df3dbb7 2759 emit_group_move (reg, args[i].parallel_value);
2760
2761 /* In the simple case, just do the move. If normal partial, store_one_arg
2762 has already loaded the register for us. In all other cases,
2763 load the register(s) from memory. */
2764
2765 else if (nregs == -1)
2766 {
2767 emit_move_insn (reg, args[i].value);
6e985040 2768#ifdef BLOCK_REG_PADDING
2769 /* Handle the case where we have a value that needs shifting
2770 up to the msb, e.g. a QImode value when we're padding
2771 upward on a BYTES_BIG_ENDIAN machine. */
2772 if (args[i].locate.where_pad
2773 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
9206d736 2774 {
2775 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2776 if (maybe_lt (size, UNITS_PER_WORD))
2777 {
2778 rtx x;
2779 poly_int64 shift
2780 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2781
2782 /* Assigning REG here rather than a temp makes
2783 CALL_FUSAGE report the whole reg as used.
2784 Strictly speaking, the call only uses SIZE
2785 bytes at the msb end, but it doesn't seem worth
2786 generating rtl to say that. */
2787 reg = gen_rtx_REG (word_mode, REGNO (reg));
2788 x = expand_shift (LSHIFT_EXPR, word_mode,
2789 reg, shift, reg, 1);
2790 if (x != reg)
2791 emit_move_insn (reg, x);
2792 }
9206d736 2793 }
6e985040 2794#endif
9206d736 2795 }
2796
2797 /* If we have pre-computed the values to put in the registers in
2798 the case of non-aligned structures, copy them in now. */
2799
2800 else if (args[i].n_aligned_regs != 0)
2801 for (j = 0; j < args[i].n_aligned_regs; j++)
2802 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2803 args[i].aligned_regs[j]);
2804
3b2ee170 2805 else if (partial == 0 || args[i].pass_on_stack)
6e985040 2806 {
2807 /* SIZE and CONST_SIZE are 0 for partial arguments and
2808 the size of a BLKmode type otherwise. */
2809 gcc_checking_assert (known_eq (size, const_size));
1a8cb155 2810 rtx mem = validize_mem (copy_rtx (args[i].value));
6e985040 2811
2812 /* Check for overlap with already clobbered argument area,
2813 provided that this has non-zero size. */
07eef816 2814 if (is_sibcall
95fe7b48 2815 && const_size != 0
a20c5714 2816 && (mem_might_overlap_already_clobbered_arg_p
95fe7b48 2817 (XEXP (args[i].value, 0), const_size)))
2818 *sibcall_failure = 1;
2819
95fe7b48 2820 if (const_size % UNITS_PER_WORD == 0
2821 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2822 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2823 else
2824 {
2825 if (nregs > 1)
2826 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2827 args[i].mode);
2828 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2829 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
95fe7b48 2830 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
ee45a32d 2831 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2832 word_mode, word_mode, false,
2833 NULL);
2834 if (BYTES_BIG_ENDIAN)
2835 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2836 BITS_PER_WORD - bitsize, dest, 1);
2837 if (x != dest)
2838 emit_move_insn (dest, x);
2839 }
2840
6e985040 2841 /* Handle a BLKmode that needs shifting. */
95fe7b48 2842 if (nregs == 1 && const_size < UNITS_PER_WORD
03ca1672 2843#ifdef BLOCK_REG_PADDING
76b0cbf8 2844 && args[i].locate.where_pad == PAD_DOWNWARD
2845#else
2846 && BYTES_BIG_ENDIAN
2847#endif
984b2054 2848 )
6e985040 2849 {
984b2054 2850 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
95fe7b48 2851 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
2852 enum tree_code dir = (BYTES_BIG_ENDIAN
2853 ? RSHIFT_EXPR : LSHIFT_EXPR);
2854 rtx x;
6e985040 2855
2856 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2857 if (x != dest)
2858 emit_move_insn (dest, x);
6e985040 2859 }
6e985040 2860 }
21a3b983 2861
2862 /* When a parameter is a block, and perhaps in other cases, it is
2863 possible that it did a load from an argument slot that was
32dd366d 2864 already clobbered. */
2865 if (is_sibcall
2866 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2867 *sibcall_failure = 1;
2868
2869 /* Handle calls that pass values in multiple non-contiguous
2870 locations. The Irix 6 ABI has examples of this. */
2871 if (GET_CODE (reg) == PARALLEL)
2872 use_group_regs (call_fusage, reg);
2873 else if (nregs == -1)
2874 use_reg_mode (call_fusage, reg,
2875 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2876 else if (nregs > 0)
2877 use_regs (call_fusage, REGNO (reg), nregs);
2878 }
2879 }
2880}
2881
2882/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2883 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2884 bytes, then we would need to push some additional bytes to pad the
a20c5714 2885 arguments. So, we try to compute an adjustment to the stack pointer for an
2886 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2887 bytes. Then, when the arguments are pushed the stack will be perfectly
a20c5714 2888 aligned.
739fb049 2889
2890 Return true if this optimization is possible, storing the adjustment
2891 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2892 bytes that should be popped after the call. */
2893
2894static bool
2895combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2896 poly_int64 unadjusted_args_size,
d329e058 2897 struct args_size *args_size,
95899b34 2898 unsigned int preferred_unit_stack_boundary)
2899{
2900 /* The number of bytes to pop so that the stack will be
2901 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
a20c5714 2902 poly_int64 adjustment;
2903 /* The alignment of the stack after the arguments are pushed, if we
2904 just pushed the arguments without adjusting the stack here. */
95899b34 2905 unsigned HOST_WIDE_INT unadjusted_alignment;
739fb049 2906
2907 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2908 preferred_unit_stack_boundary,
2909 &unadjusted_alignment))
2910 return false;
2911
2912 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2913 as possible -- leaving just enough left to cancel out the
2914 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2915 PENDING_STACK_ADJUST is non-negative, and congruent to
2916 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2917
2918 /* Begin by trying to pop all the bytes. */
2919 unsigned HOST_WIDE_INT tmp_misalignment;
2920 if (!known_misalignment (pending_stack_adjust,
2921 preferred_unit_stack_boundary,
2922 &tmp_misalignment))
2923 return false;
2924 unadjusted_alignment -= tmp_misalignment;
2925 adjustment = pending_stack_adjust;
2926 /* Push enough additional bytes that the stack will be aligned
2927 after the arguments are pushed. */
2928 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2929 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
f725a3ec 2930
2931 /* We need to know whether the adjusted argument size
2932 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2933 or a deallocation. */
2934 if (!ordered_p (adjustment, unadjusted_args_size))
2935 return false;
2936
2937 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2938 bytes after the call. The right number is the entire
2939 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2940 by the arguments in the first place. */
f725a3ec 2941 args_size->constant
2942 = pending_stack_adjust - adjustment + unadjusted_args_size;
2943
2944 *adjustment_out = adjustment;
2945 return true;
2946}
2947
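/* A worked example (illustrative numbers): with a 16-byte preferred unit
   stack boundary, stack_pointer_delta == 0, pending_stack_adjust == 24 and
   unadjusted_args_size == 28, pushing the arguments directly would leave
   the stack misaligned by 28 % 16 == 12 bytes, and popping all 24 pending
   bytes would change that residue by 24 % 16 == 8, leaving 4.  The pop is
   therefore reduced by 16 - 4 == 12, giving ADJUSTMENT == 24 - 12 == 12,
   and ARGS_SIZE->CONSTANT becomes 24 - 12 + 28 == 40 bytes to pop after
   the call.  Check: popping 12 and then pushing 28 moves the stack pointer
   by 16 -- a multiple of the boundary.  */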
2948/* Scan expression X for dereferences of argument slots that have
2949 already been clobbered by tail call arguments (as noted in the
2950 stored_args_map bitmap).
2951 Return nonzero if X dereferences such argument slots,
2952 zero otherwise. */
2953
2954static int
d329e058 2955check_sibcall_argument_overlap_1 (rtx x)
2956{
2957 RTX_CODE code;
2958 int i, j;
2959 const char *fmt;
2960
2961 if (x == NULL_RTX)
2962 return 0;
2963
2964 code = GET_CODE (x);
2965
2966 /* We need not check the operands of the CALL expression itself. */
2967 if (code == CALL)
2968 return 0;
2969
c67846f2 2970 if (code == MEM)
2971 return (mem_might_overlap_already_clobbered_arg_p
2972 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
c67846f2 2973
f725a3ec 2974 /* Scan all subexpressions. */
2975 fmt = GET_RTX_FORMAT (code);
2976 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2977 {
2978 if (*fmt == 'e')
2979 {
2980 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2981 return 1;
2982 }
c67846f2 2983 else if (*fmt == 'E')
2984 {
2985 for (j = 0; j < XVECLEN (x, i); j++)
2986 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2987 return 1;
2988 }
2989 }
2990 return 0;
2991}
2992
2993/* Scan the sequence after INSN for dereferences of argument slots that
2994 have already been clobbered by tail call arguments (as noted in the
2995 stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the stack slots
2996 for ARG to the stored_args_map bitmap afterwards (when ARG is a
2997 register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the
2998 sequence after INSN dereferences such argument slots, zero otherwise. */
2999
3000static int
3001check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3002 int mark_stored_args_map)
f725a3ec 3003{
3004 poly_uint64 low, high;
3005 unsigned HOST_WIDE_INT const_low, const_high;
3006
3007 if (insn == NULL_RTX)
3008 insn = get_insns ();
3009 else
3010 insn = NEXT_INSN (insn);
3011
3012 for (; insn; insn = NEXT_INSN (insn))
3013 if (INSN_P (insn)
3014 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3015 break;
3016
3017 if (mark_stored_args_map)
3018 {
3019 if (ARGS_GROW_DOWNWARD)
3020 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3021 else
3022 low = arg->locate.slot_offset.constant;
a20c5714 3023 high = low + arg->locate.size.constant;
d60eab50 3024
3025 const_low = constant_lower_bound (low);
3026 if (high.is_constant (&const_high))
3027 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3028 bitmap_set_bit (stored_args_map, i);
3029 else
3030 stored_args_watermark = MIN (stored_args_watermark, const_low);
0cdca92b 3031 }
3032 return insn != NULL_RTX;
3033}
3034
3035/* Given that a function returns a value of mode MODE at the most
3036 significant end of hard register VALUE, shift VALUE left or right
3037 as specified by LEFT_P. Return true if some action was needed. */
c988af2b 3038
bef5d8b6 3039bool
ef4bddc2 3040shift_return_value (machine_mode mode, bool left_p, rtx value)
c988af2b 3041{
bef5d8b6 3042 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
abd3c800 3043 machine_mode value_mode = GET_MODE (value);
3044 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3045
3046 if (known_eq (shift, 0))
3047 return false;
3048
3049 /* Use ashr rather than lshr for right shifts. This is for the benefit
3050 of the MIPS port, which requires SImode values to be sign-extended
3051 when stored in 64-bit registers. */
3052 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3053 value, gen_int_shift_amount (value_mode, shift),
3054 value, 1, OPTAB_WIDEN))
3055 gcc_unreachable ();
3056 return true;
3057}
3058
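/* An illustrative case (a sketch): on a 64-bit big-endian target that
   returns an SImode value at the most significant end of a DImode hard
   register, shift_return_value (SImode, true, value) emits a left shift by
   GET_MODE_BITSIZE (DImode) - GET_MODE_BITSIZE (SImode) == 32 bits to move
   the payload into place.  Right shifts (LEFT_P false) use an arithmetic
   shift so that, e.g., the MIPS requirement that SImode values be
   sign-extended in 64-bit registers is preserved.  */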
3059/* If X is a likely-spilled register value, copy it to a pseudo
3060 register and return that register. Return X otherwise. */
3061
3062static rtx
3063avoid_likely_spilled_reg (rtx x)
3064{
82d6e6fc 3065 rtx new_rtx;
3066
3067 if (REG_P (x)
3068 && HARD_REGISTER_P (x)
07b8f0a8 3069 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3070 {
3071 /* Make sure that we generate a REG rather than a CONCAT.
3072 Moves into CONCATs can need nontrivial instructions,
3073 and the whole point of this function is to avoid
3074 using the hard register directly in such a situation. */
3075 generating_concat_p = 0;
82d6e6fc 3076 new_rtx = gen_reg_rtx (GET_MODE (x));
3fb30019 3077 generating_concat_p = 1;
3078 emit_move_insn (new_rtx, x);
3079 return new_rtx;
3080 }
3081 return x;
3082}
3083
3084/* Helper function for expand_call.
3085 Return false if EXP is not implementable as a sibling call. */
3086
3087static bool
3088can_implement_as_sibling_call_p (tree exp,
3089 rtx structure_value_addr,
3090 tree funtype,
dfbdde16 3091 int reg_parm_stack_space ATTRIBUTE_UNUSED,
b40d90e6
DM
3092 tree fndecl,
3093 int flags,
3094 tree addr,
3095 const args_size &args_size)
3096{
3097 if (!targetm.have_sibcall_epilogue ())
9a385c2d
DM
3098 {
3099 maybe_complain_about_tail_call
3100 (exp,
3101 "machine description does not have"
3102 " a sibcall_epilogue instruction pattern");
3103 return false;
3104 }
b40d90e6
DM
3105
3106 /* Doing sibling call optimization needs some work, since
3107 structure_value_addr can be allocated on the stack.
3108 It does not seem worth the effort since few optimizable
3109 sibling calls will return a structure. */
3110 if (structure_value_addr != NULL_RTX)
9a385c2d
DM
3111 {
3112 maybe_complain_about_tail_call (exp, "callee returns a structure");
3113 return false;
3114 }
b40d90e6
DM
3115
3116#ifdef REG_PARM_STACK_SPACE
3117 /* If outgoing reg parm stack space changes, we can not do sibcall. */
3118 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3119 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3120 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
9a385c2d
DM
3121 {
3122 maybe_complain_about_tail_call (exp,
3123 "inconsistent size of stack space"
3124 " allocated for arguments which are"
3125 " passed in registers");
3126 return false;
3127 }
b40d90e6
DM
3128#endif
3129
3130 /* Check whether the target is able to optimize the call
3131 into a sibcall. */
3132 if (!targetm.function_ok_for_sibcall (fndecl, exp))
9a385c2d
DM
3133 {
3134 maybe_complain_about_tail_call (exp,
3135 "target is not able to optimize the"
3136 " call into a sibling call");
3137 return false;
3138 }
b40d90e6
DM
3139
3140 /* Functions that do not return exactly once may not be sibcall
3141 optimized. */
9a385c2d
DM
3142 if (flags & ECF_RETURNS_TWICE)
3143 {
3144 maybe_complain_about_tail_call (exp, "callee returns twice");
3145 return false;
3146 }
3147 if (flags & ECF_NORETURN)
3148 {
3149 maybe_complain_about_tail_call (exp, "callee does not return");
3150 return false;
3151 }
b40d90e6
DM
3152
3153 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
9a385c2d
DM
3154 {
3155 maybe_complain_about_tail_call (exp, "volatile function type");
3156 return false;
3157 }
b40d90e6
DM
3158
3159 /* If the called function is nested in the current one, it might access
3160 some of the caller's arguments, but could clobber them beforehand if
3161 the argument areas are shared. */
3162 if (fndecl && decl_function_context (fndecl) == current_function_decl)
9a385c2d
DM
3163 {
3164 maybe_complain_about_tail_call (exp, "nested function");
3165 return false;
3166 }
b40d90e6
DM
3167
3168 /* If this function requires more stack slots than the current
3169 function, we cannot change it into a sibling call.
3170 crtl->args.pretend_args_size is not part of the
3171 stack allocated by our caller. */
a20c5714
RS
3172 if (maybe_gt (args_size.constant,
3173 crtl->args.size - crtl->args.pretend_args_size))
9a385c2d
DM
3174 {
3175 maybe_complain_about_tail_call (exp,
3176 "callee required more stack slots"
3177 " than the caller");
3178 return false;
3179 }
b40d90e6
DM
3180
3181 /* If the callee pops its own arguments, then it must pop exactly
3182 the same number of arguments as the current function. */
a20c5714
RS
3183 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3184 args_size.constant),
3185 targetm.calls.return_pops_args (current_function_decl,
3186 TREE_TYPE
3187 (current_function_decl),
3188 crtl->args.size)))
9a385c2d
DM
3189 {
3190 maybe_complain_about_tail_call (exp,
3191 "inconsistent number of"
3192 " popped arguments");
3193 return false;
3194 }
b40d90e6
DM
3195
3196 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
9a385c2d
DM
3197 {
3198 maybe_complain_about_tail_call (exp, "frontend does not support"
3199 " sibling call");
3200 return false;
3201 }
b40d90e6
DM
3202
3203 /* All checks passed. */
3204 return true;
3205}
3206
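/* Usage note: expand_call below runs these checks once it has decided
   to attempt a sibcall.  On failure, maybe_complain_about_tail_call
   turns the reason string into a hard error only for calls marked
   CALL_EXPR_MUST_TAIL_CALL; otherwise the call is silently expanded as
   a normal call.  */
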
/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx_insn *normal_call_insns = NULL;
  /* Sequence of insns to perform a tail "call".  */
  rtx_insn *tail_call_insns = NULL;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  tree rettype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Register(s) in which bounds are returned.  */
  rtx valbnd = NULL;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  poly_int64 struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  poly_int64 unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ and ERF_ flags.  */
  int flags = 0;
  int return_flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
  char *stack_usage_map_buf = NULL;

  poly_int64 old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  poly_int64 old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  poly_int64 old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
      return_flags |= decl_return_flags (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
      if (CALL_EXPR_BY_DESCRIPTOR (exp))
        flags |= ECF_BY_DESCRIPTOR;
    }
  rettype = TREE_TYPE (exp);

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (rettype))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a non-looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (!(flags & ECF_LOOPING_CONST_OR_PURE))
      && (ignore || target == const0_rtx
          || TYPE_MODE (rettype) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

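  /* So e.g. a call "(void) f (g ())" to a const or pure "f" only
     expands "g ()" for its side effects; no code for "f" itself is
     emitted.  */
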
#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
#endif

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
      && reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fntype))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
        pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
        if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
          struct_value_size = -1;

        /* Even if it is semantically safe to use the target as the return
           slot, it may be not sufficiently aligned for the return type.  */
        if (CALL_EXPR_RETURN_SLOT_OPT (exp)
            && target
            && MEM_P (target)
            /* If rettype is addressable, we may not create a temporary.
               If target is properly aligned at runtime and the compiler
               just doesn't know about it, it will work fine, otherwise it
               will be UB.  */
            && (TREE_ADDRESSABLE (rettype)
                || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
                     && targetm.slow_unaligned_access (TYPE_MODE (rettype),
                                                       MEM_ALIGN (target)))))
          structure_value_addr = XEXP (target, 0);
        else
          {
            /* For variable-sized objects, we must be called with a target
               specified.  If we were to allocate space on the stack here,
               we would have no way of knowing when to free it.  */
            rtx d = assign_temp (rettype, 1, 1);
            structure_value_addr = XEXP (d, 0);
            target = 0;
          }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
      /* Without automatic stack alignment, we can't increase preferred
         stack boundary.  With automatic stack alignment, it is
         unnecessary since unless we can guarantee that all callers will
         align the outgoing stack properly, callee has to align its
         stack anyway.  */
      if (i
          && i->preferred_incoming_stack_boundary
          && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
        preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  */
  if (targetm.calls.split_complex_arg)
    {
      call_expr_arg_iterator iter;
      tree arg;
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        {
          tree type = TREE_TYPE (arg);
          if (type && TREE_CODE (type) == COMPLEX_TYPE
              && targetm.calls.split_complex_arg (type))
            num_complex_actuals++;
        }
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
         virtual_outgoing_args_rtx, we can always use it.  If it
         is not a REG, we must always copy it into a register.
         If it is virtual_outgoing_args_rtx, we must copy it to another
         register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
                  || (ACCUMULATE_OUTGOING_ARGS
                      && stack_arg_under_construction
                      && structure_value_addr == virtual_outgoing_args_rtx)
                  ? copy_addr_to_reg (convert_memory_address
                                      (Pmode, structure_value_addr))
                  : structure_value_addr);

      structure_value_addr_value =
        make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
      structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  num_actuals =
    call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
         /* Count the struct value address, if it is passed as a parm.  */
         + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (args_so_far))
    ;
  else if (type_arg_types != 0
           && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;

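  /* For example, for a variadic declaration "int f (int a, ...)" called
     as "f (x, y, z)", type_arg_types lists only "int", so the raw count
     gives n_named_args == 1; the adjustment above then keeps it at 1,
     drops it to 0, or raises it to num_actuals == 3, depending on the
     target's strict_argument_naming and pretend_outgoing_varargs_named
     hooks.  */
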
  /* Make a vector to hold all the information about each arg.  */
  args = XCNEWVEC (struct arg_data, num_actuals);

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
                                   n_named_args, exp,
                                   structure_value_addr_value, fndecl, fntype,
                                   args_so_far, reg_parm_stack_space,
                                   &old_stack_level, &old_pending_adj,
                                   &must_preallocate, &flags,
                                   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    must_preallocate = 1;

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
                                                num_actuals, args,
                                                &args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
          || reg_mentioned_p (virtual_outgoing_args_rtx,
                              structure_value_addr))
      && (args_size.var
          || (!ACCUMULATE_OUTGOING_ARGS
              && maybe_ne (args_size.constant, 0))))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;

  /* If the user has marked the function as requiring tail-call
     optimization, attempt it.  */
  if (must_tail_call)
    try_tail_call = 1;

  /* Remaining reasons for tail call optimization to fail.  */
  if (try_tail_call)
    try_tail_call = can_implement_as_sibling_call_p (exp,
                                                     structure_value_addr,
                                                     funtype,
                                                     reg_parm_stack_space,
                                                     fndecl,
                                                     flags, addr, args_size);

  /* Check if caller and callee disagree in promotion of function
     return value.  */
  if (try_tail_call)
    {
      machine_mode caller_mode, caller_promoted_mode;
      machine_mode callee_mode, callee_promoted_mode;
      int caller_unsignedp, callee_unsignedp;
      tree caller_res = DECL_RESULT (current_function_decl);

      caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
      caller_mode = DECL_MODE (caller_res);
      callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
      callee_mode = TYPE_MODE (TREE_TYPE (funtype));
      caller_promoted_mode
        = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
                                 &caller_unsignedp,
                                 TREE_TYPE (current_function_decl), 1);
      callee_promoted_mode
        = promote_function_mode (TREE_TYPE (funtype), callee_mode,
                                 &callee_unsignedp,
                                 funtype, 1);
      if (caller_mode != VOIDmode
          && (caller_promoted_mode != callee_promoted_mode
              || ((caller_mode != caller_promoted_mode
                   || callee_mode != callee_promoted_mode)
                  && (caller_unsignedp != callee_unsignedp
                      || partial_subreg_p (caller_mode, callee_mode)))))
        {
          try_tail_call = 0;
          maybe_complain_about_tail_call (exp,
                                          "caller and callee disagree in"
                                          " promotion of function"
                                          " return value");
        }
    }

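  /* For instance, on a target that promotes return values to word mode,
     a caller returning "signed char" cannot sibcall a function
     returning "unsigned char": both promote to word mode, but with
     different extensions, so the caller would have to re-extend the
     value after the call, which a sibcall cannot do.  */
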
  /* Ensure current function's preferred stack boundary is at least
     what we need.  Stack alignment may also increase preferred stack
     boundary.  */
  if (crtl->preferred_stack_boundary < preferred_stack_boundary)
    crtl->preferred_stack_boundary = preferred_stack_boundary;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
         recursion "call".  That way we know any adjustment after the tail
         recursion call can be ignored if we indeed use the tail
         call expansion.  */
      saved_pending_stack_adjust save;
      rtx_insn *insns, *before_call, *after_args;
      rtx next_arg_reg;

      if (pass == 0)
        {
          /* State variables we need to save and restore between
             iterations.  */
          save_pending_stack_adjust (&save);
        }
      if (pass)
        flags &= ~ECF_SIBCALL;
      else
        flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
         through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

         From this point on, if the sibling call fails, we want to set
         sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
         Also, do all pending adjustments now if there is any chance
         this might be a call to alloca or if we are expanding a sibling
         call sequence.
         Also do the adjustments before a throwing call, otherwise
         exception handling can fail; PR 19225.  */
      if (maybe_ge (pending_stack_adjust, 32)
          || (maybe_ne (pending_stack_adjust, 0)
              && (flags & ECF_MAY_BE_ALLOCA))
          || (maybe_ne (pending_stack_adjust, 0)
              && flag_exceptions && !(flags & ECF_NOTHROW))
          || pass == 0)
        do_pending_stack_adjust ();

      /* Precompute any arguments as needed.  */
      if (pass)
        precompute_arguments (num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
         if a libcall is deleted.  */
      if (pass && (flags & ECF_MALLOC))
        start_sequence ();

      if (pass == 0
          && crtl->stack_protect_guard
          && targetm.stack_protect_runtime_enabled_p ())
        stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
         and constant sizes must be combined, the size may have to be rounded,
         and there may be a minimum required size.  When generating a sibcall
         pattern, do not round up, since we'll be re-using whatever space our
         caller provided.  */
      unadjusted_args_size
        = compute_argument_block_size (reg_parm_stack_space,
                                       &adjusted_args_size,
                                       fndecl, fntype,
                                       (pass == 0 ? 0
                                        : preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
         incoming argument block.  */
      if (pass == 0)
        {
          argblock = crtl->args.internal_arg_pointer;
          if (STACK_GROWS_DOWNWARD)
            argblock
              = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
          else
            argblock
              = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);

          HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
          stored_args_map = sbitmap_alloc (map_size);
          bitmap_clear (stored_args_map);
          stored_args_watermark = HOST_WIDE_INT_M1U;
        }

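      /* The cleared bitmap plus an all-ones watermark mean "no argument
         slots clobbered yet": check_sibcall_argument_overlap above sets
         a bit (or lowers the watermark, for non-constant offsets) for
         each byte of the incoming argument area that a sibcall argument
         store has overwritten.  */
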
      /* If we have no actual push instructions, or shouldn't use them,
         make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
        {
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack arg is
                 being constructed at the old stack level.  Pushing the stack
                 gets a clean outgoing argument block.  */
              old_stack_arg_under_construction = stack_arg_under_construction;
              stack_arg_under_construction = 0;
            }
          argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
          if (flag_stack_usage_info)
            current_function_has_unbounded_dynamic_stack_size = 1;
        }
      else
        {
          /* Note that we must go through the motions of allocating an argument
             block even if the size is zero because we may be storing args
             in the area reserved for register arguments, which may be part of
             the stack frame.  */

          poly_int64 needed = adjusted_args_size.constant;

          /* Store the maximum argument space used.  It will be pushed by
             the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
             checking).  */

          crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
                                                  needed);

          if (must_preallocate)
            {
              if (ACCUMULATE_OUTGOING_ARGS)
                {
                  /* Since the stack pointer will never be pushed, it is
                     possible for the evaluation of a parm to clobber
                     something we have already written to the stack.
                     Since most function calls on RISC machines do not use
                     the stack, this is uncommon, but must work correctly.

                     Therefore, we save any area of the stack that was already
                     written and that we are using.  Here we set up to do this
                     by making a new stack usage map from the old one.  The
                     actual save will be done by store_one_arg.

                     Another approach might be to try to reorder the argument
                     evaluations to avoid this conflicting stack usage.  */

                  /* Since we will be writing into the entire argument area,
                     the map must be allocated for its entire size, not just
                     the part that is the responsibility of the caller.  */
                  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
                    needed += reg_parm_stack_space;

                  poly_int64 limit = needed;
                  if (ARGS_GROW_DOWNWARD)
                    limit += 1;

                  /* For polynomial sizes, this is the maximum possible
                     size needed for arguments with a constant size
                     and offset.  */
                  HOST_WIDE_INT const_limit = constant_lower_bound (limit);
                  highest_outgoing_arg_in_use
                    = MAX (initial_highest_arg_in_use, const_limit);

                  free (stack_usage_map_buf);
                  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;

                  if (initial_highest_arg_in_use)
                    memcpy (stack_usage_map, initial_stack_usage_map,
                            initial_highest_arg_in_use);

                  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
                    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                            (highest_outgoing_arg_in_use
                             - initial_highest_arg_in_use));
                  needed = 0;

                  /* The address of the outgoing argument list must not be
                     copied to a register here, because argblock would be left
                     pointing to the wrong place after the call to
                     allocate_dynamic_stack_space below.  */

                  argblock = virtual_outgoing_args_rtx;
                }
              else
                {
                  /* Try to reuse some or all of the pending_stack_adjust
                     to get this space.  */
                  if (inhibit_defer_pop == 0
                      && (combine_pending_stack_adjustment_and_call
                          (&needed,
                           unadjusted_args_size,
                           &adjusted_args_size,
                           preferred_unit_stack_boundary)))
                    {
                      /* combine_pending_stack_adjustment_and_call computes
                         an adjustment before the arguments are allocated.
                         Account for them and see whether or not the stack
                         needs to go up or down.  */
                      needed = unadjusted_args_size - needed;

                      /* Checked by
                         combine_pending_stack_adjustment_and_call.  */
                      gcc_checking_assert (ordered_p (needed, 0));
                      if (maybe_lt (needed, 0))
                        {
                          /* We're releasing stack space.  */
                          /* ??? We can avoid any adjustment at all if we're
                             already aligned.  FIXME.  */
                          pending_stack_adjust = -needed;
                          do_pending_stack_adjust ();
                          needed = 0;
                        }
                      else
                        /* We need to allocate space.  We'll do that in
                           push_block below.  */
                        pending_stack_adjust = 0;
                    }

                  /* Special case this because overhead of `push_block' in
                     this case is non-trivial.  */
                  if (known_eq (needed, 0))
                    argblock = virtual_outgoing_args_rtx;
                  else
                    {
                      rtx needed_rtx = gen_int_mode (needed, Pmode);
                      argblock = push_block (needed_rtx, 0, 0);
                      if (ARGS_GROW_DOWNWARD)
                        argblock = plus_constant (Pmode, argblock, needed);
                    }

                  /* We only really need to call `copy_to_reg' in the case
                     where push insns are going to be used to pass ARGBLOCK
                     to a function call in ARGS.  In that case, the stack
                     pointer changes value from the allocation point to the
                     call point, and hence the value of
                     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
                     as well always do it.  */
                  argblock = copy_to_reg (argblock);
                }
            }
        }

      if (ACCUMULATE_OUTGOING_ARGS)
        {
          /* The save/restore code in store_one_arg handles all
             cases except one: a constructor call (including a C
             function returning a BLKmode struct) to initialize
             an argument.  */
          if (stack_arg_under_construction)
            {
              rtx push_size
                = (gen_int_mode
                   (adjusted_args_size.constant
                    + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
                                                      : TREE_TYPE (fndecl))
                       ? 0 : reg_parm_stack_space), Pmode));
              if (old_stack_level == 0)
                {
                  emit_stack_save (SAVE_BLOCK, &old_stack_level);
                  old_stack_pointer_delta = stack_pointer_delta;
                  old_pending_adj = pending_stack_adjust;
                  pending_stack_adjust = 0;
                  /* stack_arg_under_construction says whether a stack
                     arg is being constructed at the old stack level.
                     Pushing the stack gets a clean outgoing argument
                     block.  */
                  old_stack_arg_under_construction
                    = stack_arg_under_construction;
                  stack_arg_under_construction = 0;
                  /* Make a new map for the new argument list.  */
                  free (stack_usage_map_buf);
                  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;
                  highest_outgoing_arg_in_use = 0;
                  stack_usage_watermark = HOST_WIDE_INT_M1U;
                }
              /* We can pass TRUE as the 4th argument because we just
                 saved the stack pointer and will restore it right after
                 the call.  */
              allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
                                            -1, true);
            }

          /* If argument evaluation might modify the stack pointer,
             copy the address of the argument list to a register.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].pass_on_stack)
              {
                argblock = copy_addr_to_reg (argblock);
                break;
              }
        }

      compute_argument_addresses (args, argblock, num_actuals);

      /* Stack is properly aligned, pops can't safely be deferred during
         the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Precompute all register parameters.  It isn't safe to compute
         anything once we have started filling any specific hard regs.
         TLS symbols sometimes need a call to resolve.  Precompute
         register parameters before any stack pointer manipulation
         to avoid unaligned stack in the called function.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      OK_DEFER_POP;

      /* Perform stack alignment before the first push (the last arg).  */
      if (argblock == 0
          && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
          && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
        {
          /* When the stack adjustment is pending, we get better code
             by combining the adjustments.  */
          if (maybe_ne (pending_stack_adjust, 0)
              && ! inhibit_defer_pop
              && (combine_pending_stack_adjustment_and_call
                  (&pending_stack_adjust,
                   unadjusted_args_size,
                   &adjusted_args_size,
                   preferred_unit_stack_boundary)))
            do_pending_stack_adjust ();
          else if (argblock == 0)
            anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
                                             - unadjusted_args_size,
                                             Pmode));
        }
      /* Now that the stack is properly aligned, pops can't safely
         be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Record the maximum pushed stack space size.  We need to delay
         doing it this far to take into account the optimization done
         by combine_pending_stack_adjustment_and_call.  */
      if (flag_stack_usage_info
          && !ACCUMULATE_OUTGOING_ARGS
          && pass
          && adjusted_args_size.var == 0)
        {
          poly_int64 pushed = (adjusted_args_size.constant
                               + pending_stack_adjust);
          current_function_pushed_stack_size
            = upper_bound (current_function_pushed_stack_size, pushed);
        }

      funexp = rtx_for_function_call (fndecl, addr);

      if (CALL_EXPR_STATIC_CHAIN (exp))
        static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
      else
        static_chain_value = 0;

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
         is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
        save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                              &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
         These come before register parms, since they can require block-moves,
         which could clobber the registers used for register parms.
         Parms which have partial registers are not stored here,
         but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
        {
          /* Delay bounds until all other args are stored.  */
          if (POINTER_BOUNDS_P (args[i].tree_value))
            continue;
          else if (args[i].reg == 0 || args[i].pass_on_stack)
            {
              rtx_insn *before_arg = get_last_insn ();

              /* We don't allow passing huge (> 2^30 B) arguments
                 by value.  It would cause an overflow later on.  */
              if (constant_lower_bound (adjusted_args_size.constant)
                  >= (1 << (HOST_BITS_PER_INT - 2)))
                {
                  sorry ("passing too large argument on stack");
                  continue;
                }

4062
0196c95e
JJ
4063 if (store_one_arg (&args[i], argblock, flags,
4064 adjusted_args_size.var != 0,
4065 reg_parm_stack_space)
4066 || (pass == 0
4067 && check_sibcall_argument_overlap (before_arg,
4068 &args[i], 1)))
4069 sibcall_failure = 1;
4070 }
4071
2b1c5433 4072 if (args[i].stack)
7d810276
JJ
4073 call_fusage
4074 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4075 gen_rtx_USE (VOIDmode, args[i].stack),
4076 call_fusage);
0196c95e 4077 }
0a1c58a2
JL
4078
4079 /* If we have a parm that is passed in registers but not in memory
4080 and whose alignment does not permit a direct copy into registers,
4081 make a group of pseudos that correspond to each register that we
4082 will later fill. */
4083 if (STRICT_ALIGNMENT)
4084 store_unaligned_arguments_into_pseudos (args, num_actuals);
4085
4086 /* Now store any partially-in-registers parm.
4087 This is the last place a block-move can happen. */
4088 if (reg_parm_seen)
4089 for (i = 0; i < num_actuals; i++)
4090 if (args[i].partial != 0 && ! args[i].pass_on_stack)
c67846f2 4091 {
48810515 4092 rtx_insn *before_arg = get_last_insn ();
c67846f2 4093
99206968
KT
4094 /* On targets with weird calling conventions (e.g. PA) it's
4095 hard to ensure that all cases of argument overlap between
4096 stack and registers work. Play it safe and bail out. */
4097 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4098 {
4099 sibcall_failure = 1;
4100 break;
4101 }
4102
4c6b3b2a
JJ
4103 if (store_one_arg (&args[i], argblock, flags,
4104 adjusted_args_size.var != 0,
4105 reg_parm_stack_space)
4106 || (pass == 0
4107 && check_sibcall_argument_overlap (before_arg,
0cdca92b 4108 &args[i], 1)))
c67846f2
JJ
4109 sibcall_failure = 1;
4110 }
51bbfa0c 4111
2f21e1ba
BS
4112 bool any_regs = false;
4113 for (i = 0; i < num_actuals; i++)
4114 if (args[i].reg != NULL_RTX)
4115 {
4116 any_regs = true;
4117 targetm.calls.call_args (args[i].reg, funtype);
4118 }
4119 if (!any_regs)
4120 targetm.calls.call_args (pc_rtx, funtype);
4121
4122 /* Figure out the register where the value, if any, will come back. */
4123 valreg = 0;
4124 valbnd = 0;
4125 if (TYPE_MODE (rettype) != VOIDmode
4126 && ! structure_value_addr)
4127 {
4128 if (pcc_struct_value)
4129 {
4130 valreg = hard_function_value (build_pointer_type (rettype),
4131 fndecl, NULL, (pass == 0));
4132 if (CALL_WITH_BOUNDS_P (exp))
4133 valbnd = targetm.calls.
4134 chkp_function_value_bounds (build_pointer_type (rettype),
4135 fndecl, (pass == 0));
4136 }
4137 else
4138 {
4139 valreg = hard_function_value (rettype, fndecl, fntype,
4140 (pass == 0));
4141 if (CALL_WITH_BOUNDS_P (exp))
4142 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
4143 fndecl,
4144 (pass == 0));
4145 }
4146
4147 /* If VALREG is a PARALLEL whose first member has a zero
4148 offset, use that. This is for targets such as m68k that
4149 return the same value in multiple places. */
4150 if (GET_CODE (valreg) == PARALLEL)
4151 {
4152 rtx elem = XVECEXP (valreg, 0, 0);
4153 rtx where = XEXP (elem, 0);
4154 rtx offset = XEXP (elem, 1);
4155 if (offset == const0_rtx
4156 && GET_MODE (where) == GET_MODE (valreg))
4157 valreg = where;
4158 }
4159 }
4160
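      /* On m68k, for instance, a pointer result may come back in both
         %d0 and %a0, giving a PARALLEL of two same-mode registers at
         offset zero; the test above then narrows VALREG to the first
         of them.  */
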
      /* Store all bounds not passed in registers.  */
      for (i = 0; i < num_actuals; i++)
        {
          if (POINTER_BOUNDS_P (args[i].tree_value)
              && !args[i].reg)
            store_bounds (&args[i],
                          args[i].pointer_arg == -1
                          ? NULL
                          : &args[args[i].pointer_arg]);
        }

      /* If register arguments require space on the stack and stack space
         was not preallocated, allocate stack space here for arguments
         passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
          && !ACCUMULATE_OUTGOING_ARGS
          && must_preallocate == 0 && reg_parm_stack_space > 0)
        anti_adjust_stack (GEN_INT (reg_parm_stack_space));

      /* Pass the function the address in which to return a
         structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
        {
          structure_value_addr
            = convert_memory_address (Pmode, structure_value_addr);
          emit_move_insn (struct_value,
                          force_reg (Pmode,
                                     force_operand (structure_value_addr,
                                                    NULL_RTX)));

          if (REG_P (struct_value))
            use_reg (&call_fusage, struct_value);
        }

      after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
                                     static_chain_value, &call_fusage,
                                     reg_parm_seen, flags);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
                                pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
         later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
         with register windows this should be the incoming register.  */
      if (pass == 0)
        next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
                                                            VOIDmode,
                                                            void_type_node,
                                                            true);
      else
        next_arg_reg = targetm.calls.function_arg (args_so_far,
                                                   VOIDmode, void_type_node,
                                                   true);

      if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
        {
          int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
          arg_nr = num_actuals - arg_nr - 1;
          if (arg_nr >= 0
              && arg_nr < num_actuals
              && args[arg_nr].reg
              && valreg
              && REG_P (valreg)
              && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
            call_fusage
              = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
                                   gen_rtx_SET (valreg, args[arg_nr].reg),
                                   call_fusage);
        }
      /* All arguments and registers used for the call must be set up by
         now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
                  || multiple_p (stack_pointer_delta,
                                 preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
                   adjusted_args_size.constant, struct_value_size,
                   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
                   flags, args_so_far);

      if (flag_ipa_ra)
        {
          rtx_call_insn *last;
          rtx datum = NULL_RTX;
          if (fndecl != NULL_TREE)
            {
              datum = XEXP (DECL_RTL (fndecl), 0);
              gcc_assert (datum != NULL_RTX
                          && GET_CODE (datum) == SYMBOL_REF);
            }
          last = last_call_insn ();
          add_reg_note (last, REG_CALL_DECL, datum);
        }

      /* If the call setup or the call itself overlaps with anything
         of the argument setup we probably clobbered our call address.
         In that case we can't do sibcalls.  */
      if (pass == 0
          && check_sibcall_argument_overlap (after_args, 0, 0))
        sibcall_failure = 1;

      /* If a non-BLKmode value is returned at the most significant end
         of a register, shift the register right by the appropriate amount
         and update VALREG accordingly.  BLKmode values are handled by the
         group load/store machinery below.  */
      if (!structure_value_addr
          && !pcc_struct_value
          && TYPE_MODE (rettype) != VOIDmode
          && TYPE_MODE (rettype) != BLKmode
          && REG_P (valreg)
          && targetm.calls.return_in_msb (rettype))
        {
          if (shift_return_value (TYPE_MODE (rettype), false, valreg))
            sibcall_failure = 1;
          valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
        }

      if (pass && (flags & ECF_MALLOC))
        {
          rtx temp = gen_reg_rtx (GET_MODE (valreg));
          rtx_insn *last, *insns;

          /* The return value from a malloc-like function is a pointer.  */
          if (TREE_CODE (rettype) == POINTER_TYPE)
            mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);

          emit_move_insn (temp, valreg);

          /* The return value from a malloc-like function cannot alias
             anything else.  */
          last = get_last_insn ();
          add_reg_note (last, REG_NOALIAS, temp);

          /* Write out the sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          valreg = temp;
        }

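      /* The REG_NOALIAS note lets alias analysis treat TEMP like the
         result of a fresh allocation, whose memory cannot conflict with
         anything else; that is why the note is only added for
         ECF_MALLOC ("malloc"-attribute) calls.  */
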
      /* For calls to `setjmp', etc., inform
         function.c:setjmp_warnings that it should complain if
         nonvolatile values are live.  For functions that cannot
         return, inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
        {
          /* The barrier must be emitted
             immediately after the CALL_INSN.  Some ports emit more
             than just a CALL_INSN above, so we must search for it here.  */

          rtx_insn *last = get_last_insn ();
          while (!CALL_P (last))
            {
              last = PREV_INSN (last);
              /* There was no CALL_INSN?  */
              gcc_assert (last != before_call);
            }

          emit_barrier_after (last);

          /* Stack adjustments after a noreturn call are dead code.
             However when NO_DEFER_POP is in effect, we must preserve
             stack_pointer_delta.  */
          if (inhibit_defer_pop == 0)
            {
              stack_pointer_delta = old_stack_allocated;
              pending_stack_adjust = 0;
            }
        }

      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (rettype) == VOIDmode
          || ignore)
        target = const0_rtx;
      else if (structure_value_addr)
        {
          if (target == 0 || !MEM_P (target))
            {
              target
                = gen_rtx_MEM (TYPE_MODE (rettype),
                               memory_address (TYPE_MODE (rettype),
                                               structure_value_addr));
              set_mem_attributes (target, rettype, 1);
            }
        }
      else if (pcc_struct_value)
        {
          /* This is the special C++ case where we need to
             know what the true target was.  We take care to
             never use this value more than once in one expression.  */
          target = gen_rtx_MEM (TYPE_MODE (rettype),
                                copy_to_reg (valreg));
          set_mem_attributes (target, rettype, 1);
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (target == 0)
            target = emit_group_move_into_temps (valreg);
          else if (rtx_equal_p (target, valreg))
            ;
          else if (GET_CODE (target) == PARALLEL)
            /* Handle the result of an emit_group_move_into_temps
               call in the previous pass.  */
            emit_group_move (target, valreg);
          else
            emit_group_store (target, valreg, rettype,
                              int_size_in_bytes (rettype));
        }
      else if (target
               && GET_MODE (target) == TYPE_MODE (rettype)
               && GET_MODE (target) == GET_MODE (valreg))
        {
          bool may_overlap = false;

          /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
             reg to a plain register.  */
          if (!REG_P (target) || HARD_REGISTER_P (target))
            valreg = avoid_likely_spilled_reg (valreg);

          /* If TARGET is a MEM in the argument area, and we have
             saved part of the argument area, then we can't store
             directly into TARGET as it may get overwritten when we
             restore the argument save area below.  Don't work too
             hard though and simply force TARGET to a register if it
             is a MEM; the optimizer is quite likely to sort it out.  */
          if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
            for (i = 0; i < num_actuals; i++)
              if (args[i].save_area)
                {
                  may_overlap = true;
                  break;
                }

          if (may_overlap)
            target = copy_to_reg (valreg);
          else
            {
              /* TARGET and VALREG cannot be equal at this point
                 because the latter would not have
                 REG_FUNCTION_VALUE_P true, while the former would if
                 it were referring to the same register.

                 If they refer to the same register, this move will be
                 a no-op, except when function inlining is being
                 done.  */
              emit_move_insn (target, valreg);

              /* If we are setting a MEM, this code must be executed.
                 Since it is emitted after the call insn, sibcall
                 optimization cannot be performed in that case.  */
              if (MEM_P (target))
                sibcall_failure = 1;
            }
        }
      else
        target = copy_to_reg (avoid_likely_spilled_reg (valreg));

      /* If we promoted this return value, make the proper SUBREG.
         TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
          && TYPE_MODE (rettype) != BLKmode
          && GET_MODE (target) != TYPE_MODE (rettype))
        {
          tree type = rettype;
          int unsignedp = TYPE_UNSIGNED (type);
          machine_mode pmode;

          /* Ensure we promote as expected, and get the new unsignedness.  */
          pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                         funtype, 1);
          gcc_assert (GET_MODE (target) == pmode);

          poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
                                                      GET_MODE (target));
          target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
          SUBREG_PROMOTED_VAR_P (target) = 1;
          SUBREG_PROMOTED_SET (target, unsignedp);
        }

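      /* For example, a "short" value returned promoted in an SImode
         register becomes (subreg:HI (reg:SI ...) ...) with
         SUBREG_PROMOTED_VAR_P set, so later users know the upper bits
         already hold a valid sign or zero extension.  */
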
0a1c58a2
JL
4451 /* If size of args is variable or this was a constructor call for a stack
4452 argument, restore saved stack-pointer value. */
51bbfa0c 4453
9dd9bf80 4454 if (old_stack_level)
0a1c58a2 4455 {
48810515 4456 rtx_insn *prev = get_last_insn ();
9a08d230 4457
9eac0f2a 4458 emit_stack_restore (SAVE_BLOCK, old_stack_level);
38afb23f 4459 stack_pointer_delta = old_stack_pointer_delta;
9a08d230 4460
faf7a23d 4461 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
9a08d230 4462
0a1c58a2 4463 pending_stack_adjust = old_pending_adj;
d25cee4d 4464 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
0a1c58a2
JL
4465 stack_arg_under_construction = old_stack_arg_under_construction;
4466 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4467 stack_usage_map = initial_stack_usage_map;
a20c5714 4468 stack_usage_watermark = initial_stack_usage_watermark;
0a1c58a2
JL
4469 sibcall_failure = 1;
4470 }
f8a097cd 4471 else if (ACCUMULATE_OUTGOING_ARGS && pass)
0a1c58a2 4472 {
51bbfa0c 4473#ifdef REG_PARM_STACK_SPACE
0a1c58a2 4474 if (save_area)
b820d2b8
AM
4475 restore_fixed_argument_area (save_area, argblock,
4476 high_to_save, low_to_save);
b94301c2 4477#endif
51bbfa0c 4478
0a1c58a2
JL
4479 /* If we saved any argument areas, restore them. */
4480 for (i = 0; i < num_actuals; i++)
4481 if (args[i].save_area)
4482 {
ef4bddc2 4483 machine_mode save_mode = GET_MODE (args[i].save_area);
0a1c58a2
JL
4484 rtx stack_area
4485 = gen_rtx_MEM (save_mode,
4486 memory_address (save_mode,
4487 XEXP (args[i].stack_slot, 0)));
4488
4489 if (save_mode != BLKmode)
4490 emit_move_insn (stack_area, args[i].save_area);
4491 else
44bb111a 4492 emit_block_move (stack_area, args[i].save_area,
a20c5714
RS
4493 (gen_int_mode
4494 (args[i].locate.size.constant, Pmode)),
44bb111a 4495 BLOCK_OP_CALL_PARM);
0a1c58a2 4496 }
51bbfa0c 4497
0a1c58a2
JL
4498 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4499 stack_usage_map = initial_stack_usage_map;
a20c5714 4500 stack_usage_watermark = initial_stack_usage_watermark;
0a1c58a2 4501 }
51bbfa0c 4502
d33606c3
EB
4503 /* If this was alloca, record the new stack level. */
4504 if (flags & ECF_MAY_BE_ALLOCA)
4505 record_new_stack_level ();
51bbfa0c 4506
0a1c58a2
JL
4507 /* Free up storage we no longer need. */
4508 for (i = 0; i < num_actuals; ++i)
04695783 4509 free (args[i].aligned_regs);
0a1c58a2 4510
2f21e1ba
BS
4511 targetm.calls.end_call_args ();
4512
0a1c58a2
JL
4513 insns = get_insns ();
4514 end_sequence ();
4515
4516 if (pass == 0)
4517 {
4518 tail_call_insns = insns;
4519
0a1c58a2
JL
4520 /* Restore the pending stack adjustment now that we have
4521 finished generating the sibling call sequence. */
1503a7ec 4522
7f2f0a01 4523 restore_pending_stack_adjust (&save);
099e9712
JH
4524
4525 /* Prepare arg structure for next iteration. */
f725a3ec 4526 for (i = 0; i < num_actuals; i++)
099e9712
JH
4527 {
4528 args[i].value = 0;
4529 args[i].aligned_regs = 0;
4530 args[i].stack = 0;
4531 }
c67846f2
JJ
4532
4533 sbitmap_free (stored_args_map);
48810515 4534 internal_arg_pointer_exp_state.scan_start = NULL;
9771b263 4535 internal_arg_pointer_exp_state.cache.release ();
0a1c58a2
JL
4536 }
4537 else
38afb23f
OH
4538 {
4539 normal_call_insns = insns;
4540
4541 /* Verify that we've deallocated all the stack we used. */
6e14af16 4542 gcc_assert ((flags & ECF_NORETURN)
a20c5714
RS
4543 || known_eq (old_stack_allocated,
4544 stack_pointer_delta
4545 - pending_stack_adjust));
38afb23f 4546 }
fadb729c
JJ
4547
4548 /* If something prevents making this a sibling call,
4549 zero out the sequence. */
4550 if (sibcall_failure)
48810515 4551 tail_call_insns = NULL;
6de9cd9a
DN
4552 else
4553 break;
0a1c58a2
JL
4554 }
4555
1ea7e6ad 4556 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
6de9cd9a
DN
 4557 arguments too, as the argument area is now clobbered by the call. */
4558 if (tail_call_insns)
0a1c58a2 4559 {
6de9cd9a 4560 emit_insn (tail_call_insns);
e3b5732b 4561 crtl->tail_call_emit = true;
0a1c58a2
JL
4562 }
4563 else
9a385c2d
DM
4564 {
4565 emit_insn (normal_call_insns);
4566 if (try_tail_call)
4567 /* Ideally we'd emit a message for all of the ways that it could
4568 have failed. */
4569 maybe_complain_about_tail_call (exp, "tail call production failed");
4570 }
51bbfa0c 4571
0a1c58a2 4572 currently_expanding_call--;
8e6a59fe 4573
04695783 4574 free (stack_usage_map_buf);
765fc0f7 4575 free (args);
d9725c41 4576
d5e254e1
IE
4577 /* Join result with returned bounds so caller may use them if needed. */
4578 target = chkp_join_splitted_slot (target, valbnd);
4579
51bbfa0c
RS
4580 return target;
4581}
ded9bf77 4582
6de9cd9a
DN
4583/* A sibling call sequence invalidates any REG_EQUIV notes made for
4584 this function's incoming arguments.
4585
4586 At the start of RTL generation we know the only REG_EQUIV notes
29d51cdb
SB
4587 in the rtl chain are those for incoming arguments, so we can look
4588 for REG_EQUIV notes between the start of the function and the
4589 NOTE_INSN_FUNCTION_BEG.
6de9cd9a
DN
4590
4591 This is (slight) overkill. We could keep track of the highest
4592 argument we clobber and be more selective in removing notes, but it
4593 does not seem to be worth the effort. */
29d51cdb 4594
6de9cd9a
DN
4595void
4596fixup_tail_calls (void)
4597{
48810515 4598 rtx_insn *insn;
29d51cdb
SB
4599
4600 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4601 {
a31830a7
SB
4602 rtx note;
4603
29d51cdb
SB
4604 /* There are never REG_EQUIV notes for the incoming arguments
4605 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4606 if (NOTE_P (insn)
a38e7aa5 4607 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
29d51cdb
SB
4608 break;
4609
a31830a7
SB
4610 note = find_reg_note (insn, REG_EQUIV, 0);
4611 if (note)
4612 remove_note (insn, note);
4613 note = find_reg_note (insn, REG_EQUIV, 0);
4614 gcc_assert (!note);
29d51cdb 4615 }
6de9cd9a
DN
4616}
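/* Illustrative sketch (not part of the original source): an incoming
   argument register typically carries a note of the form

     (expr_list:REG_EQUIV (mem:SI (plus (reg fp) (const_int 8))) ...)

   equating the register with its stack slot.  Once a sibling call has
   stored its outgoing arguments over that slot, the equivalence is
   stale, which is why the loop above deletes every REG_EQUIV note up
   to NOTE_INSN_FUNCTION_BEG. */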
4617
ded9bf77
AH
4618/* Traverse a list of TYPES and expand all complex types into their
4619 components. */
2f2b4a02 4620static tree
ded9bf77
AH
4621split_complex_types (tree types)
4622{
4623 tree p;
4624
42ba5130
RH
 4625 /* Before allocating memory, check for the common case of no complex types. */
4626 for (p = types; p; p = TREE_CHAIN (p))
4627 {
4628 tree type = TREE_VALUE (p);
4629 if (TREE_CODE (type) == COMPLEX_TYPE
4630 && targetm.calls.split_complex_arg (type))
c22cacf3 4631 goto found;
42ba5130
RH
4632 }
4633 return types;
4634
4635 found:
ded9bf77
AH
4636 types = copy_list (types);
4637
4638 for (p = types; p; p = TREE_CHAIN (p))
4639 {
4640 tree complex_type = TREE_VALUE (p);
4641
42ba5130
RH
4642 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4643 && targetm.calls.split_complex_arg (complex_type))
ded9bf77
AH
4644 {
4645 tree next, imag;
4646
4647 /* Rewrite complex type with component type. */
4648 TREE_VALUE (p) = TREE_TYPE (complex_type);
4649 next = TREE_CHAIN (p);
4650
4651 /* Add another component type for the imaginary part. */
4652 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4653 TREE_CHAIN (p) = imag;
4654 TREE_CHAIN (imag) = next;
4655
4656 /* Skip the newly created node. */
4657 p = TREE_CHAIN (p);
4658 }
4659 }
4660
4661 return types;
4662}
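/* Illustrative sketch (not part of the original source): if the
   target's split_complex_arg hook accepts complex double, a type list

     (complex double, int)

   is rewritten by the function above as

     (double, double, int)

   i.e. the complex entry is replaced by its component type and an
   extra entry is inserted for the imaginary part. */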
51bbfa0c 4663\f
db69559b
RS
4664/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4665 for a value of mode OUTMODE,
4666 with NARGS different arguments, passed as ARGS.
4667 Store the return value if RETVAL is nonzero: store it in VALUE if
4668 VALUE is nonnull, otherwise pick a convenient location. In either
4669 case return the location of the stored value.
8ac61af7 4670
db69559b
RS
4671 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4672 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4673 other types of library calls. */
4674
4675rtx
d329e058
AJ
4676emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4677 enum libcall_type fn_type,
db69559b 4678 machine_mode outmode, int nargs, rtx_mode_t *args)
43bc5f13 4679{
3c0fca12
RH
4680 /* Total size in bytes of all the stack-parms scanned so far. */
4681 struct args_size args_size;
4682 /* Size of arguments before any adjustments (such as rounding). */
4683 struct args_size original_args_size;
b3694847 4684 int argnum;
3c0fca12 4685 rtx fun;
81464b2c
KT
 4686 /* TODO: choose the correct decl type of ORGFUN.  Sadly this information
 4687 isn't present here, so we default to the native calling ABI. */
 033df0b9 4688 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI?  */
 5d059ed9 4689 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI?  */
3c0fca12 4690 int count;
3c0fca12 4691 rtx argblock = 0;
d5cc9181
JR
4692 CUMULATIVE_ARGS args_so_far_v;
4693 cumulative_args_t args_so_far;
f725a3ec
KH
4694 struct arg
4695 {
4696 rtx value;
ef4bddc2 4697 machine_mode mode;
f725a3ec
KH
4698 rtx reg;
4699 int partial;
e7949876 4700 struct locate_and_pad_arg_data locate;
f725a3ec
KH
4701 rtx save_area;
4702 };
3c0fca12
RH
4703 struct arg *argvec;
4704 int old_inhibit_defer_pop = inhibit_defer_pop;
4705 rtx call_fusage = 0;
4706 rtx mem_value = 0;
5591ee6f 4707 rtx valreg;
3c0fca12 4708 int pcc_struct_value = 0;
cf098191 4709 poly_int64 struct_value_size = 0;
52a11cbf 4710 int flags;
3c0fca12 4711 int reg_parm_stack_space = 0;
a20c5714 4712 poly_int64 needed;
48810515 4713 rtx_insn *before_call;
0ed4bf92 4714 bool have_push_fusage;
b0c48229 4715 tree tfom; /* type_for_mode (outmode, 0) */
3c0fca12 4716
f73ad30e 4717#ifdef REG_PARM_STACK_SPACE
3c0fca12
RH
4718 /* Define the boundary of the register parm stack space that needs to be
 4719 saved, if any. */
726a989a 4720 int low_to_save = 0, high_to_save = 0;
f725a3ec 4721 rtx save_area = 0; /* Place that it is saved. */
3c0fca12
RH
4722#endif
4723
3c0fca12 4724 /* Size of the stack reserved for parameter registers. */
a20c5714 4725 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3c0fca12 4726 char *initial_stack_usage_map = stack_usage_map;
a20c5714 4727 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
d9725c41 4728 char *stack_usage_map_buf = NULL;
3c0fca12 4729
61f71b34
DD
4730 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4731
3c0fca12 4732#ifdef REG_PARM_STACK_SPACE
3c0fca12 4733 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3c0fca12
RH
4734#endif
4735
0529235d 4736 /* By default, library functions cannot throw. */
52a11cbf
RH
4737 flags = ECF_NOTHROW;
4738
9555a122
RH
4739 switch (fn_type)
4740 {
4741 case LCT_NORMAL:
53d4257f 4742 break;
9555a122 4743 case LCT_CONST:
53d4257f
JH
4744 flags |= ECF_CONST;
4745 break;
9555a122 4746 case LCT_PURE:
53d4257f 4747 flags |= ECF_PURE;
9555a122 4748 break;
9555a122
RH
4749 case LCT_NORETURN:
4750 flags |= ECF_NORETURN;
4751 break;
4752 case LCT_THROW:
0529235d 4753 flags &= ~ECF_NOTHROW;
9555a122 4754 break;
9defc9b7
RH
4755 case LCT_RETURNS_TWICE:
4756 flags = ECF_RETURNS_TWICE;
4757 break;
9555a122 4758 }
3c0fca12
RH
4759 fun = orgfun;
4760
3c0fca12
RH
4761 /* Ensure current function's preferred stack boundary is at least
4762 what we need. */
cb91fab0
JH
4763 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4764 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3c0fca12
RH
4765
4766 /* If this kind of value comes back in memory,
4767 decide where in memory it should come back. */
b0c48229 4768 if (outmode != VOIDmode)
3c0fca12 4769 {
ae2bcd98 4770 tfom = lang_hooks.types.type_for_mode (outmode, 0);
61f71b34 4771 if (aggregate_value_p (tfom, 0))
b0c48229 4772 {
3c0fca12 4773#ifdef PCC_STATIC_STRUCT_RETURN
b0c48229 4774 rtx pointer_reg
1d636cc6 4775 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
b0c48229
NB
4776 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4777 pcc_struct_value = 1;
4778 if (value == 0)
4779 value = gen_reg_rtx (outmode);
3c0fca12 4780#else /* not PCC_STATIC_STRUCT_RETURN */
b0c48229 4781 struct_value_size = GET_MODE_SIZE (outmode);
3c0cb5de 4782 if (value != 0 && MEM_P (value))
b0c48229
NB
4783 mem_value = value;
4784 else
9474e8ab 4785 mem_value = assign_temp (tfom, 1, 1);
3c0fca12 4786#endif
b0c48229 4787 /* This call returns a big structure. */
84b8030f 4788 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
b0c48229 4789 }
3c0fca12 4790 }
b0c48229
NB
4791 else
4792 tfom = void_type_node;
3c0fca12
RH
4793
4794 /* ??? Unfinished: must pass the memory address as an argument. */
4795
 4796 /* Copy all the libcall arguments out of the incoming ARGS array
 4797 and into a vector ARGVEC.
4798
4799 Compute how to pass each argument. We only support a very small subset
4800 of the full argument passing conventions to limit complexity here since
4801 library functions shouldn't have many args. */
4802
f883e0a7 4803 argvec = XALLOCAVEC (struct arg, nargs + 1);
703ad42b 4804 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3c0fca12 4805
97fc4caf 4806#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
d5cc9181 4807 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
97fc4caf 4808#else
d5cc9181 4809 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
97fc4caf 4810#endif
d5cc9181 4811 args_so_far = pack_cumulative_args (&args_so_far_v);
3c0fca12
RH
4812
4813 args_size.constant = 0;
4814 args_size.var = 0;
4815
4816 count = 0;
4817
4818 push_temp_slots ();
4819
4820 /* If there's a structure value address to be passed,
4821 either pass it in the special place, or pass it as an extra argument. */
61f71b34 4822 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3c0fca12
RH
4823 {
4824 rtx addr = XEXP (mem_value, 0);
c22cacf3 4825
3c0fca12
RH
4826 nargs++;
4827
ee88d9aa
MK
4828 /* Make sure it is a reasonable operand for a move or push insn. */
4829 if (!REG_P (addr) && !MEM_P (addr)
1a627b35
RS
4830 && !(CONSTANT_P (addr)
4831 && targetm.legitimate_constant_p (Pmode, addr)))
ee88d9aa
MK
4832 addr = force_operand (addr, NULL_RTX);
4833
3c0fca12
RH
4834 argvec[count].value = addr;
4835 argvec[count].mode = Pmode;
4836 argvec[count].partial = 0;
4837
d5cc9181 4838 argvec[count].reg = targetm.calls.function_arg (args_so_far,
3c07301f 4839 Pmode, NULL_TREE, true);
d5cc9181 4840 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
78a52f11 4841 NULL_TREE, 1) == 0);
3c0fca12
RH
4842
4843 locate_and_pad_parm (Pmode, NULL_TREE,
a4d5044f 4844#ifdef STACK_PARMS_IN_REG_PARM_AREA
c22cacf3 4845 1,
a4d5044f
CM
4846#else
4847 argvec[count].reg != 0,
4848#endif
2e4ceca5
UW
4849 reg_parm_stack_space, 0,
4850 NULL_TREE, &args_size, &argvec[count].locate);
3c0fca12 4851
3c0fca12
RH
4852 if (argvec[count].reg == 0 || argvec[count].partial != 0
4853 || reg_parm_stack_space > 0)
e7949876 4854 args_size.constant += argvec[count].locate.size.constant;
3c0fca12 4855
d5cc9181 4856 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
3c0fca12
RH
4857
4858 count++;
4859 }
4860
db69559b 4861 for (unsigned int i = 0; count < nargs; i++, count++)
3c0fca12 4862 {
db69559b
RS
4863 rtx val = args[i].first;
4864 machine_mode mode = args[i].second;
5e617be8 4865 int unsigned_p = 0;
3c0fca12
RH
4866
4867 /* We cannot convert the arg value to the mode the library wants here;
4868 must do it earlier where we know the signedness of the arg. */
366de0ce
NS
4869 gcc_assert (mode != BLKmode
4870 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3c0fca12 4871
ee88d9aa
MK
4872 /* Make sure it is a reasonable operand for a move or push insn. */
4873 if (!REG_P (val) && !MEM_P (val)
1a627b35 4874 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
ee88d9aa
MK
4875 val = force_operand (val, NULL_RTX);
4876
d5cc9181 4877 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
3c0fca12 4878 {
f474c6f8 4879 rtx slot;
6cdd5672 4880 int must_copy
d5cc9181 4881 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
f474c6f8 4882
becfd6e5
KZ
4883 /* If this was a CONST function, it is now PURE since it now
4884 reads memory. */
99a32567
DM
4885 if (flags & ECF_CONST)
4886 {
4887 flags &= ~ECF_CONST;
4888 flags |= ECF_PURE;
4889 }
4890
e0c68ce9 4891 if (MEM_P (val) && !must_copy)
c4b9a87e
ER
4892 {
4893 tree val_expr = MEM_EXPR (val);
4894 if (val_expr)
4895 mark_addressable (val_expr);
4896 slot = val;
4897 }
9969aaf6 4898 else
f474c6f8 4899 {
ae2bcd98 4900 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
9474e8ab 4901 1, 1);
f474c6f8
AO
4902 emit_move_insn (slot, val);
4903 }
1da68f56 4904
6b5273c3
AO
4905 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4906 gen_rtx_USE (VOIDmode, slot),
4907 call_fusage);
f474c6f8
AO
4908 if (must_copy)
4909 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4910 gen_rtx_CLOBBER (VOIDmode,
4911 slot),
4912 call_fusage);
4913
3c0fca12 4914 mode = Pmode;
f474c6f8 4915 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12 4916 }
3c0fca12 4917
5e617be8 4918 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
3c0fca12 4919 argvec[count].mode = mode;
5e617be8 4920 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
d5cc9181 4921 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
3c07301f 4922 NULL_TREE, true);
3c0fca12 4923
3c0fca12 4924 argvec[count].partial
d5cc9181 4925 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
3c0fca12 4926
3576f984
RS
4927 if (argvec[count].reg == 0
4928 || argvec[count].partial != 0
4929 || reg_parm_stack_space > 0)
4930 {
4931 locate_and_pad_parm (mode, NULL_TREE,
a4d5044f 4932#ifdef STACK_PARMS_IN_REG_PARM_AREA
3576f984 4933 1,
a4d5044f 4934#else
3576f984
RS
4935 argvec[count].reg != 0,
4936#endif
2e4ceca5 4937 reg_parm_stack_space, argvec[count].partial,
3576f984
RS
4938 NULL_TREE, &args_size, &argvec[count].locate);
4939 args_size.constant += argvec[count].locate.size.constant;
4940 gcc_assert (!argvec[count].locate.size.var);
4941 }
4942#ifdef BLOCK_REG_PADDING
4943 else
4944 /* The argument is passed entirely in registers. See at which
4945 end it should be padded. */
4946 argvec[count].locate.where_pad =
4947 BLOCK_REG_PADDING (mode, NULL_TREE,
cf098191 4948 known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
a4d5044f 4949#endif
3c0fca12 4950
d5cc9181 4951 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
3c0fca12 4952 }
3c0fca12 4953
3c0fca12
RH
4954 /* If this machine requires an external definition for library
4955 functions, write one out. */
4956 assemble_external_libcall (fun);
4957
4958 original_args_size = args_size;
a20c5714
RS
4959 args_size.constant = (aligned_upper_bound (args_size.constant
4960 + stack_pointer_delta,
4961 STACK_BYTES)
4962 - stack_pointer_delta);
3c0fca12 4963
a20c5714
RS
4964 args_size.constant = upper_bound (args_size.constant,
4965 reg_parm_stack_space);
3c0fca12 4966
5d059ed9 4967 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 4968 args_size.constant -= reg_parm_stack_space;
3c0fca12 4969
a20c5714
RS
4970 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4971 args_size.constant);
3c0fca12 4972
a11e0df4 4973 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
d3c12306 4974 {
a20c5714
RS
4975 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4976 current_function_pushed_stack_size
4977 = upper_bound (current_function_pushed_stack_size, pushed);
d3c12306
EB
4978 }
4979
f73ad30e
JH
4980 if (ACCUMULATE_OUTGOING_ARGS)
4981 {
4982 /* Since the stack pointer will never be pushed, it is possible for
4983 the evaluation of a parm to clobber something we have already
4984 written to the stack. Since most function calls on RISC machines
4985 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 4986
f73ad30e
JH
4987 Therefore, we save any area of the stack that was already written
4988 and that we are using. Here we set up to do this by making a new
4989 stack usage map from the old one.
3c0fca12 4990
f73ad30e
JH
4991 Another approach might be to try to reorder the argument
4992 evaluations to avoid this conflicting stack usage. */
3c0fca12 4993
f73ad30e 4994 needed = args_size.constant;
3c0fca12 4995
f73ad30e
JH
4996 /* Since we will be writing into the entire argument area, the
4997 map must be allocated for its entire size, not just the part that
4998 is the responsibility of the caller. */
5d059ed9 4999 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 5000 needed += reg_parm_stack_space;
3c0fca12 5001
a20c5714 5002 poly_int64 limit = needed;
6dad9361 5003 if (ARGS_GROW_DOWNWARD)
a20c5714
RS
5004 limit += 1;
5005
5006 /* For polynomial sizes, this is the maximum possible size needed
5007 for arguments with a constant size and offset. */
5008 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5009 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5010 const_limit);
6dad9361 5011
5ed6ace5 5012 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 5013 stack_usage_map = stack_usage_map_buf;
3c0fca12 5014
f73ad30e 5015 if (initial_highest_arg_in_use)
2e09e75a
JM
5016 memcpy (stack_usage_map, initial_stack_usage_map,
5017 initial_highest_arg_in_use);
3c0fca12 5018
f73ad30e 5019 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 5020 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
5021 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5022 needed = 0;
3c0fca12 5023
c39ada04 5024 /* We must be careful to use virtual regs before they're instantiated,
c22cacf3 5025 and real regs afterwards. Loop optimization, for example, can create
c39ada04
DD
5026 new libcalls after we've instantiated the virtual regs, and if we
5027 use virtuals anyway, they won't match the rtl patterns. */
3c0fca12 5028
c39ada04 5029 if (virtuals_instantiated)
0a81f074
RS
5030 argblock = plus_constant (Pmode, stack_pointer_rtx,
5031 STACK_POINTER_OFFSET);
c39ada04
DD
5032 else
5033 argblock = virtual_outgoing_args_rtx;
f73ad30e
JH
5034 }
5035 else
5036 {
5037 if (!PUSH_ARGS)
a20c5714 5038 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
f73ad30e 5039 }
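/* Illustrative sketch (not part of the original source): under
   ACCUMULATE_OUTGOING_ARGS, stack_usage_map holds one char per byte of
   the outgoing argument area, nonzero once that byte is known to be in
   use.  The push loop below then behaves roughly as

     if (stack_region_maybe_used_p (lower, upper, ...))  /* conflict */
       ... copy the old bytes into argvec[count].save_area ...
     ... emit the push for the new argument ...
     mark_stack_region_used (lower, upper);

   where LOWER/UPPER stand for the slot's bounds (lower_bound and
   upper_bound in the real code). */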
3c0fca12 5040
 3d9684ae 5041 /* We push args individually in reverse order, performing stack alignment
 3c0fca12 5042 before the first push (the last arg). */
3d9684ae 5043 if (argblock == 0)
a20c5714
RS
5044 anti_adjust_stack (gen_int_mode (args_size.constant
5045 - original_args_size.constant,
5046 Pmode));
3c0fca12 5047
3d9684ae 5048 argnum = nargs - 1;
3c0fca12 5049
f73ad30e
JH
5050#ifdef REG_PARM_STACK_SPACE
5051 if (ACCUMULATE_OUTGOING_ARGS)
5052 {
5053 /* The argument list is the property of the called routine and it
5054 may clobber it. If the fixed area has been used for previous
b820d2b8
AM
5055 parameters, we must save and restore it. */
5056 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5057 &low_to_save, &high_to_save);
3c0fca12
RH
5058 }
5059#endif
f725a3ec 5060
2f21e1ba
BS
5061 /* When expanding a normal call, args are stored in push order,
5062 which is the reverse of what we have here. */
5063 bool any_regs = false;
5064 for (int i = nargs; i-- > 0; )
5065 if (argvec[i].reg != NULL_RTX)
5066 {
5067 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5068 any_regs = true;
5069 }
5070 if (!any_regs)
5071 targetm.calls.call_args (pc_rtx, NULL_TREE);
5072
3c0fca12
RH
5073 /* Push the args that need to be pushed. */
5074
0ed4bf92
BS
5075 have_push_fusage = false;
5076
3c0fca12
RH
5077 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5078 are to be pushed. */
3d9684ae 5079 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 5080 {
ef4bddc2 5081 machine_mode mode = argvec[argnum].mode;
b3694847 5082 rtx val = argvec[argnum].value;
3c0fca12
RH
5083 rtx reg = argvec[argnum].reg;
5084 int partial = argvec[argnum].partial;
6bdf8c2e 5085 unsigned int parm_align = argvec[argnum].locate.boundary;
a20c5714 5086 poly_int64 lower_bound = 0, upper_bound = 0;
3c0fca12
RH
5087
5088 if (! (reg != 0 && partial == 0))
5089 {
2b1c5433
JJ
5090 rtx use;
5091
f73ad30e
JH
5092 if (ACCUMULATE_OUTGOING_ARGS)
5093 {
f8a097cd
JH
5094 /* If this is being stored into a pre-allocated, fixed-size,
5095 stack area, save any previous data at that location. */
3c0fca12 5096
6dad9361
TS
5097 if (ARGS_GROW_DOWNWARD)
5098 {
5099 /* stack_slot is negative, but we want to index stack_usage_map
5100 with positive values. */
5101 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5102 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5103 }
5104 else
5105 {
5106 lower_bound = argvec[argnum].locate.slot_offset.constant;
5107 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5108 }
3c0fca12 5109
a20c5714
RS
5110 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5111 reg_parm_stack_space))
f73ad30e 5112 {
e7949876 5113 /* We need to make a save area. */
a20c5714 5114 poly_uint64 size
e7949876 5115 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
ef4bddc2 5116 machine_mode save_mode
f4b31647 5117 = int_mode_for_size (size, 1).else_blk ();
e7949876 5118 rtx adr
0a81f074 5119 = plus_constant (Pmode, argblock,
e7949876 5120 argvec[argnum].locate.offset.constant);
f73ad30e 5121 rtx stack_area
e7949876 5122 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
f73ad30e 5123
9778f2f8
JH
5124 if (save_mode == BLKmode)
5125 {
5126 argvec[argnum].save_area
5127 = assign_stack_temp (BLKmode,
9474e8ab
MM
5128 argvec[argnum].locate.size.constant
5129 );
9778f2f8 5130
1a8cb155
RS
5131 emit_block_move (validize_mem
5132 (copy_rtx (argvec[argnum].save_area)),
c22cacf3 5133 stack_area,
a20c5714
RS
5134 (gen_int_mode
5135 (argvec[argnum].locate.size.constant,
5136 Pmode)),
9778f2f8
JH
5137 BLOCK_OP_CALL_PARM);
5138 }
5139 else
5140 {
5141 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5142
5143 emit_move_insn (argvec[argnum].save_area, stack_area);
5144 }
f73ad30e 5145 }
3c0fca12 5146 }
19caa751 5147
6bdf8c2e 5148 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
44bb111a 5149 partial, reg, 0, argblock,
a20c5714
RS
5150 (gen_int_mode
5151 (argvec[argnum].locate.offset.constant, Pmode)),
e7949876 5152 reg_parm_stack_space,
99206968 5153 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
3c0fca12 5154
3c0fca12 5155 /* Now mark the segment we just used. */
f73ad30e 5156 if (ACCUMULATE_OUTGOING_ARGS)
a20c5714 5157 mark_stack_region_used (lower_bound, upper_bound);
3c0fca12
RH
5158
5159 NO_DEFER_POP;
475a3eef 5160
2b1c5433
JJ
5161 /* Indicate argument access so that alias.c knows that these
5162 values are live. */
5163 if (argblock)
0a81f074 5164 use = plus_constant (Pmode, argblock,
2b1c5433 5165 argvec[argnum].locate.offset.constant);
0ed4bf92
BS
5166 else if (have_push_fusage)
5167 continue;
2b1c5433 5168 else
0ed4bf92
BS
5169 {
5170 /* When arguments are pushed, trying to tell alias.c where
5171 exactly this argument is won't work, because the
5172 auto-increment causes confusion. So we merely indicate
5173 that we access something with a known mode somewhere on
5174 the stack. */
5175 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5176 gen_rtx_SCRATCH (Pmode));
5177 have_push_fusage = true;
5178 }
2b1c5433
JJ
5179 use = gen_rtx_MEM (argvec[argnum].mode, use);
5180 use = gen_rtx_USE (VOIDmode, use);
5181 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3c0fca12
RH
5182 }
5183 }
5184
3d9684ae 5185 argnum = nargs - 1;
3c0fca12 5186
531ca746 5187 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3c0fca12
RH
5188
5189 /* Now load any reg parms into their regs. */
5190
5191 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5192 are to be pushed. */
3d9684ae 5193 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 5194 {
ef4bddc2 5195 machine_mode mode = argvec[argnum].mode;
b3694847 5196 rtx val = argvec[argnum].value;
3c0fca12
RH
5197 rtx reg = argvec[argnum].reg;
5198 int partial = argvec[argnum].partial;
460b171d 5199
3c0fca12
RH
5200 /* Handle calls that pass values in multiple non-contiguous
5201 locations. The PA64 has examples of this for library calls. */
5202 if (reg != 0 && GET_CODE (reg) == PARALLEL)
ff15c351 5203 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3c0fca12 5204 else if (reg != 0 && partial == 0)
460b171d
JB
5205 {
5206 emit_move_insn (reg, val);
5207#ifdef BLOCK_REG_PADDING
cf098191 5208 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
460b171d
JB
5209
5210 /* Copied from load_register_parameters. */
5211
 5212 /* Handle the case where we have a value that needs shifting
 5213 up to the MSB, e.g. a QImode value being padded
 5214 upward on a BYTES_BIG_ENDIAN machine. */
cf098191 5215 if (known_lt (size, UNITS_PER_WORD)
460b171d 5216 && (argvec[argnum].locate.where_pad
76b0cbf8 5217 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
460b171d
JB
5218 {
5219 rtx x;
cf098191 5220 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
460b171d
JB
5221
5222 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5223 report the whole reg as used. Strictly speaking, the
5224 call only uses SIZE bytes at the msb end, but it doesn't
5225 seem worth generating rtl to say that. */
5226 reg = gen_rtx_REG (word_mode, REGNO (reg));
5227 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5228 if (x != reg)
5229 emit_move_insn (reg, x);
5230 }
5231#endif
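 /* Worked example (illustrative only): for a QImode value, SIZE == 1,
    so with UNITS_PER_WORD == 4 the shift above is
    (4 - 1) * BITS_PER_UNIT == 24 bits, moving the byte from the LSB
    end to the MSB end of the word register. */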
5232 }
3c0fca12
RH
5233
5234 NO_DEFER_POP;
5235 }
5236
3c0fca12
RH
5237 /* Any regs containing parms remain in use through the call. */
5238 for (count = 0; count < nargs; count++)
5239 {
5240 rtx reg = argvec[count].reg;
5241 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5242 use_group_regs (&call_fusage, reg);
5243 else if (reg != 0)
3b1bf459
BS
5244 {
5245 int partial = argvec[count].partial;
5246 if (partial)
5247 {
5248 int nregs;
5249 gcc_assert (partial % UNITS_PER_WORD == 0);
5250 nregs = partial / UNITS_PER_WORD;
5251 use_regs (&call_fusage, REGNO (reg), nregs);
5252 }
5253 else
5254 use_reg (&call_fusage, reg);
5255 }
3c0fca12
RH
5256 }
5257
5258 /* Pass the function the address in which to return a structure value. */
61f71b34 5259 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3c0fca12 5260 {
61f71b34 5261 emit_move_insn (struct_value,
3c0fca12
RH
5262 force_reg (Pmode,
5263 force_operand (XEXP (mem_value, 0),
5264 NULL_RTX)));
f8cfc6aa 5265 if (REG_P (struct_value))
61f71b34 5266 use_reg (&call_fusage, struct_value);
3c0fca12
RH
5267 }
5268
5269 /* Don't allow popping to be deferred, since then
5270 cse'ing of library calls could delete a call and leave the pop. */
5271 NO_DEFER_POP;
5591ee6f 5272 valreg = (mem_value == 0 && outmode != VOIDmode
390b17c2 5273 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3c0fca12 5274
ce48579b 5275 /* Stack must be properly aligned now. */
a20c5714
RS
5276 gcc_assert (multiple_p (stack_pointer_delta,
5277 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
ebcd0b57 5278
695ee791
RH
5279 before_call = get_last_insn ();
5280
3c0fca12
RH
5281 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5282 will set inhibit_defer_pop to that value. */
de76b467
JH
5283 /* The return type is needed to decide how many bytes the function pops.
5284 Signedness plays no role in that, so for simplicity, we pretend it's
5285 always signed. We also assume that the list of arguments passed has
5286 no impact, so we pretend it is unknown. */
3c0fca12 5287
6de9cd9a 5288 emit_call_1 (fun, NULL,
f725a3ec 5289 get_identifier (XSTR (orgfun, 0)),
b0c48229 5290 build_function_type (tfom, NULL_TREE),
f725a3ec 5291 original_args_size.constant, args_size.constant,
3c0fca12 5292 struct_value_size,
d5cc9181 5293 targetm.calls.function_arg (args_so_far,
3c07301f 5294 VOIDmode, void_type_node, true),
5591ee6f 5295 valreg,
d5cc9181 5296 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
3c0fca12 5297
1e288103 5298 if (flag_ipa_ra)
4f660b15 5299 {
e67d1102 5300 rtx datum = orgfun;
4f660b15 5301 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
e67d1102 5302 rtx_call_insn *last = last_call_insn ();
4f660b15
RO
5303 add_reg_note (last, REG_CALL_DECL, datum);
5304 }
5305
460b171d
JB
5306 /* Right-shift returned value if necessary. */
5307 if (!pcc_struct_value
5308 && TYPE_MODE (tfom) != BLKmode
5309 && targetm.calls.return_in_msb (tfom))
5310 {
5311 shift_return_value (TYPE_MODE (tfom), false, valreg);
5312 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5313 }
5314
2f21e1ba
BS
5315 targetm.calls.end_call_args ();
5316
6fb5fa3c
DB
5317 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5318 that it should complain if nonvolatile values are live. For
5319 functions that cannot return, inform flow that control does not
5320 fall through. */
6e14af16 5321 if (flags & ECF_NORETURN)
695ee791 5322 {
570a98eb 5323 /* The barrier note must be emitted
695ee791
RH
5324 immediately after the CALL_INSN. Some ports emit more than
5325 just a CALL_INSN above, so we must search for it here. */
48810515 5326 rtx_insn *last = get_last_insn ();
4b4bf941 5327 while (!CALL_P (last))
695ee791
RH
5328 {
5329 last = PREV_INSN (last);
5330 /* There was no CALL_INSN? */
366de0ce 5331 gcc_assert (last != before_call);
695ee791
RH
5332 }
5333
570a98eb 5334 emit_barrier_after (last);
695ee791
RH
5335 }
5336
85da11a6
EB
5337 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5338 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5339 if (flags & ECF_NOTHROW)
5340 {
48810515 5341 rtx_insn *last = get_last_insn ();
85da11a6
EB
5342 while (!CALL_P (last))
5343 {
5344 last = PREV_INSN (last);
5345 /* There was no CALL_INSN? */
5346 gcc_assert (last != before_call);
5347 }
5348
5349 make_reg_eh_region_note_nothrow_nononlocal (last);
5350 }
5351
3c0fca12
RH
5352 /* Now restore inhibit_defer_pop to its actual original value. */
5353 OK_DEFER_POP;
5354
5355 pop_temp_slots ();
5356
5357 /* Copy the value to the right place. */
de76b467 5358 if (outmode != VOIDmode && retval)
3c0fca12
RH
5359 {
5360 if (mem_value)
5361 {
5362 if (value == 0)
5363 value = mem_value;
5364 if (value != mem_value)
5365 emit_move_insn (value, mem_value);
5366 }
c3297561
AO
5367 else if (GET_CODE (valreg) == PARALLEL)
5368 {
5369 if (value == 0)
5370 value = gen_reg_rtx (outmode);
643642eb 5371 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
c3297561 5372 }
3c0fca12 5373 else
7ab0aca2 5374 {
cde0f3fd 5375 /* Convert to the proper mode if a promotion has been active. */
7ab0aca2
RH
5376 if (GET_MODE (valreg) != outmode)
5377 {
5378 int unsignedp = TYPE_UNSIGNED (tfom);
5379
cde0f3fd
PB
5380 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5381 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
7ab0aca2 5382 == GET_MODE (valreg));
7ab0aca2
RH
5383 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5384 }
5385
5386 if (value != 0)
5387 emit_move_insn (value, valreg);
5388 else
5389 value = valreg;
5390 }
3c0fca12
RH
5391 }
5392
f73ad30e 5393 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 5394 {
f73ad30e
JH
5395#ifdef REG_PARM_STACK_SPACE
5396 if (save_area)
b820d2b8
AM
5397 restore_fixed_argument_area (save_area, argblock,
5398 high_to_save, low_to_save);
3c0fca12 5399#endif
f725a3ec 5400
f73ad30e
JH
5401 /* If we saved any argument areas, restore them. */
5402 for (count = 0; count < nargs; count++)
5403 if (argvec[count].save_area)
5404 {
ef4bddc2 5405 machine_mode save_mode = GET_MODE (argvec[count].save_area);
0a81f074 5406 rtx adr = plus_constant (Pmode, argblock,
e7949876
AM
5407 argvec[count].locate.offset.constant);
5408 rtx stack_area = gen_rtx_MEM (save_mode,
5409 memory_address (save_mode, adr));
f73ad30e 5410
9778f2f8
JH
5411 if (save_mode == BLKmode)
5412 emit_block_move (stack_area,
1a8cb155
RS
5413 validize_mem
5414 (copy_rtx (argvec[count].save_area)),
a20c5714
RS
5415 (gen_int_mode
5416 (argvec[count].locate.size.constant, Pmode)),
9778f2f8
JH
5417 BLOCK_OP_CALL_PARM);
5418 else
5419 emit_move_insn (stack_area, argvec[count].save_area);
f73ad30e 5420 }
3c0fca12 5421
f73ad30e
JH
5422 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5423 stack_usage_map = initial_stack_usage_map;
a20c5714 5424 stack_usage_watermark = initial_stack_usage_watermark;
f73ad30e 5425 }
43bc5f13 5426
04695783 5427 free (stack_usage_map_buf);
d9725c41 5428
de76b467
JH
5429 return value;
5430
5431}
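/* Illustrative sketch (not part of the original source): a direct use
   of the function above for a hypothetical two-operand SImode libcall
   could look like

     rtx_mode_t args[] = { rtx_mode_t (op0, SImode),
			   rtx_mode_t (op1, SImode) };
     rtx res = emit_library_call_value_1 (1, libfunc, NULL_RTX,
					  LCT_CONST, SImode, 2, args);

   where LIBFUNC is a SYMBOL_REF for the routine; real callers normally
   reach this through the emit_library_call* wrappers in rtl.h. */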
5432\f
d5e254e1
IE
5433
5434/* Store pointer bounds argument ARG into Bounds Table entry
5435 associated with PARM. */
5436static void
5437store_bounds (struct arg_data *arg, struct arg_data *parm)
5438{
5439 rtx slot = NULL, ptr = NULL, addr = NULL;
5440
5441 /* We may pass bounds not associated with any pointer. */
5442 if (!parm)
5443 {
5444 gcc_assert (arg->special_slot);
5445 slot = arg->special_slot;
5446 ptr = const0_rtx;
5447 }
5448 /* Find pointer associated with bounds and where it is
5449 passed. */
5450 else
5451 {
5452 if (!parm->reg)
5453 {
5454 gcc_assert (!arg->special_slot);
5455
5456 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
5457 }
5458 else if (REG_P (parm->reg))
5459 {
5460 gcc_assert (arg->special_slot);
5461 slot = arg->special_slot;
5462
5463 if (MEM_P (parm->value))
5464 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
5465 else if (REG_P (parm->value))
5466 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
5467 else
5468 {
5469 gcc_assert (!arg->pointer_offset);
5470 ptr = parm->value;
5471 }
5472 }
5473 else
5474 {
5475 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
5476
5477 gcc_assert (arg->special_slot);
5478 slot = arg->special_slot;
5479
5480 if (parm->parallel_value)
5481 ptr = chkp_get_value_with_offs (parm->parallel_value,
5482 GEN_INT (arg->pointer_offset));
5483 else
5484 gcc_unreachable ();
5485 }
5486 }
5487
5488 /* Expand bounds. */
5489 if (!arg->value)
5490 arg->value = expand_normal (arg->tree_value);
5491
5492 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
5493}
5494
51bbfa0c
RS
5495/* Store a single argument for a function call
5496 into the register or memory area where it must be passed.
5497 *ARG describes the argument value and where to pass it.
5498
5499 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 5500 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
5501
5502 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
 f725a3ec 5503 so we must be careful about how the stack is used.
51bbfa0c
RS
5504
5505 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 5506 argument stack.  If ACCUMULATE_OUTGOING_ARGS, this is used to indicate
 5507 that we need not worry about saving and restoring the stack.
5508
4c6b3b2a 5509 FNDECL is the declaration of the function we are calling.
f725a3ec 5510
da7d8304 5511 Return nonzero if this arg should cause sibcall failure,
4c6b3b2a 5512 zero otherwise. */
51bbfa0c 5513
4c6b3b2a 5514static int
d329e058
AJ
5515store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5516 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
51bbfa0c 5517{
b3694847 5518 tree pval = arg->tree_value;
51bbfa0c
RS
5519 rtx reg = 0;
5520 int partial = 0;
a20c5714
RS
5521 poly_int64 used = 0;
5522 poly_int64 lower_bound = 0, upper_bound = 0;
4c6b3b2a 5523 int sibcall_failure = 0;
51bbfa0c
RS
5524
5525 if (TREE_CODE (pval) == ERROR_MARK)
4c6b3b2a 5526 return 1;
51bbfa0c 5527
cc79451b
RK
5528 /* Push a new temporary level for any temporaries we make for
5529 this argument. */
5530 push_temp_slots ();
5531
f8a097cd 5532 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
51bbfa0c 5533 {
f73ad30e
JH
 5534 /* If this is being stored into a pre-allocated, fixed-size stack area,
5535 save any previous data at that location. */
5536 if (argblock && ! variable_size && arg->stack)
5537 {
6dad9361
TS
5538 if (ARGS_GROW_DOWNWARD)
5539 {
5540 /* stack_slot is negative, but we want to index stack_usage_map
5541 with positive values. */
5542 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
5543 {
5544 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5545 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5546 }
6dad9361
TS
5547 else
5548 upper_bound = 0;
51bbfa0c 5549
6dad9361
TS
5550 lower_bound = upper_bound - arg->locate.size.constant;
5551 }
f73ad30e 5552 else
6dad9361
TS
5553 {
5554 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
5555 {
5556 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5557 lower_bound = rtx_to_poly_int64 (offset);
5558 }
6dad9361
TS
5559 else
5560 lower_bound = 0;
51bbfa0c 5561
6dad9361
TS
5562 upper_bound = lower_bound + arg->locate.size.constant;
5563 }
51bbfa0c 5564
a20c5714
RS
5565 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5566 reg_parm_stack_space))
51bbfa0c 5567 {
e7949876 5568 /* We need to make a save area. */
a20c5714 5569 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
f4b31647
RS
5570 machine_mode save_mode
5571 = int_mode_for_size (size, 1).else_blk ();
e7949876
AM
5572 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5573 rtx stack_area = gen_rtx_MEM (save_mode, adr);
f73ad30e
JH
5574
5575 if (save_mode == BLKmode)
5576 {
9ee5337d
EB
5577 arg->save_area
5578 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
f73ad30e 5579 preserve_temp_slots (arg->save_area);
1a8cb155
RS
5580 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5581 stack_area,
a20c5714
RS
5582 (gen_int_mode
5583 (arg->locate.size.constant, Pmode)),
44bb111a 5584 BLOCK_OP_CALL_PARM);
f73ad30e
JH
5585 }
5586 else
5587 {
5588 arg->save_area = gen_reg_rtx (save_mode);
5589 emit_move_insn (arg->save_area, stack_area);
5590 }
51bbfa0c
RS
5591 }
5592 }
5593 }
b564df06 5594
51bbfa0c
RS
5595 /* If this isn't going to be placed on both the stack and in registers,
5596 set up the register and number of words. */
5597 if (! arg->pass_on_stack)
aa7634dd
DM
5598 {
5599 if (flags & ECF_SIBCALL)
5600 reg = arg->tail_call_reg;
5601 else
5602 reg = arg->reg;
5603 partial = arg->partial;
5604 }
51bbfa0c 5605
366de0ce
NS
5606 /* Being passed entirely in a register. We shouldn't be called in
5607 this case. */
5608 gcc_assert (reg == 0 || partial != 0);
c22cacf3 5609
4ab56118
RK
5610 /* If this arg needs special alignment, don't load the registers
5611 here. */
5612 if (arg->n_aligned_regs != 0)
5613 reg = 0;
f725a3ec 5614
4ab56118 5615 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
5616 it directly into its stack slot. Otherwise, we can. */
5617 if (arg->value == 0)
d64f5a78 5618 {
d64f5a78
RS
5619 /* stack_arg_under_construction is nonzero if a function argument is
5620 being evaluated directly into the outgoing argument list and
5621 expand_call must take special action to preserve the argument list
5622 if it is called recursively.
5623
5624 For scalar function arguments stack_usage_map is sufficient to
5625 determine which stack slots must be saved and restored. Scalar
5626 arguments in general have pass_on_stack == 0.
5627
5628 If this argument is initialized by a function which takes the
5629 address of the argument (a C++ constructor or a C function
5630 returning a BLKmode structure), then stack_usage_map is
5631 insufficient and expand_call must push the stack around the
5632 function call. Such arguments have pass_on_stack == 1.
5633
5634 Note that it is always safe to set stack_arg_under_construction,
5635 but this generates suboptimal code if set when not needed. */
5636
5637 if (arg->pass_on_stack)
5638 stack_arg_under_construction++;
f73ad30e 5639
3a08477a
RK
5640 arg->value = expand_expr (pval,
5641 (partial
5642 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5643 ? NULL_RTX : arg->stack,
8403445a 5644 VOIDmode, EXPAND_STACK_PARM);
1efe6448
RK
5645
5646 /* If we are promoting object (or for any other reason) the mode
5647 doesn't agree, convert the mode. */
5648
7373d92d
RK
5649 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5650 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5651 arg->value, arg->unsignedp);
1efe6448 5652
d64f5a78
RS
5653 if (arg->pass_on_stack)
5654 stack_arg_under_construction--;
d64f5a78 5655 }
51bbfa0c 5656
0dc42b03 5657 /* Check for overlap with already clobbered argument area. */
07eef816
KH
5658 if ((flags & ECF_SIBCALL)
5659 && MEM_P (arg->value)
a20c5714
RS
5660 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5661 arg->locate.size.constant))
07eef816 5662 sibcall_failure = 1;
0dc42b03 5663
51bbfa0c
RS
5664 /* Don't allow anything left on stack from computation
5665 of argument to alloca. */
f8a097cd 5666 if (flags & ECF_MAY_BE_ALLOCA)
51bbfa0c
RS
5667 do_pending_stack_adjust ();
5668
5669 if (arg->value == arg->stack)
37a08a29
RK
5670 /* If the value is already in the stack slot, we are done. */
5671 ;
1efe6448 5672 else if (arg->mode != BLKmode)
51bbfa0c 5673 {
46bd2bee 5674 unsigned int parm_align;
51bbfa0c
RS
5675
5676 /* Argument is a scalar, not entirely passed in registers.
5677 (If part is passed in registers, arg->partial says how much
5678 and emit_push_insn will take care of putting it there.)
f725a3ec 5679
51bbfa0c
RS
5680 Push it, and if its size is less than the
5681 amount of space allocated to it,
5682 also bump stack pointer by the additional space.
5683 Note that in C the default argument promotions
5684 will prevent such mismatches. */
5685
7b4df2bf
RS
5686 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5687 ? 0 : GET_MODE_SIZE (arg->mode));
974aedcc 5688
51bbfa0c
RS
5689 /* Compute how much space the push instruction will push.
5690 On many machines, pushing a byte will advance the stack
5691 pointer by a halfword. */
5692#ifdef PUSH_ROUNDING
5693 size = PUSH_ROUNDING (size);
5694#endif
5695 used = size;
5696
5697 /* Compute how much space the argument should get:
5698 round up to a multiple of the alignment for arguments. */
76b0cbf8
RS
5699 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5700 != PAD_NONE)
7b4df2bf
RS
5701 /* At the moment we don't (need to) support ABIs for which the
5702 padding isn't known at compile time. In principle it should
5703 be easy to add though. */
5704 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c 5705
46bd2bee
JM
5706 /* Compute the alignment of the pushed argument. */
5707 parm_align = arg->locate.boundary;
76b0cbf8
RS
5708 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5709 == PAD_DOWNWARD)
46bd2bee 5710 {
a20c5714
RS
5711 poly_int64 pad = used - size;
5712 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5713 if (pad_align != 0)
5714 parm_align = MIN (parm_align, pad_align);
46bd2bee
JM
5715 }
5716
51bbfa0c
RS
5717 /* This isn't already where we want it on the stack, so put it there.
5718 This can either be done with push or copy insns. */
a20c5714 5719 if (maybe_ne (used, 0)
974aedcc
MP
5720 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5721 NULL_RTX, parm_align, partial, reg, used - size,
5722 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5723 reg_parm_stack_space,
5724 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
99206968 5725 sibcall_failure = 1;
841404cd
AO
5726
5727 /* Unless this is a partially-in-register argument, the argument is now
5728 in the stack. */
5729 if (partial == 0)
5730 arg->value = arg->stack;
51bbfa0c
RS
5731 }
5732 else
5733 {
5734 /* BLKmode, at least partly to be pushed. */
5735
1b1f20ca 5736 unsigned int parm_align;
a20c5714 5737 poly_int64 excess;
51bbfa0c
RS
5738 rtx size_rtx;
5739
5740 /* Pushing a nonscalar.
5741 If part is passed in registers, PARTIAL says how much
5742 and emit_push_insn will take care of putting it there. */
5743
5744 /* Round its size up to a multiple
5745 of the allocation unit for arguments. */
5746
e7949876 5747 if (arg->locate.size.var != 0)
51bbfa0c
RS
5748 {
5749 excess = 0;
e7949876 5750 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
51bbfa0c
RS
5751 }
5752 else
5753 {
78a52f11
RH
5754 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5755 for BLKmode is careful to avoid it. */
5756 excess = (arg->locate.size.constant
974aedcc 5757 - arg_int_size_in_bytes (TREE_TYPE (pval))
78a52f11 5758 + partial);
974aedcc 5759 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
bbbbb16a
ILT
5760 NULL_RTX, TYPE_MODE (sizetype),
5761 EXPAND_NORMAL);
51bbfa0c
RS
5762 }
5763
bfc45551 5764 parm_align = arg->locate.boundary;
1b1f20ca
RH
5765
5766 /* When an argument is padded down, the block is aligned to
5767 PARM_BOUNDARY, but the actual argument isn't. */
76b0cbf8
RS
5768 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5769 == PAD_DOWNWARD)
1b1f20ca 5770 {
e7949876 5771 if (arg->locate.size.var)
1b1f20ca 5772 parm_align = BITS_PER_UNIT;
a20c5714 5773 else
1b1f20ca 5774 {
a20c5714
RS
5775 unsigned int excess_align
5776 = known_alignment (excess) * BITS_PER_UNIT;
5777 if (excess_align != 0)
5778 parm_align = MIN (parm_align, excess_align);
1b1f20ca
RH
5779 }
5780 }
5781
3c0cb5de 5782 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4c6b3b2a
JJ
5783 {
5784 /* emit_push_insn might not work properly if arg->value and
e7949876 5785 argblock + arg->locate.offset areas overlap. */
4c6b3b2a 5786 rtx x = arg->value;
a20c5714 5787 poly_int64 i = 0;
4c6b3b2a 5788
38173d38 5789 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4c6b3b2a
JJ
5790 || (GET_CODE (XEXP (x, 0)) == PLUS
5791 && XEXP (XEXP (x, 0), 0) ==
38173d38 5792 crtl->args.internal_arg_pointer
481683e1 5793 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4c6b3b2a 5794 {
38173d38 5795 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
a20c5714 5796 i = rtx_to_poly_int64 (XEXP (XEXP (x, 0), 1));
4c6b3b2a 5797
b3877860
KT
5798 /* arg.locate doesn't contain the pretend_args_size offset,
5799 it's part of argblock. Ensure we don't count it in I. */
5800 if (STACK_GROWS_DOWNWARD)
5801 i -= crtl->args.pretend_args_size;
5802 else
5803 i += crtl->args.pretend_args_size;
5804
e0a21ab9 5805 /* expand_call should ensure this. */
366de0ce 5806 gcc_assert (!arg->locate.offset.var
a20c5714
RS
5807 && arg->locate.size.var == 0);
5808 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
4c6b3b2a 5809
a20c5714 5810 if (known_eq (arg->locate.offset.constant, i))
d6c2c77c
JC
5811 {
5812 /* Even though they appear to be at the same location,
5813 if part of the outgoing argument is in registers,
5814 they aren't really at the same location. Check for
5815 this by making sure that the incoming size is the
5816 same as the outgoing size. */
a20c5714 5817 if (maybe_ne (arg->locate.size.constant, size_val))
4c6b3b2a
JJ
5818 sibcall_failure = 1;
5819 }
a20c5714
RS
5820 else if (maybe_in_range_p (arg->locate.offset.constant,
5821 i, size_val))
5822 sibcall_failure = 1;
5823 /* Use arg->locate.size.constant instead of size_rtx
5824 because we only care about the part of the argument
5825 on the stack. */
5826 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5827 arg->locate.size.constant))
5828 sibcall_failure = 1;
4c6b3b2a
JJ
5829 }
5830 }
5831
974aedcc
MP
5832 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5833 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5834 parm_align, partial, reg, excess, argblock,
5835 ARGS_SIZE_RTX (arg->locate.offset),
5836 reg_parm_stack_space,
5837 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
51bbfa0c 5838
841404cd
AO
5839 /* Unless this is a partially-in-register argument, the argument is now
5840 in the stack.
51bbfa0c 5841
841404cd
AO
5842 ??? Unlike the case above, in which we want the actual
5843 address of the data, so that we can load it directly into a
5844 register, here we want the address of the stack slot, so that
5845 it's properly aligned for word-by-word copying or something
5846 like that. It's not clear that this is always correct. */
5847 if (partial == 0)
5848 arg->value = arg->stack_slot;
5849 }
8df3dbb7
RH
5850
5851 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5852 {
5853 tree type = TREE_TYPE (arg->tree_value);
5854 arg->parallel_value
5855 = emit_group_load_into_temps (arg->reg, arg->value, type,
5856 int_size_in_bytes (type));
5857 }
51bbfa0c 5858
8403445a
AM
5859 /* Mark all slots this store used. */
5860 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5861 && argblock && ! variable_size && arg->stack)
a20c5714 5862 mark_stack_region_used (lower_bound, upper_bound);
8403445a 5863
51bbfa0c
RS
5864 /* Once we have pushed something, pops can't safely
5865 be deferred during the rest of the arguments. */
5866 NO_DEFER_POP;
5867
9474e8ab 5868 /* Free any temporary slots made in processing this argument. */
cc79451b 5869 pop_temp_slots ();
4c6b3b2a
JJ
5870
5871 return sibcall_failure;
51bbfa0c 5872}
a4b1b92a 5873
fe984136 5874/* Nonzero if we do not know how to pass TYPE solely in registers. */
a4b1b92a 5875
fe984136 5876bool
ef4bddc2 5877must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
586de218 5878 const_tree type)
fe984136
RH
5879{
5880 if (!type)
5881 return false;
5882
5883 /* If the type has variable size... */
5884 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5885 return true;
a4b1b92a 5886
fe984136
RH
5887 /* If the type is marked as addressable (it is required
5888 to be constructed into the stack)... */
5889 if (TREE_ADDRESSABLE (type))
5890 return true;
5891
5892 return false;
5893}
a4b1b92a 5894
7ae4ad28 5895/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
fe984136
RH
5896 takes trailing padding of a structure into account. */
5897/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
a4b1b92a
RH
5898
5899bool
ef4bddc2 5900must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
a4b1b92a
RH
5901{
5902 if (!type)
40cdfd5a 5903 return false;
a4b1b92a
RH
5904
5905 /* If the type has variable size... */
5906 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5907 return true;
5908
5909 /* If the type is marked as addressable (it is required
5910 to be constructed into the stack)... */
5911 if (TREE_ADDRESSABLE (type))
5912 return true;
5913
974aedcc
MP
5914 if (TYPE_EMPTY_P (type))
5915 return false;
5916
a4b1b92a
RH
5917 /* If the padding and mode of the type is such that a copy into
5918 a register would put it into the wrong part of the register. */
5919 if (mode == BLKmode
5920 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
76b0cbf8
RS
5921 && (targetm.calls.function_arg_padding (mode, type)
5922 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
a4b1b92a
RH
5923 return true;
5924
5925 return false;
5926}
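/* Worked example (illustrative only): a 6-byte BLKmode structure with
   PARM_BOUNDARY == 32 leaves int_size_in_bytes % 4 == 2, so it does
   not fill its last register word; if the target then pads it the
   "wrong" way for its endianness (PAD_UPWARD on big-endian,
   PAD_DOWNWARD on little-endian), a register copy would leave the
   data at the wrong end of the word, and the function above forces
   the argument onto the stack instead. */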
6bf29a7e
MS
5927
5928/* Tell the garbage collector about GTY markers in this source file. */
5929#include "gt-calls.h"