/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "stringpool.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
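
/* For example, on a target whose PREFERRED_STACK_BOUNDARY is 128 bits
   (the common x86-64 configuration), STACK_BYTES is 128 / 8 = 16, so
   outgoing argument block sizes below get rounded up to multiples of
   16 bytes.  */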

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds a number
     of a special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds an index of the parm that the bounds are
     bound to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds an
     offset of a pointer in this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
			   unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
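
/* For example, marking bytes [16, 16 + N) where N is not a compile-time
   constant never touches stack_usage_map; it just lowers
   stack_usage_watermark to 16.  A later query such as
   stack_region_maybe_used_p (8, 24, 0) then conservatively reports the
   region as possibly in use, because its constant upper bound 24 exceeds
   the watermark.  */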

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer,
	 which is of ptr_mode.  In this case, it should be converted into
	 address mode to be a valid address for a memory rtx pattern.  See
	 PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}
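
/* The runtime identification sequence emitted above corresponds roughly
   to the following pseudo-code, for a call through pointer FP with
   descriptor tag BIT_VAL:

       if (FP & BIT_VAL)
	 {
	   chain = ((void **) (FP - BIT_VAL))[0];
	   FP = ((void **) (FP - BIT_VAL))[1];
	 }
       (*FP) (...);   // with the static chain register set to CHAIN

   Untagged pointers, i.e. ordinary function addresses, branch straight
   to the call.  */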

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     poly_int64 stack_size ATTRIBUTE_UNUSED,
	     poly_int64 rounded_stack_size,
	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg,
				   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
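
/* Because of the prefix stripping above, calls named `setjmp', `_setjmp'
   and `__sigsetjmp' all end up flagged ECF_RETURNS_TWICE, whereas
   `savectx', `vfork' and `getcontext' are compared against the
   unstripped NAME and so must match exactly.  */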

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
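
/* For example, an internal "fn spec" string starting with '1' marks a
   function that returns its first argument (the way memcpy returns its
   destination), yielding ERF_RETURNS_ARG with argument index 0, while a
   leading 'm' marks a malloc-like result and yields ERF_NOALIAS.  */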

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return gimple_call_num_args (stmt) > 0;
      default:
	break;
      }

  return false;
}

/* Return true when exp contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}
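
/* Note the asymmetry between the two gimple predicates above: a call to
   a user-declared `extern void *alloca (size_t)' is only "maybe" an
   alloca call, matched by name through special_function_p, whereas
   gimple_alloca_call_p insists on one of the BUILT_IN_ALLOCA built-ins.  */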

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
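
/* As an example of the final fixup above: a function declared
   __attribute__ ((const, noreturn)) has both TREE_READONLY and
   TREE_THIS_VOLATILE set, so it gets ECF_CONST | ECF_NORETURN, plus
   ECF_LOOPING_CONST_OR_PURE, since a const function that never returns
   cannot be treated as a removable pure expression.  */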

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}
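
/* Two cases above are forced by-reference independently of the target
   hook: TREE_ADDRESSABLE types (e.g. a C++ class with a non-trivial
   copy constructor) and variable-sized types such as C99 VLAs, whose
   TYPE_SIZE is not an INTEGER_CST.  Everything else is delegated to
   targetm.calls.pass_by_reference.  */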

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}

/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  alloc_object_size_limit
    = build_int_cst (size_type_node, warn_alloc_size_limit);

  return alloc_object_size_limit;
}

/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.  When ALLOW_ZERO is true, allow
   returning a range of [0, 0] for a size in an anti-range [1, N] where
   N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
   allocation functions like malloc but it is a valid argument to
   functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_type range_type;

  if (integral)
    range_type = determine_value_range (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else if (wi::eq_p (0, min - 1))
	{
	  /* EXP is unsigned and not in the range [1, MAX].  That means
	     it's either zero or greater than MAX.  Even though 0 would
	     normally be detected by -Walloc-zero, unless ALLOW_ZERO
	     is true, set the range to [MAX, TYPE_MAX] so that when MAX
	     is greater than the limit the whole range is diagnosed.  */
	  if (allow_zero)
	    min = max = wi::zero (expprec);
	  else
	    {
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	}
      else
	{
	  max = min - 1;
	  min = wi::zero (expprec);
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
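
/* A concrete case of the anti-range handling above: an unsigned size
   known to be in ~[1, N] (that is, either zero or greater than N) is
   normally mapped to [N + 1, TYPE_MAX], so the caller can diagnose the
   whole range once N + 1 exceeds the limit; with ALLOW_ZERO it collapses
   to [0, 0] instead, which suits memset-like callers for which a zero
   size is valid.  */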

/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  When ARGS[0] is null the function does nothing.  ARGS[1]
   may be null for functions like malloc, and non-null for those like
   calloc that are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
	{
	  argrange[i][0] = args[i];
	  argrange[i][1] = args[i];

	  if (tree_int_cst_lt (args[i], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE is negative",
				   exp, idx[i] + 1, args[i]);
	    }
	  else if (integer_zerop (args[i]))
	    {
	      /* Avoid issuing -Walloc-zero for allocation functions other
		 than __builtin_alloca that are declared with attribute
		 returns_nonnull because there's no portability risk.  This
		 avoids warning for such calls to libiberty's xmalloc and
		 friends.
		 Also avoid issuing the warning for calls to function named
		 "alloca".  */
	      if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
		   && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
		  || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
		      && !lookup_attribute ("returns_nonnull",
					    TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
		warned = warning_at (loc, OPT_Walloc_zero,
				     "%Kargument %i value is zero",
				     exp, idx[i] + 1);
	    }
	  else if (tree_int_cst_lt (maxobjsize, args[i]))
	    {
	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
		 mode and with -fno-exceptions as a way to indicate array
		 size overflow.  There's no good way to detect C++98 here
		 so avoid diagnosing these calls for all C++ modes.  */
	      if (i == 0
		  && !args[1]
		  && lang_GNU_CXX ()
		  && DECL_IS_OPERATOR_NEW (fn)
		  && integer_all_onesp (args[i]))
		continue;

	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1, args[i], maxobjsize);
	    }
	}
      else if (TREE_CODE (args[i]) == SSA_NAME
	       && get_size_range (args[i], argrange[i]))
	{
	  /* Verify that the argument's range is not negative (including
	     upper bound of zero).  */
	  if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
	      && tree_int_cst_le (argrange[i][1], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] is negative",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1]);
	    }
	  else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1],
				   maxobjsize);
	    }
	}
    }

  if (!argrange[0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
	 attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds %<SIZE_MAX%>",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds maximum object size %E",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1,
			     maxobjsize);

      if (warned)
	{
	  /* Print the full range of each of the two arguments to make
	     it clear when it is, in fact, in a range and not constant.  */
	  if (argrange[0][0] != argrange[0][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[0] + 1, argrange[0][0], argrange[0][1]);
	  if (argrange[1][0] != argrange[1][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[1] + 1, argrange[1][0], argrange[1][1]);
	}
    }

  if (warned)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_BUILTIN (fn))
	inform (loc,
		"in a call to built-in allocation function %qD", fn);
      else
	inform (fnloc,
		"in a call to allocation function %qD declared here", fn);
    }
}
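
/* For example, given a declaration such as

     void *my_calloc (size_t, size_t)
	  __attribute__ ((alloc_size (1, 2)));

   the loop above checks each argument's value or range against the
   -Walloc-size-larger-than= limit, and the product check multiplies the
   two lower bounds in the precision of size_t via wi::umul, warning
   either when the multiplication wraps or when the product exceeds the
   limit.  (my_calloc is only an illustration, not a function used in
   this file.)  */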

/* If EXPR refers to a character array or pointer declared attribute
   nonstring return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise returns
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else if (tree var = SSA_NAME_VAR (decl))
	decl = var;
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  if (ref)
    *ref = decl;

  if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
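
/* A declaration this function is meant to match looks like

     char serial[8] __attribute__ ((nonstring));

   i.e. a character array (or pointer) that may legitimately hold a
   sequence that is not nul-terminated; maybe_warn_nonstring_arg below
   warns when such an object is passed to a function that expects a
   nul-terminated string.  (The name `serial' is just an illustration.)  */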
1538
0c45740b 1539/* Warn about passing a non-string array/pointer to a function that
1540 expects a nul-terminated string argument. */
1541
1542void
1543maybe_warn_nonstring_arg (tree fndecl, tree exp)
1544{
a0e9bfbb 1545 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
0c45740b 1546 return;
1547
13308b37 1548 if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
864bd5de 1549 return;
1550
625c5395 1551 unsigned nargs = call_expr_nargs (exp);
1552
0c45740b 1553 /* The bound argument to a bounded string function like strncpy. */
1554 tree bound = NULL_TREE;
1555
974404bd 1556 /* The range of lengths of a string argument to one of the comparison
1557 functions. If the length is less than the bound it is used instead. */
1558 tree lenrng[2] = { NULL_TREE, NULL_TREE };
1559
0c45740b 1560 /* It's safe to call "bounded" string functions with a non-string
1561 argument since the functions provide an explicit bound for this
0eff2551 1562 purpose. The exception is strncat where the bound may refer to
1563 either the destination or the source. */
1564 int fncode = DECL_FUNCTION_CODE (fndecl);
1565 switch (fncode)
0c45740b 1566 {
974404bd 1567 case BUILT_IN_STRCMP:
0c45740b 1568 case BUILT_IN_STRNCMP:
1569 case BUILT_IN_STRNCASECMP:
974404bd 1570 {
1571 /* For these, if one argument refers to one or more of a set
1572 of string constants or arrays of known size, determine
1573 the range of their known or possible lengths and use it
1574 conservatively as the bound for the unbounded function,
1575 and to adjust the range of the bound of the bounded ones. */
13308b37 1576 for (unsigned argno = 0;
1577 argno < MIN (nargs, 2)
1578 && !(lenrng[1] && TREE_CODE (lenrng[1]) == INTEGER_CST); argno++)
974404bd 1579 {
1580 tree arg = CALL_EXPR_ARG (exp, argno);
1581 if (!get_attr_nonstring_decl (arg))
1582 get_range_strlen (arg, lenrng);
1583 }
1584 }
1585 /* Fall through. */
1586
0eff2551 1587 case BUILT_IN_STRNCAT:
974404bd 1588 case BUILT_IN_STPNCPY:
0c45740b 1589 case BUILT_IN_STRNCPY:
13308b37 1590 if (nargs > 2)
864bd5de 1591 bound = CALL_EXPR_ARG (exp, 2);
1592 break;
0c45740b 1593
1594 case BUILT_IN_STRNDUP:
13308b37 1595 if (nargs > 1)
864bd5de 1596 bound = CALL_EXPR_ARG (exp, 1);
1597 break;
1598
1599 case BUILT_IN_STRNLEN:
625c5395 1600 {
864bd5de 1601 tree arg = CALL_EXPR_ARG (exp, 0);
1602 if (!get_attr_nonstring_decl (arg))
1603 get_range_strlen (arg, lenrng);
1604
13308b37 1605 if (nargs > 1)
864bd5de 1606 bound = CALL_EXPR_ARG (exp, 1);
625c5395 1607 break;
1608 }
0c45740b 1609
1610 default:
1611 break;
1612 }
1613
1614 /* Determine the range of the bound argument (if specified). */
1615 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1616 if (bound)
0b39ade8 1617 {
1618 STRIP_NOPS (bound);
1619 get_size_range (bound, bndrng);
1620 }
0c45740b 1621
864bd5de 1622 location_t loc = EXPR_LOCATION (exp);
1623
1624 if (bndrng[0])
1625 {
 1626	  /* Diagnose an excessive bound prior to the adjustment below and
 1627	     regardless of attribute nonstring.  */
1628 tree maxobjsize = max_object_size ();
1629 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1630 {
1631 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1632 warning_at (loc, OPT_Wstringop_overflow_,
1633 "%K%qD specified bound %E "
1634 "exceeds maximum object size %E",
1635 exp, fndecl, bndrng[0], maxobjsize);
1636 else
1637 warning_at (loc, OPT_Wstringop_overflow_,
1638 "%K%qD specified bound [%E, %E] "
1639 "exceeds maximum object size %E",
1640 exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
1641 return;
1642 }
1643 }
1644
13308b37 1645 if (lenrng[1] && TREE_CODE (lenrng[1]) == INTEGER_CST)
974404bd 1646 {
1647 /* Add one for the nul. */
974404bd 1648 lenrng[1] = const_binop (PLUS_EXPR, TREE_TYPE (lenrng[1]),
1649 lenrng[1], size_one_node);
1650
1651 if (!bndrng[0])
1652 {
1653 /* Conservatively use the upper bound of the lengths for
1654 both the lower and the upper bound of the operation. */
1655 bndrng[0] = lenrng[1];
1656 bndrng[1] = lenrng[1];
1657 bound = void_type_node;
1658 }
1659 else
1660 {
3ab72211 1661 /* Replace the bound on the operation with the upper bound
974404bd 1662 of the length of the string if the latter is smaller. */
1663 if (tree_int_cst_lt (lenrng[1], bndrng[0]))
1664 bndrng[0] = lenrng[1];
1665 else if (tree_int_cst_lt (lenrng[1], bndrng[1]))
1666 bndrng[1] = lenrng[1];
1667 }
1668 }
1669
0c45740b 1670  /* Iterate over the built-in function's formal arguments and check
 1671     each const char* against the actual argument.  If the actual
 1672     argument is declared attribute non-string, issue a warning unless
 1673     the argument's maximum length is bounded.  */
1674 function_args_iterator it;
1675 function_args_iter_init (&it, TREE_TYPE (fndecl));
1676
1677 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1678 {
625c5395 1679      /* Avoid iterating past the declared argument in a call
 1680	 to a function declared without a prototype.  */
1681 if (argno >= nargs)
1682 break;
1683
0c45740b 1684 tree argtype = function_args_iter_cond (&it);
1685 if (!argtype)
1686 break;
1687
1688 if (TREE_CODE (argtype) != POINTER_TYPE)
1689 continue;
1690
1691 argtype = TREE_TYPE (argtype);
1692
1693 if (TREE_CODE (argtype) != INTEGER_TYPE
1694 || !TYPE_READONLY (argtype))
1695 continue;
1696
1697 argtype = TYPE_MAIN_VARIANT (argtype);
1698 if (argtype != char_type_node)
1699 continue;
1700
1701 tree callarg = CALL_EXPR_ARG (exp, argno);
1702 if (TREE_CODE (callarg) == ADDR_EXPR)
1703 callarg = TREE_OPERAND (callarg, 0);
1704
1705 /* See if the destination is declared with attribute "nonstring". */
1706 tree decl = get_attr_nonstring_decl (callarg);
1707 if (!decl)
1708 continue;
1709
974404bd 1710 /* The maximum number of array elements accessed. */
0c45740b 1711 offset_int wibnd = 0;
0eff2551 1712
1713 if (argno && fncode == BUILT_IN_STRNCAT)
1714 {
 1715	  /* See if the bound in strncat is derived from the strlen
 1716	     of the destination (as it's expected to be).
1717 If so, reset BOUND and FNCODE to trigger a warning. */
1718 tree dstarg = CALL_EXPR_ARG (exp, 0);
1719 if (is_strlen_related_p (dstarg, bound))
1720 {
1721 /* The bound applies to the destination, not to the source,
1722 so reset these to trigger a warning without mentioning
1723 the bound. */
1724 bound = NULL;
1725 fncode = 0;
1726 }
1727 else if (bndrng[1])
1728 /* Use the upper bound of the range for strncat. */
1729 wibnd = wi::to_offset (bndrng[1]);
1730 }
1731 else if (bndrng[0])
1732 /* Use the lower bound of the range for functions other than
1733 strncat. */
0c45740b 1734 wibnd = wi::to_offset (bndrng[0]);
1735
0eff2551 1736 /* Determine the size of the argument array if it is one. */
0c45740b 1737 offset_int asize = wibnd;
0eff2551 1738 bool known_size = false;
1739 tree type = TREE_TYPE (decl);
0c45740b 1740
974404bd 1741 /* Determine the array size. For arrays of unknown bound and
1742 pointers reset BOUND to trigger the appropriate warning. */
0c45740b 1743 if (TREE_CODE (type) == ARRAY_TYPE)
974404bd 1744 {
1745 if (tree arrbnd = TYPE_DOMAIN (type))
1746 {
1747 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
0eff2551 1748 {
1749 asize = wi::to_offset (arrbnd) + 1;
1750 known_size = true;
1751 }
974404bd 1752 }
1753 else if (bound == void_type_node)
1754 bound = NULL_TREE;
1755 }
1756 else if (bound == void_type_node)
1757 bound = NULL_TREE;
0c45740b 1758
0eff2551 1759      /* In a call to strncat with a bound in a range whose lower but
 1760	 not upper bound is less than the array size, reset ASIZE to
 1761	 be the same as the bound and clear the other variables to trigger
 1762	 the appropriate warning below.  */
1763 if (fncode == BUILT_IN_STRNCAT
1764 && bndrng[0] != bndrng[1]
1765 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1766 && (!known_size
1767 || wi::ltu_p (asize, wibnd)))
1768 {
1769 asize = wibnd;
1770 bound = NULL_TREE;
1771 fncode = 0;
1772 }
1773
0c45740b 1774 bool warned = false;
1775
bc35ef65 1776 auto_diagnostic_group d;
0c45740b 1777 if (wi::ltu_p (asize, wibnd))
0eff2551 1778 {
1779 if (bndrng[0] == bndrng[1])
1780 warned = warning_at (loc, OPT_Wstringop_overflow_,
1781 "%qD argument %i declared attribute "
1782 "%<nonstring%> is smaller than the specified "
1783 "bound %wu",
1784 fndecl, argno + 1, wibnd.to_uhwi ());
1785 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1786 warned = warning_at (loc, OPT_Wstringop_overflow_,
1787 "%qD argument %i declared attribute "
1788 "%<nonstring%> is smaller than "
1789 "the specified bound [%E, %E]",
1790 fndecl, argno + 1, bndrng[0], bndrng[1]);
1791 else
1792 warned = warning_at (loc, OPT_Wstringop_overflow_,
1793 "%qD argument %i declared attribute "
1794 "%<nonstring%> may be smaller than "
1795 "the specified bound [%E, %E]",
1796 fndecl, argno + 1, bndrng[0], bndrng[1]);
1797 }
1798 else if (fncode == BUILT_IN_STRNCAT)
1799 ; /* Avoid warning for calls to strncat() when the bound
1800 is equal to the size of the non-string argument. */
0c45740b 1801 else if (!bound)
1802 warned = warning_at (loc, OPT_Wstringop_overflow_,
1803 "%qD argument %i declared attribute %<nonstring%>",
1804 fndecl, argno + 1);
1805
1806 if (warned)
1807 inform (DECL_SOURCE_LOCATION (decl),
1808 "argument %qD declared here", decl);
1809 }
1810}
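
/* A minimal sketch of what the checks above diagnose, using hypothetical
   declarations and with -Wstringop-overflow enabled:

     #include <string.h>

     char buf[8] __attribute__ ((nonstring));

     void f (const char *s)
     {
       strlen (buf);                   // warned: no bound at all
       strncpy (buf, s, 10);           // warned: bound 10 exceeds the
                                       // 8-byte nonstring array
       strncpy (buf, s, sizeof buf);   // not warned: explicit bound
     }
*/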
1811
b4a61e77 1812/* Issue an error if CALL_EXPR was flagged as requiring
 1813   tail-call optimization.  */
1814
1815static void
1816maybe_complain_about_tail_call (tree call_expr, const char *reason)
1817{
1818 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1819 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1820 return;
1821
1822 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1823}
1824
cb543c54 1825/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
48e1416a 1826 CALL_EXPR EXP.
cb543c54 1827
1828 NUM_ACTUALS is the total number of parameters.
1829
1830 N_NAMED_ARGS is the total number of named arguments.
1831
cd46caee 1832 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1833 value, or null.
1834
cb543c54 1835   FNDECL is the tree node for the target of this call (if known).
1836
1837 ARGS_SO_FAR holds state needed by the target to know where to place
1838 the next argument.
1839
1840 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1841 for arguments which are passed in registers.
1842
 1843   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1844 and may be modified by this routine.
1845
dfe08167 1846 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
47ae02b7 1847 flags which may be modified by this routine.
eaa112a0 1848
4ee9c684 1849 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1850 that requires allocation of stack space.
1851
eaa112a0 1852 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1853 the thunked-to function. */
cb543c54 1854
1855static void
4c9e08a4 1856initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1857 struct arg_data *args,
1858 struct args_size *args_size,
1859 int n_named_args ATTRIBUTE_UNUSED,
cd46caee 1860 tree exp, tree struct_value_addr_value,
d8b9c828 1861 tree fndecl, tree fntype,
39cba157 1862 cumulative_args_t args_so_far,
4c9e08a4 1863 int reg_parm_stack_space,
e0deb08c 1864 rtx *old_stack_level,
1865 poly_int64_pod *old_pending_adj,
eaa112a0 1866 int *must_preallocate, int *ecf_flags,
4ee9c684 1867 bool *may_tailcall, bool call_from_thunk_p)
cb543c54 1868{
39cba157 1869 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
389dd41b 1870 location_t loc = EXPR_LOCATION (exp);
cb543c54 1871
1872 /* Count arg position in order args appear. */
1873 int argpos;
1874
1875 int i;
c87678e4 1876
cb543c54 1877 args_size->constant = 0;
1878 args_size->var = 0;
1879
058a1b7a 1880 bitmap_obstack_initialize (NULL);
1881
cb543c54 1882 /* In this loop, we consider args in the order they are written.
bf29c577 1883 We fill up ARGS from the back. */
cb543c54 1884
bf29c577 1885 i = num_actuals - 1;
cd46caee 1886 {
1e42d5c6 1887 int j = i;
cd46caee 1888 call_expr_arg_iterator iter;
1889 tree arg;
058a1b7a 1890 bitmap slots = NULL;
cd46caee 1891
1892 if (struct_value_addr_value)
1893 {
1894 args[j].tree_value = struct_value_addr_value;
bf29c577 1895 j--;
cd46caee 1896 }
e66d763a 1897 argpos = 0;
cd46caee 1898 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1899 {
1900 tree argtype = TREE_TYPE (arg);
058a1b7a 1901
cd46caee 1902 if (targetm.calls.split_complex_arg
1903 && argtype
1904 && TREE_CODE (argtype) == COMPLEX_TYPE
1905 && targetm.calls.split_complex_arg (argtype))
1906 {
1907 tree subtype = TREE_TYPE (argtype);
cd46caee 1908 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
bf29c577 1909 j--;
cd46caee 1910 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1911 }
1912 else
1913 args[j].tree_value = arg;
bf29c577 1914 j--;
e66d763a 1915 argpos++;
cd46caee 1916 }
058a1b7a 1917
1918 if (slots)
1919 BITMAP_FREE (slots);
cd46caee 1920 }
1921
058a1b7a 1922 bitmap_obstack_release (NULL);
1923
370e45b9 1924 /* Extract attribute alloc_size and if set, store the indices of
1925 the corresponding arguments in ALLOC_IDX, and then the actual
1926 argument(s) at those indices in ALLOC_ARGS. */
1927 int alloc_idx[2] = { -1, -1 };
1928 if (tree alloc_size
1929 = (fndecl ? lookup_attribute ("alloc_size",
1930 TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
1931 : NULL_TREE))
1932 {
1933 tree args = TREE_VALUE (alloc_size);
1934 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1935 if (TREE_CHAIN (args))
1936 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1937 }
1938
1939 /* Array for up to the two attribute alloc_size arguments. */
1940 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1941
cb543c54 1942 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
bf29c577 1943 for (argpos = 0; argpos < num_actuals; i--, argpos++)
cb543c54 1944 {
cd46caee 1945 tree type = TREE_TYPE (args[i].tree_value);
cb543c54 1946 int unsignedp;
3754d046 1947 machine_mode mode;
cb543c54 1948
cb543c54 1949 /* Replace erroneous argument with constant zero. */
4b72716d 1950 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
cb543c54 1951 args[i].tree_value = integer_zero_node, type = integer_type_node;
1952
8df5a43d 1953 /* If TYPE is a transparent union or record, pass things the way
1954 we would pass the first field of the union or record. We have
1955 already verified that the modes are the same. */
1956 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1957 && TYPE_TRANSPARENT_AGGR (type))
1958 type = TREE_TYPE (first_field (type));
cb543c54 1959
1960 /* Decide where to pass this arg.
1961
1962 args[i].reg is nonzero if all or part is passed in registers.
1963
1964 args[i].partial is nonzero if part but not all is passed in registers,
f054eb3c 1965 and the exact value says how many bytes are passed in registers.
cb543c54 1966
1967 args[i].pass_on_stack is nonzero if the argument must at least be
1968 computed on the stack. It may then be loaded back into registers
1969 if args[i].reg is nonzero.
1970
1971 These decisions are driven by the FUNCTION_... macros and must agree
1972 with those made by function.c. */
1973
1974 /* See if this argument should be passed by invisible reference. */
39cba157 1975 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
cc9b8628 1976 type, argpos < n_named_args))
cb543c54 1977 {
41dc12b4 1978 bool callee_copies;
bc4577c4 1979 tree base = NULL_TREE;
41dc12b4 1980
1981 callee_copies
39cba157 1982 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
13f08ee7 1983 type, argpos < n_named_args);
41dc12b4 1984
1985 /* If we're compiling a thunk, pass through invisible references
1986 instead of making a copy. */
eaa112a0 1987 if (call_from_thunk_p
41dc12b4 1988 || (callee_copies
1989 && !TREE_ADDRESSABLE (type)
1990 && (base = get_base_address (args[i].tree_value))
d6230243 1991 && TREE_CODE (base) != SSA_NAME
41dc12b4 1992 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
cb543c54 1993 {
6b7d03d8 1994 /* We may have turned the parameter value into an SSA name.
1995 Go back to the original parameter so we can take the
1996 address. */
1997 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1998 {
1999 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2000 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2001 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2002 }
9502706b 2003	  /* Argument setup code may have copied the value to a register.  We
2004 revert that optimization now because the tail call code must
2005 use the original location. */
2006 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2007 && !MEM_P (DECL_RTL (args[i].tree_value))
2008 && DECL_INCOMING_RTL (args[i].tree_value)
2009 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2010 set_decl_rtl (args[i].tree_value,
2011 DECL_INCOMING_RTL (args[i].tree_value));
2012
006e2d5a 2013 mark_addressable (args[i].tree_value);
2014
41dc12b4 2015 /* We can't use sibcalls if a callee-copied argument is
2016 stored in the current function's frame. */
2017 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
b4a61e77 2018 {
2019 *may_tailcall = false;
2020 maybe_complain_about_tail_call (exp,
2021 "a callee-copied argument is"
dc4ed9fd 2022 " stored in the current"
b4a61e77 2023 " function's frame");
2024 }
c71e72dd 2025
389dd41b 2026 args[i].tree_value = build_fold_addr_expr_loc (loc,
2027 args[i].tree_value);
41dc12b4 2028 type = TREE_TYPE (args[i].tree_value);
2029
9c2a0c05 2030 if (*ecf_flags & ECF_CONST)
2031 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
ce95a955 2032 }
cb543c54 2033 else
2034 {
2035 /* We make a copy of the object and pass the address to the
2036 function being called. */
2037 rtx copy;
2038
4b72716d 2039 if (!COMPLETE_TYPE_P (type)
4852b829 2040 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2041 || (flag_stack_check == GENERIC_STACK_CHECK
2042 && compare_tree_int (TYPE_SIZE_UNIT (type),
2043 STACK_CHECK_MAX_VAR_SIZE) > 0))
cb543c54 2044 {
2045 /* This is a variable-sized object. Make space on the stack
2046 for it. */
cd46caee 2047 rtx size_rtx = expr_size (args[i].tree_value);
cb543c54 2048
2049 if (*old_stack_level == 0)
2050 {
e9c97615 2051 emit_stack_save (SAVE_BLOCK, old_stack_level);
cb543c54 2052 *old_pending_adj = pending_stack_adjust;
2053 pending_stack_adjust = 0;
2054 }
2055
990495a7 2056 /* We can pass TRUE as the 4th argument because we just
2057 saved the stack pointer and will restore it right after
2058 the call. */
5be42b39 2059 copy = allocate_dynamic_stack_space (size_rtx,
2060 TYPE_ALIGN (type),
2061 TYPE_ALIGN (type),
2b34677f 2062 max_int_size_in_bytes
2063 (type),
5be42b39 2064 true);
2065 copy = gen_rtx_MEM (BLKmode, copy);
f7c44134 2066 set_mem_attributes (copy, type, 1);
cb543c54 2067 }
2068 else
0ab48139 2069 copy = assign_temp (type, 1, 0);
cb543c54 2070
292237f3 2071 store_expr (args[i].tree_value, copy, 0, false, false);
cb543c54 2072
9c2a0c05 2073 /* Just change the const function to pure and then let
2074 the next test clear the pure based on
2075 callee_copies. */
2076 if (*ecf_flags & ECF_CONST)
2077 {
2078 *ecf_flags &= ~ECF_CONST;
2079 *ecf_flags |= ECF_PURE;
2080 }
2081
2082 if (!callee_copies && *ecf_flags & ECF_PURE)
2083 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
41dc12b4 2084
2085 args[i].tree_value
389dd41b 2086 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
41dc12b4 2087 type = TREE_TYPE (args[i].tree_value);
4ee9c684 2088 *may_tailcall = false;
b4a61e77 2089 maybe_complain_about_tail_call (exp,
2090 "argument must be passed"
2091 " by copying");
cb543c54 2092 }
2093 }
2094
78a8ed03 2095 unsignedp = TYPE_UNSIGNED (type);
3b2411a8 2096 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2097 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
cb543c54 2098
2099 args[i].unsignedp = unsignedp;
2100 args[i].mode = mode;
7a8d641b 2101
532d84ff 2102 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2103
f387af4f 2104 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
2105 argpos < n_named_args);
2106
058a1b7a 2107 if (args[i].reg && CONST_INT_P (args[i].reg))
2108 {
2109 args[i].special_slot = args[i].reg;
2110 args[i].reg = NULL;
2111 }
2112
7a8d641b 2113 /* If this is a sibling call and the machine has register windows, the
 2114     register window has to be unwound before calling the routine, so
2115 arguments have to go into the incoming registers. */
f387af4f 2116 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2117 args[i].tail_call_reg
2118 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
2119 argpos < n_named_args);
2120 else
2121 args[i].tail_call_reg = args[i].reg;
7a8d641b 2122
cb543c54 2123 if (args[i].reg)
2124 args[i].partial
f054eb3c 2125 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
2126 argpos < n_named_args);
cb543c54 2127
0336f0f0 2128 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
cb543c54 2129
2130 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2131 it means that we are to pass this arg in the register(s) designated
2132 by the PARALLEL, but also to pass it in the stack. */
2133 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2134 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2135 args[i].pass_on_stack = 1;
2136
2137 /* If this is an addressable type, we must preallocate the stack
2138 since we must evaluate the object into its final location.
2139
2140 If this is to be passed in both registers and the stack, it is simpler
2141 to preallocate. */
2142 if (TREE_ADDRESSABLE (type)
2143 || (args[i].pass_on_stack && args[i].reg != 0))
2144 *must_preallocate = 1;
2145
cb543c54 2146 /* Compute the stack-size of this argument. */
1e42d5c6 2147 if (args[i].reg == 0 || args[i].partial != 0
058a1b7a 2148 || reg_parm_stack_space > 0
2149 || args[i].pass_on_stack)
cb543c54 2150 locate_and_pad_parm (mode, type,
2151#ifdef STACK_PARMS_IN_REG_PARM_AREA
2152 1,
2153#else
2154 args[i].reg != 0,
2155#endif
2e090bf6 2156 reg_parm_stack_space,
241399f6 2157 args[i].pass_on_stack ? 0 : args[i].partial,
2158 fndecl, args_size, &args[i].locate);
0fee47f4 2159#ifdef BLOCK_REG_PADDING
2160 else
2161 /* The argument is passed entirely in registers. See at which
2162 end it should be padded. */
2163 args[i].locate.where_pad =
2164 BLOCK_REG_PADDING (mode, type,
2165 int_size_in_bytes (type) <= UNITS_PER_WORD);
2166#endif
c87678e4 2167
cb543c54 2168 /* Update ARGS_SIZE, the total stack space for args so far. */
2169
241399f6 2170 args_size->constant += args[i].locate.size.constant;
2171 if (args[i].locate.size.var)
2172 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
cb543c54 2173
2174 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2175 have been used, etc. */
2176
f387af4f 2177 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
2178 type, argpos < n_named_args);
370e45b9 2179
2180 /* Store argument values for functions decorated with attribute
2181 alloc_size. */
2182 if (argpos == alloc_idx[0])
2183 alloc_args[0] = args[i].tree_value;
2184 else if (argpos == alloc_idx[1])
2185 alloc_args[1] = args[i].tree_value;
2186 }
2187
2188 if (alloc_args[0])
2189 {
2190 /* Check the arguments of functions decorated with attribute
2191 alloc_size. */
2192 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
cb543c54 2193 }
0c45740b 2194
2195 /* Detect passing non-string arguments to functions expecting
2196 nul-terminated strings. */
2197 maybe_warn_nonstring_arg (fndecl, exp);
cb543c54 2198}
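
/* For the alloc_size handling above, a hypothetical declaration like

     void *my_calloc (size_t n, size_t size)
       __attribute__ ((alloc_size (1, 2)));

   yields ALLOC_IDX == {0, 1}, and the two actual arguments of each call
   are collected into ALLOC_ARGS so that maybe_warn_alloc_args_overflow
   can diagnose calls whose requested size is excessive (see
   -Walloc-size-larger-than=).  */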
2199
cc45e5e8 2200/* Update ARGS_SIZE to contain the total size for the argument block.
2201 Return the original constant component of the argument block's size.
2202
2203 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2204 for arguments passed in registers. */
2205
e0deb08c 2206static poly_int64
4c9e08a4 2207compute_argument_block_size (int reg_parm_stack_space,
2208 struct args_size *args_size,
60e2260d 2209 tree fndecl ATTRIBUTE_UNUSED,
fa20f865 2210 tree fntype ATTRIBUTE_UNUSED,
4c9e08a4 2211 int preferred_stack_boundary ATTRIBUTE_UNUSED)
cc45e5e8 2212{
e0deb08c 2213 poly_int64 unadjusted_args_size = args_size->constant;
cc45e5e8 2214
4448f543 2215 /* For accumulate outgoing args mode we don't need to align, since the frame
 2216     will already be aligned.  Align to STACK_BOUNDARY in order to prevent
35a3065a 2217 backends from generating misaligned frame sizes. */
4448f543 2218 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2219 preferred_stack_boundary = STACK_BOUNDARY;
4448f543 2220
cc45e5e8 2221 /* Compute the actual size of the argument block required. The variable
2222 and constant sizes must be combined, the size may have to be rounded,
2223 and there may be a minimum required size. */
2224
2225 if (args_size->var)
2226 {
2227 args_size->var = ARGS_SIZE_TREE (*args_size);
2228 args_size->constant = 0;
2229
d0285dd8 2230 preferred_stack_boundary /= BITS_PER_UNIT;
2231 if (preferred_stack_boundary > 1)
91b70175 2232 {
2233 /* We don't handle this case yet. To handle it correctly we have
35a3065a 2234 to add the delta, round and subtract the delta.
91b70175 2235 Currently no machine description requires this support. */
e0deb08c 2236 gcc_assert (multiple_p (stack_pointer_delta,
2237 preferred_stack_boundary));
91b70175 2238 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2239 }
cc45e5e8 2240
2241 if (reg_parm_stack_space > 0)
2242 {
2243 args_size->var
2244 = size_binop (MAX_EXPR, args_size->var,
902de8ed 2245 ssize_int (reg_parm_stack_space));
cc45e5e8 2246
cc45e5e8 2247 /* The area corresponding to register parameters is not to count in
2248 the size of the block we need. So make the adjustment. */
fa20f865 2249 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 2250 args_size->var
2251 = size_binop (MINUS_EXPR, args_size->var,
2252 ssize_int (reg_parm_stack_space));
cc45e5e8 2253 }
2254 }
2255 else
2256 {
d0285dd8 2257 preferred_stack_boundary /= BITS_PER_UNIT;
60ecc450 2258 if (preferred_stack_boundary < 1)
2259 preferred_stack_boundary = 1;
e0deb08c 2260 args_size->constant = (aligned_upper_bound (args_size->constant
2261 + stack_pointer_delta,
2262 preferred_stack_boundary)
91b70175 2263 - stack_pointer_delta);
cc45e5e8 2264
e0deb08c 2265 args_size->constant = upper_bound (args_size->constant,
2266 reg_parm_stack_space);
cc45e5e8 2267
fa20f865 2268 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 2269 args_size->constant -= reg_parm_stack_space;
cc45e5e8 2270 }
2271 return unadjusted_args_size;
2272}
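
/* A scalar sketch of the constant-size rounding above, assuming all
   quantities are known at compile time:

     static HOST_WIDE_INT
     round_args_size (HOST_WIDE_INT size, HOST_WIDE_INT sp_delta,
		      HOST_WIDE_INT boundary)
     {
       HOST_WIDE_INT t = size + sp_delta;
       t = (t + boundary - 1) / boundary * boundary;
       return t - sp_delta;
     }

   E.g. 20 bytes of arguments with sp_delta == 8 and a 16-byte boundary
   round up to 24, so that 8 + 24 == 32 leaves the stack pointer 16-byte
   aligned once the arguments are pushed.  */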
2273
caa1595a 2274/* Precompute parameters as needed for a function call.
04707f1c 2275
dfe08167 2276 FLAGS is mask of ECF_* constants.
04707f1c 2277
04707f1c 2278 NUM_ACTUALS is the number of arguments.
2279
c87678e4 2280 ARGS is an array containing information for each argument; this
2281 routine fills in the INITIAL_VALUE and VALUE fields for each
2282 precomputed argument. */
04707f1c 2283
2284static void
2dd6f9ed 2285precompute_arguments (int num_actuals, struct arg_data *args)
04707f1c 2286{
2287 int i;
2288
8c78c14b 2289 /* If this is a libcall, then precompute all arguments so that we do not
67c155cb 2290 get extraneous instructions emitted as part of the libcall sequence. */
c5dc094f 2291
2292 /* If we preallocated the stack space, and some arguments must be passed
2293 on the stack, then we must precompute any parameter which contains a
2294 function call which will store arguments on the stack.
2295 Otherwise, evaluating the parameter may clobber previous parameters
 2296     which have already been stored into the stack.  (We have code to avoid
 2297     such a case by saving the outgoing stack arguments, but it results in
 2298     worse code.)
2dd6f9ed 2299 if (!ACCUMULATE_OUTGOING_ARGS)
67c155cb 2300 return;
0d568ddf 2301
04707f1c 2302 for (i = 0; i < num_actuals; i++)
67c155cb 2303 {
3b2411a8 2304 tree type;
3754d046 2305 machine_mode mode;
701e46d0 2306
2dd6f9ed 2307 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
c5dc094f 2308 continue;
2309
67c155cb 2310 /* If this is an addressable type, we cannot pre-evaluate it. */
3b2411a8 2311 type = TREE_TYPE (args[i].tree_value);
2312 gcc_assert (!TREE_ADDRESSABLE (type));
04707f1c 2313
67c155cb 2314 args[i].initial_value = args[i].value
8ec3c5c2 2315 = expand_normal (args[i].tree_value);
04707f1c 2316
3b2411a8 2317 mode = TYPE_MODE (type);
67c155cb 2318 if (mode != args[i].mode)
2319 {
3b2411a8 2320 int unsignedp = args[i].unsignedp;
67c155cb 2321 args[i].value
2322 = convert_modes (args[i].mode, mode,
2323 args[i].value, args[i].unsignedp);
3b2411a8 2324
67c155cb 2325 /* CSE will replace this only if it contains args[i].value
2326 pseudo, so convert it down to the declared mode using
2327 a SUBREG. */
2328 if (REG_P (args[i].value)
3b2411a8 2329 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2330 && promote_mode (type, mode, &unsignedp) != args[i].mode)
67c155cb 2331 {
2332 args[i].initial_value
2333 = gen_lowpart_SUBREG (mode, args[i].value);
2334 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
5a9ccd1b 2335 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
67c155cb 2336 }
67c155cb 2337 }
2338 }
04707f1c 2339}
2340
e717ffc2 2341/* Given the current state of MUST_PREALLOCATE and information about
2342 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2343 compute and return the final value for MUST_PREALLOCATE. */
2344
2345static int
48e1416a 2346finalize_must_preallocate (int must_preallocate, int num_actuals,
c2f47e15 2347 struct arg_data *args, struct args_size *args_size)
e717ffc2 2348{
2349 /* See if we have or want to preallocate stack space.
2350
2351 If we would have to push a partially-in-regs parm
2352 before other stack parms, preallocate stack space instead.
2353
2354 If the size of some parm is not a multiple of the required stack
2355 alignment, we must preallocate.
2356
2357 If the total size of arguments that would otherwise create a copy in
2358 a temporary (such as a CALL) is more than half the total argument list
2359 size, preallocation is faster.
2360
2361 Another reason to preallocate is if we have a machine (like the m88k)
2362 where stack alignment is required to be maintained between every
2363 pair of insns, not just when the call is made. However, we assume here
2364 that such machines either do not have push insns (and hence preallocation
2365 would occur anyway) or the problem is taken care of with
2366 PUSH_ROUNDING. */
2367
2368 if (! must_preallocate)
2369 {
2370 int partial_seen = 0;
e0deb08c 2371 poly_int64 copy_to_evaluate_size = 0;
e717ffc2 2372 int i;
2373
2374 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2375 {
2376 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2377 partial_seen = 1;
2378 else if (partial_seen && args[i].reg == 0)
2379 must_preallocate = 1;
2380
2381 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2382 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2383 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2384 || TREE_CODE (args[i].tree_value) == COND_EXPR
2385 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2386 copy_to_evaluate_size
2387 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2388 }
2389
e0deb08c 2390 if (maybe_ne (args_size->constant, 0)
2391 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
e717ffc2 2392 must_preallocate = 1;
2393 }
2394 return must_preallocate;
2395}
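
/* As a hypothetical example of the size heuristic above, given

     struct big { char c[64]; };
     extern struct big make_big (void);
     extern void consume (struct big, int);

   a call consume (make_big (), 0) on a target that passes struct big on
   the stack in BLKmode counts 64 bytes of copy-to-evaluate size; since
   twice that exceeds the total argument-list size, the function returns
   1 and the stack space is preallocated.  */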
cc45e5e8 2396
f3012854 2397/* If we preallocated stack space, compute the address of each argument
2398 and store it into the ARGS array.
2399
c87678e4 2400 We need not ensure it is a valid memory address here; it will be
f3012854 2401 validized when it is used.
2402
2403 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2404
2405static void
4c9e08a4 2406compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
f3012854 2407{
2408 if (argblock)
2409 {
2410 rtx arg_reg = argblock;
e0deb08c 2411 int i;
2412 poly_int64 arg_offset = 0;
f3012854 2413
2414 if (GET_CODE (argblock) == PLUS)
e0deb08c 2415 {
2416 arg_reg = XEXP (argblock, 0);
2417 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2418 }
f3012854 2419
2420 for (i = 0; i < num_actuals; i++)
2421 {
241399f6 2422 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2423 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
f3012854 2424 rtx addr;
c5dc0c32 2425 unsigned int align, boundary;
e0deb08c 2426 poly_uint64 units_on_stack = 0;
3754d046 2427 machine_mode partial_mode = VOIDmode;
f3012854 2428
2429 /* Skip this parm if it will not be passed on the stack. */
c2ca1bab 2430 if (! args[i].pass_on_stack
2431 && args[i].reg != 0
2432 && args[i].partial == 0)
f3012854 2433 continue;
2434
aed50899 2435 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2436 continue;
2437
53fdf12a 2438 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
29c05e22 2439 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 2440
2441 if (args[i].partial != 0)
2442 {
2443 /* Only part of the parameter is being passed on the stack.
2444 Generate a simple memory reference of the correct size. */
2445 units_on_stack = args[i].locate.size.constant;
e0deb08c 2446 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
517be012 2447 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
c2ca1bab 2448 args[i].stack = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 2449 set_mem_size (args[i].stack, units_on_stack);
c2ca1bab 2450 }
2451 else
2452 {
2453 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2454 set_mem_attributes (args[i].stack,
2455 TREE_TYPE (args[i].tree_value), 1);
2456 }
c5dc0c32 2457 align = BITS_PER_UNIT;
2458 boundary = args[i].locate.boundary;
e0deb08c 2459 poly_int64 offset_val;
d7ab0e3d 2460 if (args[i].locate.where_pad != PAD_DOWNWARD)
c5dc0c32 2461 align = boundary;
e0deb08c 2462 else if (poly_int_rtx_p (offset, &offset_val))
c5dc0c32 2463 {
e0deb08c 2464 align = least_bit_hwi (boundary);
2465 unsigned int offset_align
2466 = known_alignment (offset_val) * BITS_PER_UNIT;
2467 if (offset_align != 0)
2468 align = MIN (align, offset_align);
c5dc0c32 2469 }
2470 set_mem_align (args[i].stack, align);
f3012854 2471
53fdf12a 2472 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
29c05e22 2473 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 2474
2475 if (args[i].partial != 0)
2476 {
2477 /* Only part of the parameter is being passed on the stack.
 2478	       Generate a simple memory reference of the correct size.  */
2480 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 2481 set_mem_size (args[i].stack_slot, units_on_stack);
c2ca1bab 2482 }
2483 else
2484 {
2485 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2486 set_mem_attributes (args[i].stack_slot,
2487 TREE_TYPE (args[i].tree_value), 1);
2488 }
c5dc0c32 2489 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
a9f2963b 2490
2491 /* Function incoming arguments may overlap with sibling call
2492 outgoing arguments and we cannot allow reordering of reads
2493 from function arguments with stores to outgoing arguments
2494 of sibling calls. */
ab6ab77e 2495 set_mem_alias_set (args[i].stack, 0);
2496 set_mem_alias_set (args[i].stack_slot, 0);
f3012854 2497 }
2498 }
2499}
c87678e4 2500
f3012854 2501/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2502 in a call instruction.
2503
2504 FNDECL is the tree node for the target function. For an indirect call
2505 FNDECL will be NULL_TREE.
2506
95672afe 2507 ADDR is the operand 0 of CALL_EXPR for this call. */
f3012854 2508
2509static rtx
4c9e08a4 2510rtx_for_function_call (tree fndecl, tree addr)
f3012854 2511{
2512 rtx funexp;
2513
2514 /* Get the function to call, in the form of RTL. */
2515 if (fndecl)
2516 {
3d053e06 2517 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
ea259bbe 2518 TREE_USED (fndecl) = 1;
f3012854 2519
2520 /* Get a SYMBOL_REF rtx for the function address. */
2521 funexp = XEXP (DECL_RTL (fndecl), 0);
2522 }
2523 else
2524 /* Generate an rtx (probably a pseudo-register) for the address. */
2525 {
2526 push_temp_slots ();
8ec3c5c2 2527 funexp = expand_normal (addr);
c87678e4 2528 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
f3012854 2529 }
2530 return funexp;
2531}
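
/* E.g. for a direct call to a declared function

     extern int f (int);
     ... f (1) ...

   FUNEXP is the SYMBOL_REF recorded in f's DECL_RTL, whereas for an
   indirect call through a function pointer the address expression is
   expanded, typically into a pseudo register.  */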
2532
3c56e0c1 2533/* Return the static chain for this function, if any. */
2534
2535rtx
2536rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2537{
2538 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2539 return NULL;
2540
2541 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2542}
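
/* The static chain is needed, e.g., for GNU C nested functions:

     int outer (int x)
     {
       int inner (int y) { return x + y; }
       return inner (1);
     }

   INNER must reach OUTER's frame to read X, so DECL_STATIC_CHAIN is set
   on its FUNCTION_DECL and targetm.calls.static_chain supplies the
   register (or memory slot) that carries the chain pointer.  */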
2543
74c02416 2544/* Internal state for internal_arg_pointer_based_exp and its helpers. */
2545static struct
2546{
2547 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2548 or NULL_RTX if none has been scanned yet. */
3663becd 2549 rtx_insn *scan_start;
74c02416 2550 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2551 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2552 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
 2553     with a fixed offset, or PC if the offset is variable or unknown.  */
f1f41a6c 2554 vec<rtx> cache;
74c02416 2555} internal_arg_pointer_exp_state;
2556
474ce66a 2557static rtx internal_arg_pointer_based_exp (const_rtx, bool);
74c02416 2558
2559/* Helper function for internal_arg_pointer_based_exp. Scan insns in
 2560   the tail call sequence, starting with the first insn that hasn't been
 2561   scanned yet, and note for each pseudo on the LHS whether it is based
 2562   on crtl->args.internal_arg_pointer or not, and what offset from
 2563   that pointer it has.  */
2564
2565static void
2566internal_arg_pointer_based_exp_scan (void)
2567{
3663becd 2568 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
74c02416 2569
2570 if (scan_start == NULL_RTX)
2571 insn = get_insns ();
2572 else
2573 insn = NEXT_INSN (scan_start);
2574
2575 while (insn)
2576 {
2577 rtx set = single_set (insn);
2578 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2579 {
2580 rtx val = NULL_RTX;
2581 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2582 /* Punt on pseudos set multiple times. */
f1f41a6c 2583 if (idx < internal_arg_pointer_exp_state.cache.length ()
2584 && (internal_arg_pointer_exp_state.cache[idx]
74c02416 2585 != NULL_RTX))
2586 val = pc_rtx;
2587 else
2588 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2589 if (val != NULL_RTX)
2590 {
f1f41a6c 2591 if (idx >= internal_arg_pointer_exp_state.cache.length ())
9af5ce0c 2592 internal_arg_pointer_exp_state.cache
2593 .safe_grow_cleared (idx + 1);
f1f41a6c 2594 internal_arg_pointer_exp_state.cache[idx] = val;
74c02416 2595 }
2596 }
2597 if (NEXT_INSN (insn) == NULL_RTX)
2598 scan_start = insn;
2599 insn = NEXT_INSN (insn);
2600 }
2601
2602 internal_arg_pointer_exp_state.scan_start = scan_start;
2603}
2604
74c02416 2605/* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
 2606   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
 2607   it with a fixed offset, or PC if the offset is variable or unknown.
2608 TOPLEVEL is true if the function is invoked at the topmost level. */
2609
2610static rtx
474ce66a 2611internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
74c02416 2612{
2613 if (CONSTANT_P (rtl))
2614 return NULL_RTX;
2615
2616 if (rtl == crtl->args.internal_arg_pointer)
2617 return const0_rtx;
2618
2619 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2620 return NULL_RTX;
2621
e0deb08c 2622 poly_int64 offset;
2623 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
74c02416 2624 {
2625 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2626 if (val == NULL_RTX || val == pc_rtx)
2627 return val;
e0deb08c 2628 return plus_constant (Pmode, val, offset);
74c02416 2629 }
2630
2631 /* When called at the topmost level, scan pseudo assignments in between the
2632 last scanned instruction in the tail call sequence and the latest insn
2633 in that sequence. */
2634 if (toplevel)
2635 internal_arg_pointer_based_exp_scan ();
2636
2637 if (REG_P (rtl))
2638 {
2639 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
f1f41a6c 2640 if (idx < internal_arg_pointer_exp_state.cache.length ())
2641 return internal_arg_pointer_exp_state.cache[idx];
74c02416 2642
2643 return NULL_RTX;
2644 }
2645
474ce66a 2646 subrtx_iterator::array_type array;
2647 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2648 {
2649 const_rtx x = *iter;
2650 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2651 return pc_rtx;
2652 if (MEM_P (x))
2653 iter.skip_subrtxes ();
2654 }
74c02416 2655
2656 return NULL_RTX;
2657}
2658
e0deb08c 2659/* Return true if SIZE bytes starting from address ADDR might overlap an
2660 already-clobbered argument area. This function is used to determine
2661 if we should give up a sibcall. */
ff6c0ab2 2662
2663static bool
e0deb08c 2664mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
ff6c0ab2 2665{
e0deb08c 2666 poly_int64 i;
2667 unsigned HOST_WIDE_INT start, end;
74c02416 2668 rtx val;
ff6c0ab2 2669
e0deb08c 2670 if (bitmap_empty_p (stored_args_map)
2671 && stored_args_watermark == HOST_WIDE_INT_M1U)
9ddeff7e 2672 return false;
74c02416 2673 val = internal_arg_pointer_based_exp (addr, true);
2674 if (val == NULL_RTX)
2675 return false;
e0deb08c 2676 else if (!poly_int_rtx_p (val, &i))
cc0595c0 2677 return true;
e0deb08c 2678
2679 if (known_eq (size, 0U))
2680 return false;
a8b58ffb 2681
2682 if (STACK_GROWS_DOWNWARD)
2683 i -= crtl->args.pretend_args_size;
2684 else
2685 i += crtl->args.pretend_args_size;
2686
ccccd62c 2687 if (ARGS_GROW_DOWNWARD)
2688 i = -i - size;
2689
e0deb08c 2690 /* We can ignore any references to the function's pretend args,
2691 which at this point would manifest as negative values of I. */
2692 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2693 return false;
ff6c0ab2 2694
e0deb08c 2695 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2696 if (!(i + size).is_constant (&end))
2697 end = HOST_WIDE_INT_M1U;
2698
2699 if (end > stored_args_watermark)
2700 return true;
2701
2702 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2703 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2704 if (bitmap_bit_p (stored_args_map, k))
2705 return true;
ff6c0ab2 2706
2707 return false;
2708}
2709
cde25025 2710/* Do the register loads required for any wholly-register parms or any
2711 parms which are passed both on the stack and in a register. Their
c87678e4 2712 expressions were already evaluated.
cde25025 2713
2714 Mark all register-parms as living through the call, putting these USE
4c9e08a4 2715 insns in the CALL_INSN_FUNCTION_USAGE field.
2716
dc537795 2717 When IS_SIBCALL, perform the check_sibcall_argument_overlap
42b11544 2718 checking, setting *SIBCALL_FAILURE if appropriate. */
cde25025 2719
2720static void
4c9e08a4 2721load_register_parameters (struct arg_data *args, int num_actuals,
2722 rtx *call_fusage, int flags, int is_sibcall,
2723 int *sibcall_failure)
cde25025 2724{
2725 int i, j;
2726
cde25025 2727 for (i = 0; i < num_actuals; i++)
cde25025 2728 {
0e0be288 2729 rtx reg = ((flags & ECF_SIBCALL)
2730 ? args[i].tail_call_reg : args[i].reg);
cde25025 2731 if (reg)
2732 {
5f4cd670 2733 int partial = args[i].partial;
2734 int nregs;
8e2882f4 2735 poly_int64 size = 0;
2736 HOST_WIDE_INT const_size = 0;
3663becd 2737 rtx_insn *before_arg = get_last_insn ();
83272ab4 2738 /* Set non-negative if we must move a word at a time, even if
 2739	   just one word (e.g., partial == 4 && mode == DFmode).  Set
2740 to -1 if we just use a normal move insn. This value can be
2741 zero if the argument is a zero size structure. */
5f4cd670 2742 nregs = -1;
f054eb3c 2743 if (GET_CODE (reg) == PARALLEL)
2744 ;
2745 else if (partial)
2746 {
2747 gcc_assert (partial % UNITS_PER_WORD == 0);
2748 nregs = partial / UNITS_PER_WORD;
2749 }
5f4cd670 2750 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2751 {
8e2882f4 2752 /* Variable-sized parameters should be described by a
2753 PARALLEL instead. */
2754 const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2755 gcc_assert (const_size >= 0);
2756 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2757 size = const_size;
5f4cd670 2758 }
2759 else
2760 size = GET_MODE_SIZE (args[i].mode);
cde25025 2761
2762 /* Handle calls that pass values in multiple non-contiguous
2763 locations. The Irix 6 ABI has examples of this. */
2764
2765 if (GET_CODE (reg) == PARALLEL)
b600a907 2766 emit_group_move (reg, args[i].parallel_value);
cde25025 2767
 2768	  /* In the simple case, just do a move.  If normal partial, store_one_arg
2769 has already loaded the register for us. In all other cases,
2770 load the register(s) from memory. */
2771
8e67abab 2772 else if (nregs == -1)
2773 {
2774 emit_move_insn (reg, args[i].value);
5f4cd670 2775#ifdef BLOCK_REG_PADDING
8e67abab 2776		/* Handle the case where we have a value that needs shifting
 2777		   up to the msb, e.g. a QImode value when we're padding
2778 upward on a BYTES_BIG_ENDIAN machine. */
8e2882f4 2779 if (args[i].locate.where_pad
2780 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
8e67abab 2781 {
8e2882f4 2782 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2783 if (maybe_lt (size, UNITS_PER_WORD))
2784 {
2785 rtx x;
2786 poly_int64 shift
2787 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2788
2789 /* Assigning REG here rather than a temp makes
2790 CALL_FUSAGE report the whole reg as used.
2791 Strictly speaking, the call only uses SIZE
2792 bytes at the msb end, but it doesn't seem worth
2793 generating rtl to say that. */
2794 reg = gen_rtx_REG (word_mode, REGNO (reg));
2795 x = expand_shift (LSHIFT_EXPR, word_mode,
2796 reg, shift, reg, 1);
2797 if (x != reg)
2798 emit_move_insn (reg, x);
2799 }
8e67abab 2800 }
5f4cd670 2801#endif
8e67abab 2802 }
cde25025 2803
2804 /* If we have pre-computed the values to put in the registers in
2805 the case of non-aligned structures, copy them in now. */
2806
2807 else if (args[i].n_aligned_regs != 0)
2808 for (j = 0; j < args[i].n_aligned_regs; j++)
2809 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2810 args[i].aligned_regs[j]);
2811
e2e0ef92 2812 else if (partial == 0 || args[i].pass_on_stack)
5f4cd670 2813 {
8e2882f4 2814 /* SIZE and CONST_SIZE are 0 for partial arguments and
2815 the size of a BLKmode type otherwise. */
2816 gcc_checking_assert (known_eq (size, const_size));
d2b9158b 2817 rtx mem = validize_mem (copy_rtx (args[i].value));
5f4cd670 2818
e2e0ef92 2819 /* Check for overlap with already clobbered argument area,
 2820	     provided that this has non-zero size.  */
ff6c0ab2 2821 if (is_sibcall
8e2882f4 2822 && const_size != 0
e0deb08c 2823 && (mem_might_overlap_already_clobbered_arg_p
8e2882f4 2824 (XEXP (args[i].value, 0), const_size)))
ff6c0ab2 2825 *sibcall_failure = 1;
2826
8e2882f4 2827 if (const_size % UNITS_PER_WORD == 0
72f2d6cc 2828 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2829 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2830 else
2831 {
2832 if (nregs > 1)
2833 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2834 args[i].mode);
2835 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2836 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
8e2882f4 2837 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
292237f3 2838 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
5d77cce2 2839 word_mode, word_mode, false,
2840 NULL);
72f2d6cc 2841 if (BYTES_BIG_ENDIAN)
2842 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2843 BITS_PER_WORD - bitsize, dest, 1);
2844 if (x != dest)
2845 emit_move_insn (dest, x);
2846 }
2847
5f4cd670 2848 /* Handle a BLKmode that needs shifting. */
8e2882f4 2849 if (nregs == 1 && const_size < UNITS_PER_WORD
2c267f1a 2850#ifdef BLOCK_REG_PADDING
d7ab0e3d 2851 && args[i].locate.where_pad == PAD_DOWNWARD
2c267f1a 2852#else
2853 && BYTES_BIG_ENDIAN
2854#endif
72f2d6cc 2855 )
5f4cd670 2856 {
72f2d6cc 2857 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
8e2882f4 2858 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
72f2d6cc 2859 enum tree_code dir = (BYTES_BIG_ENDIAN
2860 ? RSHIFT_EXPR : LSHIFT_EXPR);
2861 rtx x;
5f4cd670 2862
72f2d6cc 2863 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2864 if (x != dest)
2865 emit_move_insn (dest, x);
5f4cd670 2866 }
5f4cd670 2867 }
cde25025 2868
42b11544 2869 /* When a parameter is a block, and perhaps in other cases, it is
2870 possible that it did a load from an argument slot that was
6a8fa8e2 2871 already clobbered. */
42b11544 2872 if (is_sibcall
2873 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2874 *sibcall_failure = 1;
2875
cde25025 2876 /* Handle calls that pass values in multiple non-contiguous
2877 locations. The Irix 6 ABI has examples of this. */
2878 if (GET_CODE (reg) == PARALLEL)
2879 use_group_regs (call_fusage, reg);
2880 else if (nregs == -1)
b4eeceb9 2881 use_reg_mode (call_fusage, reg,
2882 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
c75d013c 2883 else if (nregs > 0)
2884 use_regs (call_fusage, REGNO (reg), nregs);
cde25025 2885 }
2886 }
2887}
2888
92e1ef5b 2889/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2890 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2891 bytes, then we would need to push some additional bytes to pad the
e0deb08c 2892   arguments.  So, we try to compute an adjustment to the stack pointer for an
481feae3 2893 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2894 bytes. Then, when the arguments are pushed the stack will be perfectly
e0deb08c 2895 aligned.
92e1ef5b 2896
e0deb08c 2897 Return true if this optimization is possible, storing the adjustment
2898 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2899 bytes that should be popped after the call. */
2900
2901static bool
2902combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2903 poly_int64 unadjusted_args_size,
4c9e08a4 2904 struct args_size *args_size,
38413c80 2905 unsigned int preferred_unit_stack_boundary)
92e1ef5b 2906{
2907 /* The number of bytes to pop so that the stack will be
2908 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
e0deb08c 2909 poly_int64 adjustment;
92e1ef5b 2910 /* The alignment of the stack after the arguments are pushed, if we
 2911     just pushed the arguments without adjusting the stack here.  */
38413c80 2912 unsigned HOST_WIDE_INT unadjusted_alignment;
92e1ef5b 2913
e0deb08c 2914 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2915 preferred_unit_stack_boundary,
2916 &unadjusted_alignment))
2917 return false;
92e1ef5b 2918
2919 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2920 as possible -- leaving just enough left to cancel out the
2921 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2922 PENDING_STACK_ADJUST is non-negative, and congruent to
2923 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2924
2925 /* Begin by trying to pop all the bytes. */
e0deb08c 2926 unsigned HOST_WIDE_INT tmp_misalignment;
2927 if (!known_misalignment (pending_stack_adjust,
2928 preferred_unit_stack_boundary,
2929 &tmp_misalignment))
2930 return false;
2931 unadjusted_alignment -= tmp_misalignment;
92e1ef5b 2932 adjustment = pending_stack_adjust;
2933 /* Push enough additional bytes that the stack will be aligned
2934 after the arguments are pushed. */
b47bf174 2935 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2936 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
c87678e4 2937
e0deb08c 2938 /* We need to know whether the adjusted argument size
2939 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2940 or a deallocation. */
2941 if (!ordered_p (adjustment, unadjusted_args_size))
2942 return false;
2943
92e1ef5b 2944   /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2945 bytes after the call. The right number is the entire
2946 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2947 by the arguments in the first place. */
c87678e4 2948 args_size->constant
92e1ef5b 2949 = pending_stack_adjust - adjustment + unadjusted_args_size;
2950
e0deb08c 2951 *adjustment_out = adjustment;
2952 return true;
92e1ef5b 2953}
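
/* A worked example, assuming a 16-byte preferred boundary, a stack
   pointer currently on a 16-byte boundary, PENDING_STACK_ADJUST == 28
   and UNADJUSTED_ARGS_SIZE == 12: the arguments alone would leave the
   stack 12 bytes past alignment, and popping all 28 pending bytes leaves
   a misalignment of 12 - 12 == 0, so ADJUSTMENT == 28 and
   ARGS_SIZE->CONSTANT becomes 28 - 28 + 12 == 12.  Popping 28 bytes now
   and pushing the 12 bytes of arguments later leaves the stack perfectly
   aligned for the call.  */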
2954
7ecc63d3 2955/* Scan expression X to see whether it dereferences any argument slots
 2956   we have already clobbered with tail call arguments (as noted in the
 2957   stored_args_map bitmap).
d10cfa8d 2958   Return nonzero if X dereferences such an argument slot,
7ecc63d3 2959   zero otherwise.  */
2960
2961static int
4c9e08a4 2962check_sibcall_argument_overlap_1 (rtx x)
7ecc63d3 2963{
2964 RTX_CODE code;
2965 int i, j;
7ecc63d3 2966 const char *fmt;
2967
2968 if (x == NULL_RTX)
2969 return 0;
2970
2971 code = GET_CODE (x);
2972
cc0595c0 2973 /* We need not check the operands of the CALL expression itself. */
2974 if (code == CALL)
2975 return 0;
2976
7ecc63d3 2977 if (code == MEM)
e0deb08c 2978 return (mem_might_overlap_already_clobbered_arg_p
2979 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
7ecc63d3 2980
c87678e4 2981 /* Scan all subexpressions. */
7ecc63d3 2982 fmt = GET_RTX_FORMAT (code);
2983 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2984 {
2985 if (*fmt == 'e')
c87678e4 2986 {
2987 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2988 return 1;
2989 }
7ecc63d3 2990 else if (*fmt == 'E')
c87678e4 2991 {
2992 for (j = 0; j < XVECLEN (x, i); j++)
2993 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2994 return 1;
2995 }
7ecc63d3 2996 }
2997 return 0;
7ecc63d3 2998}
2999
 3000/* Scan the sequence after INSN to see whether it dereferences any
 3001   argument slots we have already clobbered with tail call arguments
42b11544 3002   (as noted in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP,
 3003   afterwards add the stack slots for ARG to the stored_args_map bitmap
 3004   (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).  Return
 3005   nonzero if the sequence after INSN dereferences such argument
 3006   slots, zero otherwise.  */
7ecc63d3 3006
3007static int
3663becd 3008check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3009 int mark_stored_args_map)
c87678e4 3010{
e0deb08c 3011 poly_uint64 low, high;
3012 unsigned HOST_WIDE_INT const_low, const_high;
7ecc63d3 3013
3014 if (insn == NULL_RTX)
3015 insn = get_insns ();
3016 else
3017 insn = NEXT_INSN (insn);
3018
3019 for (; insn; insn = NEXT_INSN (insn))
c87678e4 3020 if (INSN_P (insn)
3021 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
7ecc63d3 3022 break;
3023
42b11544 3024 if (mark_stored_args_map)
3025 {
ccccd62c 3026 if (ARGS_GROW_DOWNWARD)
3027 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3028 else
3029 low = arg->locate.slot_offset.constant;
e0deb08c 3030 high = low + arg->locate.size.constant;
db10eec8 3031
e0deb08c 3032 const_low = constant_lower_bound (low);
3033 if (high.is_constant (&const_high))
3034 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3035 bitmap_set_bit (stored_args_map, i);
3036 else
3037 stored_args_watermark = MIN (stored_args_watermark, const_low);
42b11544 3038 }
7ecc63d3 3039 return insn != NULL_RTX;
3040}
3041
05d18e8b 3042/* Given that a function returns a value of mode MODE at the most
3043 significant end of hard register VALUE, shift VALUE left or right
3044 as specified by LEFT_P. Return true if some action was needed. */
2c8ff1ed 3045
05d18e8b 3046bool
3754d046 3047shift_return_value (machine_mode mode, bool left_p, rtx value)
2c8ff1ed 3048{
05d18e8b 3049 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
bd39703a 3050 machine_mode value_mode = GET_MODE (value);
eafbcd13 3051 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3052
3053 if (known_eq (shift, 0))
05d18e8b 3054 return false;
3055
3056 /* Use ashr rather than lshr for right shifts. This is for the benefit
3057 of the MIPS port, which requires SImode values to be sign-extended
3058 when stored in 64-bit registers. */
bd39703a 3059 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3060 value, gen_int_shift_amount (value_mode, shift),
3061 value, 1, OPTAB_WIDEN))
05d18e8b 3062 gcc_unreachable ();
3063 return true;
2c8ff1ed 3064}
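
/* E.g. for an SImode value returned at the most significant end of a
   64-bit hard register, SHIFT is 64 - 32 == 32: shift left by 32 to move
   an ordinary value into return position, or arithmetic-right by 32 to
   extract it on the receiving side.  */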
3065
90af1361 3066/* If X is a likely-spilled register value, copy it to a pseudo
3067 register and return that register. Return X otherwise. */
3068
3069static rtx
3070avoid_likely_spilled_reg (rtx x)
3071{
f4e36c33 3072 rtx new_rtx;
90af1361 3073
3074 if (REG_P (x)
3075 && HARD_REGISTER_P (x)
24dd0668 3076 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
90af1361 3077 {
3078 /* Make sure that we generate a REG rather than a CONCAT.
3079 Moves into CONCATs can need nontrivial instructions,
3080 and the whole point of this function is to avoid
3081 using the hard register directly in such a situation. */
3082 generating_concat_p = 0;
f4e36c33 3083 new_rtx = gen_reg_rtx (GET_MODE (x));
90af1361 3084 generating_concat_p = 1;
f4e36c33 3085 emit_move_insn (new_rtx, x);
3086 return new_rtx;
90af1361 3087 }
3088 return x;
3089}
3090
80e11038 3091/* Helper function for expand_call.
 3092   Return false if EXP is not implementable as a sibling call.  */
3093
3094static bool
3095can_implement_as_sibling_call_p (tree exp,
3096 rtx structure_value_addr,
3097 tree funtype,
869bb2b6 3098 int reg_parm_stack_space ATTRIBUTE_UNUSED,
80e11038 3099 tree fndecl,
3100 int flags,
3101 tree addr,
3102 const args_size &args_size)
3103{
3104 if (!targetm.have_sibcall_epilogue ())
b4a61e77 3105 {
3106 maybe_complain_about_tail_call
3107 (exp,
3108 "machine description does not have"
3109 " a sibcall_epilogue instruction pattern");
3110 return false;
3111 }
80e11038 3112
3113 /* Doing sibling call optimization needs some work, since
3114 structure_value_addr can be allocated on the stack.
3115 It does not seem worth the effort since few optimizable
3116 sibling calls will return a structure. */
3117 if (structure_value_addr != NULL_RTX)
b4a61e77 3118 {
3119 maybe_complain_about_tail_call (exp, "callee returns a structure");
3120 return false;
3121 }
80e11038 3122
3123#ifdef REG_PARM_STACK_SPACE
3124 /* If outgoing reg parm stack space changes, we cannot do a sibcall. */
3125 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3126 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3127 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
b4a61e77 3128 {
3129 maybe_complain_about_tail_call (exp,
3130 "inconsistent size of stack space"
3131 " allocated for arguments which are"
3132 " passed in registers");
3133 return false;
3134 }
80e11038 3135#endif
3136
3137 /* Check whether the target is able to optimize the call
3138 into a sibcall. */
3139 if (!targetm.function_ok_for_sibcall (fndecl, exp))
b4a61e77 3140 {
3141 maybe_complain_about_tail_call (exp,
3142 "target is not able to optimize the"
3143 " call into a sibling call");
3144 return false;
3145 }
80e11038 3146
3147 /* Functions that do not return exactly once may not be sibcall
3148 optimized. */
b4a61e77 3149 if (flags & ECF_RETURNS_TWICE)
3150 {
3151 maybe_complain_about_tail_call (exp, "callee returns twice");
3152 return false;
3153 }
3154 if (flags & ECF_NORETURN)
3155 {
3156 maybe_complain_about_tail_call (exp, "callee does not return");
3157 return false;
3158 }
80e11038 3159
3160 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
b4a61e77 3161 {
3162 maybe_complain_about_tail_call (exp, "volatile function type");
3163 return false;
3164 }
80e11038 3165
3166 /* If the called function is nested in the current one, it might access
3167 some of the caller's arguments, but could clobber them beforehand if
3168 the argument areas are shared. */
3169 if (fndecl && decl_function_context (fndecl) == current_function_decl)
b4a61e77 3170 {
3171 maybe_complain_about_tail_call (exp, "nested function");
3172 return false;
3173 }
80e11038 3174
3175 /* If this function requires more stack slots than the current
3176 function, we cannot change it into a sibling call.
3177 crtl->args.pretend_args_size is not part of the
3178 stack allocated by our caller. */
e0deb08c 3179 if (maybe_gt (args_size.constant,
3180 crtl->args.size - crtl->args.pretend_args_size))
b4a61e77 3181 {
3182 maybe_complain_about_tail_call (exp,
3183 "callee required more stack slots"
3184 " than the caller");
3185 return false;
3186 }
80e11038 3187
3188 /* If the callee pops its own arguments, then it must pop exactly
3189 the same number of arguments as the current function. */
e0deb08c 3190 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3191 args_size.constant),
3192 targetm.calls.return_pops_args (current_function_decl,
3193 TREE_TYPE
3194 (current_function_decl),
3195 crtl->args.size)))
b4a61e77 3196 {
3197 maybe_complain_about_tail_call (exp,
3198 "inconsistent number of"
3199 " popped arguments");
3200 return false;
3201 }
80e11038 3202
3203 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
b4a61e77 3204 {
3205 maybe_complain_about_tail_call (exp, "frontend does not support"
3206 " sibling call");
3207 return false;
3208 }
80e11038 3209
3210 /* All checks passed. */
3211 return true;
3212}
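/* Source-level illustrations of the checks above (a sketch; a real
   verdict also depends on the target and frontend hooks):

       struct big { char buf[64]; };
       struct big f (void);
       struct big caller1 (void) { return f (); }  // refused: callee
                                                   // returns a structure

       int g (int);
       int caller2 (int x) { return g (x + 1); }   // may be accepted if
                                                   // the remaining checks
                                                   // pass

   Every refusal goes through maybe_complain_about_tail_call, so calls
   marked with CALL_EXPR_MUST_TAIL_CALL get a diagnostic naming the
   reason.  */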
3213
c2f47e15 3214/* Generate all the code for a CALL_EXPR exp
66d433c7 3215 and return an rtx for its value.
3216 Store the value in TARGET (specified as an rtx) if convenient.
3217 If the value is stored in TARGET then TARGET is returned.
3218 If IGNORE is nonzero, then we ignore the value of the function call. */
3219
3220rtx
4c9e08a4 3221expand_call (tree exp, rtx target, int ignore)
66d433c7 3222{
60ecc450 3223 /* Nonzero if we are currently expanding a call. */
3224 static int currently_expanding_call = 0;
3225
66d433c7 3226 /* RTX for the function to be called. */
3227 rtx funexp;
60ecc450 3228 /* Sequence of insns to perform a normal "call". */
3663becd 3229 rtx_insn *normal_call_insns = NULL;
4ee9c684 3230 /* Sequence of insns to perform a tail "call". */
3663becd 3231 rtx_insn *tail_call_insns = NULL;
66d433c7 3232 /* Data type of the function. */
3233 tree funtype;
915e81b8 3234 tree type_arg_types;
16c9337c 3235 tree rettype;
66d433c7 3236 /* Declaration of the function being called,
3237 or 0 if the function is computed (not known by name). */
3238 tree fndecl = 0;
e100aadc 3239 /* The type of the function being called. */
3240 tree fntype;
4ee9c684 3241 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
b4a61e77 3242 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
60ecc450 3243 int pass;
66d433c7 3244
3245 /* Register in which non-BLKmode value will be returned,
3246 or 0 if no value or if value is BLKmode. */
3247 rtx valreg;
3248 /* Address where we should return a BLKmode value;
3249 0 if value not BLKmode. */
3250 rtx structure_value_addr = 0;
3251 /* Nonzero if that address is being passed by treating it as
3252 an extra, implicit first parameter. Otherwise,
3253 it is passed by being copied directly into struct_value_rtx. */
3254 int structure_value_addr_parm = 0;
cd46caee 3255 /* Holds the value of implicit argument for the struct value. */
3256 tree structure_value_addr_value = NULL_TREE;
66d433c7 3257 /* Size of aggregate value wanted, or zero if none wanted
3258 or if we are using the non-reentrant PCC calling convention
3259 or expecting the value in registers. */
e967c3ed 3260 poly_int64 struct_value_size = 0;
66d433c7 3261 /* Nonzero if called function returns an aggregate in memory PCC style,
3262 by returning the address of where to find it. */
3263 int pcc_struct_value = 0;
45550790 3264 rtx struct_value = 0;
66d433c7 3265
3266 /* Number of actual parameters in this call, including struct value addr. */
3267 int num_actuals;
3268 /* Number of named args. Args after this are anonymous ones
3269 and they must all go on the stack. */
3270 int n_named_args;
cd46caee 3271 /* Number of complex actual arguments that need to be split. */
3272 int num_complex_actuals = 0;
66d433c7 3273
3274 /* Vector of information about each argument.
3275 Arguments are numbered in the order they will be pushed,
3276 not the order they are written. */
3277 struct arg_data *args;
3278
3279 /* Total size in bytes of all the stack-parms scanned so far. */
3280 struct args_size args_size;
0e0be288 3281 struct args_size adjusted_args_size;
66d433c7 3282 /* Size of arguments before any adjustments (such as rounding). */
e0deb08c 3283 poly_int64 unadjusted_args_size;
66d433c7 3284 /* Data on reg parms scanned so far. */
39cba157 3285 CUMULATIVE_ARGS args_so_far_v;
3286 cumulative_args_t args_so_far;
66d433c7 3287 /* Nonzero if a reg parm has been scanned. */
3288 int reg_parm_seen;
66d433c7 3290
c87678e4 3291 /* Nonzero if we must avoid push-insns in the args for this call.
66d433c7 3292 If stack space is allocated for register parameters, but not by the
3293 caller, then it is preallocated in the fixed part of the stack frame.
3294 So the entire argument block must then be preallocated (i.e., we
3295 ignore PUSH_ROUNDING in that case). */
3296
4448f543 3297 int must_preallocate = !PUSH_ARGS;
66d433c7 3298
eb2f80f3 3299 /* Size of the stack reserved for parameter registers. */
2d7187c2 3300 int reg_parm_stack_space = 0;
3301
66d433c7 3302 /* Address of space preallocated for stack parms
3303 (on machines that lack push insns), or 0 if space not preallocated. */
3304 rtx argblock = 0;
3305
c8010b80 3306 /* Mask of ECF_ and ERF_ flags. */
dfe08167 3307 int flags = 0;
c8010b80 3308 int return_flags = 0;
4448f543 3309#ifdef REG_PARM_STACK_SPACE
66d433c7 3310 /* Define the boundary of the register parm stack space that needs to be
6e96b626 3311 saved, if any. */
3312 int low_to_save, high_to_save;
66d433c7 3313 rtx save_area = 0; /* Place that it is saved */
3314#endif
3315
e0deb08c 3316 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
66d433c7 3317 char *initial_stack_usage_map = stack_usage_map;
e0deb08c 3318 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
a331ea1b 3319 char *stack_usage_map_buf = NULL;
66d433c7 3320
e0deb08c 3321 poly_int64 old_stack_allocated;
9069face 3322
3323 /* State variables to track stack modifications. */
66d433c7 3324 rtx old_stack_level = 0;
9069face 3325 int old_stack_arg_under_construction = 0;
e0deb08c 3326 poly_int64 old_pending_adj = 0;
66d433c7 3327 int old_inhibit_defer_pop = inhibit_defer_pop;
9069face 3328
3329 /* Some stack pointer alterations we make are performed via
3330 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3331 which we then also need to save/restore along the way. */
e0deb08c 3332 poly_int64 old_stack_pointer_delta = 0;
9069face 3333
60ecc450 3334 rtx call_fusage;
c2f47e15 3335 tree addr = CALL_EXPR_FN (exp);
19cb6b50 3336 int i;
92e1ef5b 3337 /* The alignment of the stack, in bits. */
38413c80 3338 unsigned HOST_WIDE_INT preferred_stack_boundary;
92e1ef5b 3339 /* The alignment of the stack, in bytes. */
38413c80 3340 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
4ee9c684 3341 /* The static chain value to use for this call. */
3342 rtx static_chain_value;
dfe08167 3343 /* See if this is "nothrow" function call. */
3344 if (TREE_NOTHROW (exp))
3345 flags |= ECF_NOTHROW;
3346
4ee9c684 3347 /* See if we can find a DECL-node for the actual function, and get the
3348 function attributes (flags) from the function decl or type node. */
97a1590b 3349 fndecl = get_callee_fndecl (exp);
3350 if (fndecl)
66d433c7 3351 {
e100aadc 3352 fntype = TREE_TYPE (fndecl);
97a1590b 3353 flags |= flags_from_decl_or_type (fndecl);
c8010b80 3354 return_flags |= decl_return_flags (fndecl);
66d433c7 3355 }
97a1590b 3356 else
8a8cdb8d 3357 {
16c9337c 3358 fntype = TREE_TYPE (TREE_TYPE (addr));
e100aadc 3359 flags |= flags_from_decl_or_type (fntype);
a27e3913 3360 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3361 flags |= ECF_BY_DESCRIPTOR;
8a8cdb8d 3362 }
16c9337c 3363 rettype = TREE_TYPE (exp);
d490e2f2 3364
e100aadc 3365 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
45550790 3366
4a081ddd 3367 /* Warn if this value is an aggregate type,
3368 regardless of which calling convention we are using for it. */
16c9337c 3369 if (AGGREGATE_TYPE_P (rettype))
efb9d9ee 3370 warning (OPT_Waggregate_return, "function call has aggregate value");
4a081ddd 3371
9c2a0c05 3372 /* If the result of a non looping pure or const function call is
3373 ignored (or void), and none of its arguments are volatile, we can
3374 avoid expanding the call and just evaluate the arguments for
3375 side-effects. */
4a081ddd 3376 if ((flags & (ECF_CONST | ECF_PURE))
9c2a0c05 3377 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
4a081ddd 3378 && (ignore || target == const0_rtx
16c9337c 3379 || TYPE_MODE (rettype) == VOIDmode))
4a081ddd 3380 {
3381 bool volatilep = false;
3382 tree arg;
cd46caee 3383 call_expr_arg_iterator iter;
4a081ddd 3384
cd46caee 3385 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3386 if (TREE_THIS_VOLATILE (arg))
4a081ddd 3387 {
3388 volatilep = true;
3389 break;
3390 }
3391
3392 if (! volatilep)
3393 {
cd46caee 3394 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3395 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
4a081ddd 3396 return const0_rtx;
3397 }
3398 }
3399
2d7187c2 3400#ifdef REG_PARM_STACK_SPACE
fa20f865 3401 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2d7187c2 3402#endif
2d7187c2 3403
fa20f865 3404 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 3405 && reg_parm_stack_space > 0 && PUSH_ARGS)
997d68fe 3406 must_preallocate = 1;
997d68fe 3407
66d433c7 3408 /* Set up a place to return a structure. */
3409
3410 /* Cater to broken compilers. */
4cd5bb61 3411 if (aggregate_value_p (exp, fntype))
66d433c7 3412 {
3413 /* This call returns a big structure. */
2dd6f9ed 3414 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
66d433c7 3415
3416#ifdef PCC_STATIC_STRUCT_RETURN
f49c64ba 3417 {
3418 pcc_struct_value = 1;
f49c64ba 3419 }
3420#else /* not PCC_STATIC_STRUCT_RETURN */
3421 {
e967c3ed 3422 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3423 struct_value_size = -1;
66d433c7 3424
e012cdc7 3425 /* Even if it is semantically safe to use the target as the return
3426 slot, it may be not sufficiently aligned for the return type. */
3427 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3428 && target
3429 && MEM_P (target)
8fb8d942 3430 /* If rettype is addressable, we may not create a temporary.
3431 If target is properly aligned at runtime and the compiler
3432 just doesn't know about it, it will work fine, otherwise it
3433 will be UB. */
3434 && (TREE_ADDRESSABLE (rettype)
3435 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3436 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3437 MEM_ALIGN (target)))))
f49c64ba 3438 structure_value_addr = XEXP (target, 0);
3439 else
3440 {
f49c64ba 3441 /* For variable-sized objects, we must be called with a target
3442 specified. If we were to allocate space on the stack here,
3443 we would have no way of knowing when to free it. */
0ab48139 3444 rtx d = assign_temp (rettype, 1, 1);
930f0e87 3445 structure_value_addr = XEXP (d, 0);
f49c64ba 3446 target = 0;
3447 }
3448 }
3449#endif /* not PCC_STATIC_STRUCT_RETURN */
66d433c7 3450 }
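  /* To make the two return conventions concrete (an illustrative
     sketch):

	 struct S { int a, b, c; };
	 struct S f (void);

     the non-PCC path gives the callee the address of a slot for S,
     either as a hidden first argument or through struct_value_rtx,
     while PCC_STATIC_STRUCT_RETURN makes the callee return the address
     of where the value can be found.  Either way the value travels
     through memory, which is why ECF_CONST/ECF_PURE were cleared
     above.  */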
3451
0e0be288 3452 /* Figure out the amount to which the stack should be aligned. */
0e0be288 3453 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
28992b23 3454 if (fndecl)
3455 {
35ee1c66 3456 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
9a27561f 3457 /* Without automatic stack alignment, we can't increase preferred
3458 stack boundary. With automatic stack alignment, it is
3459 unnecessary since unless we can guarantee that all callers will
3460 align the outgoing stack properly, callee has to align its
3461 stack anyway. */
3462 if (i
3463 && i->preferred_incoming_stack_boundary
3464 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
28992b23 3465 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3466 }
0e0be288 3467
3468 /* Operand 0 is a pointer-to-function; get the type of the function. */
95672afe 3469 funtype = TREE_TYPE (addr);
231bd014 3470 gcc_assert (POINTER_TYPE_P (funtype));
0e0be288 3471 funtype = TREE_TYPE (funtype);
3472
cd46caee 3473 /* Count whether there are actual complex arguments that need to be split
3474 into their real and imaginary parts. Munge the type_arg_types
3475 appropriately here as well. */
92d40bc4 3476 if (targetm.calls.split_complex_arg)
915e81b8 3477 {
cd46caee 3478 call_expr_arg_iterator iter;
3479 tree arg;
3480 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3481 {
3482 tree type = TREE_TYPE (arg);
3483 if (type && TREE_CODE (type) == COMPLEX_TYPE
3484 && targetm.calls.split_complex_arg (type))
3485 num_complex_actuals++;
3486 }
915e81b8 3487 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
915e81b8 3488 }
3489 else
3490 type_arg_types = TYPE_ARG_TYPES (funtype);
3491
0e0be288 3492 if (flags & ECF_MAY_BE_ALLOCA)
18d50ae6 3493 cfun->calls_alloca = 1;
0e0be288 3494
3495 /* If struct_value_rtx is 0, it means pass the address
cd46caee 3496 as if it were an extra parameter. Put the argument expression
3497 in structure_value_addr_value. */
45550790 3498 if (structure_value_addr && struct_value == 0)
0e0be288 3499 {
3500 /* If structure_value_addr is a REG other than
3501 virtual_outgoing_args_rtx, we can always use it. If it
3502 is not a REG, we must always copy it into a register.
3503 If it is virtual_outgoing_args_rtx, we must copy it to another
3504 register in some cases. */
8ad4c111 3505 rtx temp = (!REG_P (structure_value_addr)
0e0be288 3506 || (ACCUMULATE_OUTGOING_ARGS
3507 && stack_arg_under_construction
3508 && structure_value_addr == virtual_outgoing_args_rtx)
0d568ddf 3509 ? copy_addr_to_reg (convert_memory_address
e100aadc 3510 (Pmode, structure_value_addr))
0e0be288 3511 : structure_value_addr);
3512
cd46caee 3513 structure_value_addr_value =
3514 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
1e42d5c6 3515 structure_value_addr_parm = 1;
0e0be288 3516 }
3517
3518 /* Count the arguments and set NUM_ACTUALS. */
cd46caee 3519 num_actuals =
3520 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
0e0be288 3521
3522 /* Compute number of named args.
30a10006 3523 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3524
3525 if (type_arg_types != 0)
3526 n_named_args
3527 = (list_length (type_arg_types)
3528 /* Count the struct value address, if it is passed as a parm. */
3529 + structure_value_addr_parm);
3530 else
3531 /* If we know nothing, treat all args as named. */
3532 n_named_args = num_actuals;
3533
3534 /* Start updating where the next arg would go.
3535
3536 On some machines (such as the PA) indirect calls have a different
3537 calling convention than normal calls. The fourth argument in
3538 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3539 or not. */
39cba157 3540 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3541 args_so_far = pack_cumulative_args (&args_so_far_v);
30a10006 3542
3543 /* Now possibly adjust the number of named args.
0e0be288 3544 Normally, don't include the last named arg if anonymous args follow.
8bdddbd1 3545 We do include the last named arg if
3546 targetm.calls.strict_argument_naming() returns nonzero.
0e0be288 3547 (If no anonymous args follow, the result of list_length is actually
3548 one too large. This is harmless.)
3549
a107cd89 3550 If targetm.calls.pretend_outgoing_varargs_named() returns
8bdddbd1 3551 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3552 this machine will be able to place unnamed args that were passed
3553 in registers into the stack. So treat all args as named. This
3554 allows the insns emitting for a specific argument list to be
3555 independent of the function declaration.
a107cd89 3556
3557 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3558 we do not have any reliable way to pass unnamed args in
3559 registers, so we must force them into memory. */
0e0be288 3560
30a10006 3561 if (type_arg_types != 0
39cba157 3562 && targetm.calls.strict_argument_naming (args_so_far))
30a10006 3563 ;
3564 else if (type_arg_types != 0
39cba157 3565 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
30a10006 3566 /* Don't include the last named arg. */
3567 --n_named_args;
0e0be288 3568 else
30a10006 3569 /* Treat all args as named. */
0e0be288 3570 n_named_args = num_actuals;
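  /* A worked instance of the rules above (a sketch; the hook results
     are target-dependent): for a call to

	 int f (const char *fmt, ...);

     the raw count from list_length is 1 (just FMT).  If both
     strict_argument_naming and pretend_outgoing_varargs_named return
     false, the last named arg is not counted, so n_named_args drops
     to 0 and FMT is handled like the anonymous arguments that follow
     it.  */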
3571
0e0be288 3572 /* Make a vector to hold all the information about each arg. */
1f303606 3573 args = XCNEWVEC (struct arg_data, num_actuals);
0e0be288 3574
00dddcf2 3575 /* Build up entries in the ARGS array, compute the size of the
3576 arguments into ARGS_SIZE, etc. */
0e0be288 3577 initialize_argument_information (num_actuals, args, &args_size,
cd46caee 3578 n_named_args, exp,
d8b9c828 3579 structure_value_addr_value, fndecl, fntype,
39cba157 3580 args_so_far, reg_parm_stack_space,
0e0be288 3581 &old_stack_level, &old_pending_adj,
eaa112a0 3582 &must_preallocate, &flags,
4ee9c684 3583 &try_tail_call, CALL_FROM_THUNK_P (exp));
0e0be288 3584
3585 if (args_size.var)
2dd6f9ed 3586 must_preallocate = 1;
0e0be288 3587
3588 /* Now make final decision about preallocating stack space. */
3589 must_preallocate = finalize_must_preallocate (must_preallocate,
3590 num_actuals, args,
3591 &args_size);
3592
3593 /* If the structure value address will reference the stack pointer, we
3594 must stabilize it. We don't need to do this if we know that we are
3595 not going to adjust the stack pointer in processing this call. */
3596
3597 if (structure_value_addr
3598 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3599 || reg_mentioned_p (virtual_outgoing_args_rtx,
3600 structure_value_addr))
3601 && (args_size.var
e0deb08c 3602 || (!ACCUMULATE_OUTGOING_ARGS
3603 && maybe_ne (args_size.constant, 0))))
0e0be288 3604 structure_value_addr = copy_to_reg (structure_value_addr);
60ecc450 3605
0d568ddf 3606 /* Tail calls can make things harder to debug, and we've traditionally
4f8af819 3607 pushed these optimizations into -O2. Don't try if we're already
fdf2b689 3608 expanding a call, as that means we're an argument. Don't try if
011e6b51 3609 there are cleanups, as we know there's code to follow the call. */
60ecc450 3610
0e0be288 3611 if (currently_expanding_call++ != 0
3612 || !flag_optimize_sibling_calls
4ee9c684 3613 || args_size.var
3072d30e 3614 || dbg_cnt (tail_call) == false)
4ee9c684 3615 try_tail_call = 0;
0e0be288 3616
b4a61e77 3617 /* If the user has marked the function as requiring tail-call
3618 optimization, attempt it. */
3619 if (must_tail_call)
3620 try_tail_call = 1;
3621
0e0be288 3622 /* Check the remaining reasons that tail call optimization could fail. */
80e11038 3623 if (try_tail_call)
b4a61e77 3624 try_tail_call = can_implement_as_sibling_call_p (exp,
3625 structure_value_addr,
3626 funtype,
3627 reg_parm_stack_space,
3628 fndecl,
80e11038 3629 flags, addr, args_size);
4b066641 3630
4681dd41 3631 /* Check if caller and callee disagree in promotion of function
3632 return value. */
3633 if (try_tail_call)
3634 {
3754d046 3635 machine_mode caller_mode, caller_promoted_mode;
3636 machine_mode callee_mode, callee_promoted_mode;
4681dd41 3637 int caller_unsignedp, callee_unsignedp;
3638 tree caller_res = DECL_RESULT (current_function_decl);
3639
3640 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3b2411a8 3641 caller_mode = DECL_MODE (caller_res);
4681dd41 3642 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3b2411a8 3643 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3644 caller_promoted_mode
3645 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3646 &caller_unsignedp,
3647 TREE_TYPE (current_function_decl), 1);
3648 callee_promoted_mode
c879dbcf 3649 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3b2411a8 3650 &callee_unsignedp,
c879dbcf 3651 funtype, 1);
4681dd41 3652 if (caller_mode != VOIDmode
3653 && (caller_promoted_mode != callee_promoted_mode
3654 || ((caller_mode != caller_promoted_mode
3655 || callee_mode != callee_promoted_mode)
3656 && (caller_unsignedp != callee_unsignedp
974534ab 3657 || partial_subreg_p (caller_mode, callee_mode)))))
b4a61e77 3658 {
3659 try_tail_call = 0;
3660 maybe_complain_about_tail_call (exp,
3661 "caller and callee disagree in"
3662 " promotion of function"
3663 " return value");
3664 }
4681dd41 3665 }
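  /* A concrete instance of the mismatch guarded against above
     (illustrative):

	 signed short callee (void);
	 unsigned short caller (void) { return callee (); }

     On a target that promotes short return values to full words, both
     sides use the same promoted mode but with different signedness, so
     the bits the callee leaves in the return register are not what the
     caller's own callers expect; the tail call must be refused.  */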
3666
755ece1f 3667 /* Ensure current function's preferred stack boundary is at least
3668 what we need. Stack alignment may also increase preferred stack
3669 boundary. */
54d759e3 3670 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
edb7afe8 3671 crtl->preferred_stack_boundary = preferred_stack_boundary;
755ece1f 3672 else
3673 preferred_stack_boundary = crtl->preferred_stack_boundary;
d0285dd8 3674
0e0be288 3675 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4b066641 3676
60ecc450 3677 /* We want to make two insn chains; one for a sibling call, the other
3678 for a normal call. We will select one of the two chains after
3679 initial RTL generation is complete. */
6e96b626 3680 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
60ecc450 3681 {
3682 int sibcall_failure = 0;
35a3065a 3683 /* We want to emit any pending stack adjustments before the tail
60ecc450 3684 recursion "call". That way we know any adjustment after the tail
0d568ddf 3685 recursion call can be ignored if we indeed use the tail
60ecc450 3686 call expansion. */
b6d206a2 3687 saved_pending_stack_adjust save;
3663becd 3688 rtx_insn *insns, *before_call, *after_args;
3689 rtx next_arg_reg;
1e2b2ab3 3690
60ecc450 3691 if (pass == 0)
3692 {
60ecc450 3693 /* State variables we need to save and restore between
3694 iterations. */
b6d206a2 3695 save_pending_stack_adjust (&save);
60ecc450 3696 }
dfe08167 3697 if (pass)
3698 flags &= ~ECF_SIBCALL;
3699 else
3700 flags |= ECF_SIBCALL;
66d433c7 3701
60ecc450 3702 /* Other state variables that we must reinitialize each time
dfe08167 3703 through the loop (that are not initialized by the loop itself). */
60ecc450 3704 argblock = 0;
3705 call_fusage = 0;
2f921ec9 3706
c87678e4 3707 /* Start a new sequence for the normal call case.
66d433c7 3708
60ecc450 3709 From this point on, if the sibling call fails, we want to set
3710 sibcall_failure instead of continuing the loop. */
3711 start_sequence ();
412321ce 3712
60ecc450 3713 /* Don't let pending stack adjusts add up to too much.
3714 Also, do all pending adjustments now if there is any chance
3715 this might be a call to alloca or if we are expanding a sibling
ff3ae375 3716 call sequence.
82e95be3 3717 Also do the adjustments before a throwing call, otherwise
3718 exception handling can fail; PR 19225. */
e0deb08c 3719 if (maybe_ge (pending_stack_adjust, 32)
3720 || (maybe_ne (pending_stack_adjust, 0)
ff3ae375 3721 && (flags & ECF_MAY_BE_ALLOCA))
e0deb08c 3722 || (maybe_ne (pending_stack_adjust, 0)
82e95be3 3723 && flag_exceptions && !(flags & ECF_NOTHROW))
60ecc450 3724 || pass == 0)
3725 do_pending_stack_adjust ();
66d433c7 3726
60ecc450 3727 /* Precompute any arguments as needed. */
02510658 3728 if (pass)
2dd6f9ed 3729 precompute_arguments (num_actuals, args);
66d433c7 3730
60ecc450 3731 /* Now we are about to start emitting insns that can be deleted
3732 if a libcall is deleted. */
2dd6f9ed 3733 if (pass && (flags & ECF_MALLOC))
60ecc450 3734 start_sequence ();
66d433c7 3735
783f362b 3736 if (pass == 0
3737 && crtl->stack_protect_guard
3738 && targetm.stack_protect_runtime_enabled_p ())
71d89928 3739 stack_protect_epilogue ();
3740
0e0be288 3741 adjusted_args_size = args_size;
481feae3 3742 /* Compute the actual size of the argument block required. The variable
3743 and constant sizes must be combined, the size may have to be rounded,
3744 and there may be a minimum required size. When generating a sibcall
3745 pattern, do not round up, since we'll be re-using whatever space our
3746 caller provided. */
3747 unadjusted_args_size
c87678e4 3748 = compute_argument_block_size (reg_parm_stack_space,
3749 &adjusted_args_size,
fa20f865 3750 fndecl, fntype,
481feae3 3751 (pass == 0 ? 0
3752 : preferred_stack_boundary));
3753
c87678e4 3754 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
481feae3 3755
02510658 3756 /* The argument block when performing a sibling call is the
a0c938f0 3757 incoming argument block. */
02510658 3758 if (pass == 0)
7ecc63d3 3759 {
27a7a23a 3760 argblock = crtl->args.internal_arg_pointer;
a8b58ffb 3761 if (STACK_GROWS_DOWNWARD)
3762 argblock
3763 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3764 else
3765 argblock
3766 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3767
e0deb08c 3768 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3769 stored_args_map = sbitmap_alloc (map_size);
53c5d9d4 3770 bitmap_clear (stored_args_map);
e0deb08c 3771 stored_args_watermark = HOST_WIDE_INT_M1U;
7ecc63d3 3772 }
481feae3 3773
60ecc450 3774 /* If we have no actual push instructions, or shouldn't use them,
3775 make space for all args right now. */
0e0be288 3776 else if (adjusted_args_size.var != 0)
66d433c7 3777 {
60ecc450 3778 if (old_stack_level == 0)
3779 {
e9c97615 3780 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 3781 old_stack_pointer_delta = stack_pointer_delta;
60ecc450 3782 old_pending_adj = pending_stack_adjust;
3783 pending_stack_adjust = 0;
60ecc450 3784 /* stack_arg_under_construction says whether a stack arg is
3785 being constructed at the old stack level. Pushing the stack
3786 gets a clean outgoing argument block. */
3787 old_stack_arg_under_construction = stack_arg_under_construction;
3788 stack_arg_under_construction = 0;
60ecc450 3789 }
0e0be288 3790 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
8c0dd614 3791 if (flag_stack_usage_info)
990495a7 3792 current_function_has_unbounded_dynamic_stack_size = 1;
66d433c7 3793 }
60ecc450 3794 else
3795 {
3796 /* Note that we must go through the motions of allocating an argument
3797 block even if the size is zero because we may be storing args
3798 in the area reserved for register arguments, which may be part of
3799 the stack frame. */
7221f864 3800
e0deb08c 3801 poly_int64 needed = adjusted_args_size.constant;
66d433c7 3802
60ecc450 3803 /* Store the maximum argument space used. It will be pushed by
3804 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3805 checking). */
66d433c7 3806
e0deb08c 3807 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3808 needed);
66d433c7 3809
60ecc450 3810 if (must_preallocate)
3811 {
4448f543 3812 if (ACCUMULATE_OUTGOING_ARGS)
3813 {
02510658 3814 /* Since the stack pointer will never be pushed, it is
3815 possible for the evaluation of a parm to clobber
3816 something we have already written to the stack.
3817 Since most function calls on RISC machines do not use
3818 the stack, this is uncommon, but must work correctly.
7221f864 3819
4448f543 3820 Therefore, we save any area of the stack that was already
02510658 3821 written and that we are using. Here we set up to do this
3822 by making a new stack usage map from the old one. The
c87678e4 3823 actual save will be done by store_one_arg.
7221f864 3824
4448f543 3825 Another approach might be to try to reorder the argument
3826 evaluations to avoid this conflicting stack usage. */
7221f864 3827
02510658 3828 /* Since we will be writing into the entire argument area,
3829 the map must be allocated for its entire size, not just
3830 the part that is the responsibility of the caller. */
fa20f865 3831 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 3832 needed += reg_parm_stack_space;
66d433c7 3833
e0deb08c 3834 poly_int64 limit = needed;
ccccd62c 3835 if (ARGS_GROW_DOWNWARD)
e0deb08c 3836 limit += 1;
3837
3838 /* For polynomial sizes, this is the maximum possible
3839 size needed for arguments with a constant size
3840 and offset. */
3841 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3842 highest_outgoing_arg_in_use
3843 = MAX (initial_highest_arg_in_use, const_limit);
ccccd62c 3844
dd045aee 3845 free (stack_usage_map_buf);
4c36ffe6 3846 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3847 stack_usage_map = stack_usage_map_buf;
66d433c7 3848
4448f543 3849 if (initial_highest_arg_in_use)
8e547276 3850 memcpy (stack_usage_map, initial_stack_usage_map,
3851 initial_highest_arg_in_use);
d1b03b62 3852
4448f543 3853 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 3854 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 3855 (highest_outgoing_arg_in_use
3856 - initial_highest_arg_in_use));
3857 needed = 0;
d1b03b62 3858
02510658 3859 /* The address of the outgoing argument list must not be
3860 copied to a register here, because argblock would be left
3861 pointing to the wrong place after the call to
c87678e4 3862 allocate_dynamic_stack_space below. */
d1b03b62 3863
4448f543 3864 argblock = virtual_outgoing_args_rtx;
c87678e4 3865 }
4448f543 3866 else
7221f864 3867 {
e0deb08c 3868 /* Try to reuse some or all of the pending_stack_adjust
3869 to get this space. */
3870 if (inhibit_defer_pop == 0
3871 && (combine_pending_stack_adjustment_and_call
3872 (&needed,
3873 unadjusted_args_size,
3874 &adjusted_args_size,
3875 preferred_unit_stack_boundary)))
60ecc450 3876 {
481feae3 3877 /* combine_pending_stack_adjustment_and_call computes
3878 an adjustment before the arguments are allocated.
3879 Account for them and see whether or not the stack
3880 needs to go up or down. */
3881 needed = unadjusted_args_size - needed;
3882
e0deb08c 3883 /* Checked by
3884 combine_pending_stack_adjustment_and_call. */
3885 gcc_checking_assert (ordered_p (needed, 0));
3886 if (maybe_lt (needed, 0))
4448f543 3887 {
481feae3 3888 /* We're releasing stack space. */
3889 /* ??? We can avoid any adjustment at all if we're
3890 already aligned. FIXME. */
3891 pending_stack_adjust = -needed;
3892 do_pending_stack_adjust ();
4448f543 3893 needed = 0;
3894 }
c87678e4 3895 else
481feae3 3896 /* We need to allocate space. We'll do that in
3897 push_block below. */
3898 pending_stack_adjust = 0;
60ecc450 3899 }
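	      /* Qualitative example of the reuse above (a sketch with
		 invented numbers): if a previous call left, say, 24
		 bytes of pending stack adjustment waiting to be popped
		 and this call needs 16 bytes of argument space, the
		 combination can refrain from popping part of that
		 space and let the arguments occupy it, instead of
		 popping 24 bytes only to push 16 back.  */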
481feae3 3900
3901 /* Special case this because overhead of `push_block' in
3902 this case is non-trivial. */
e0deb08c 3903 if (known_eq (needed, 0))
4448f543 3904 argblock = virtual_outgoing_args_rtx;
60ecc450 3905 else
ad3b56f3 3906 {
e0deb08c 3907 rtx needed_rtx = gen_int_mode (needed, Pmode);
3908 argblock = push_block (needed_rtx, 0, 0);
ccccd62c 3909 if (ARGS_GROW_DOWNWARD)
3910 argblock = plus_constant (Pmode, argblock, needed);
ad3b56f3 3911 }
4448f543 3912
02510658 3913 /* We only really need to call `copy_to_reg' in the case
3914 where push insns are going to be used to pass ARGBLOCK
3915 to a function call in ARGS. In that case, the stack
3916 pointer changes value from the allocation point to the
3917 call point, and hence the value of
3918 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3919 as well always do it. */
4448f543 3920 argblock = copy_to_reg (argblock);
9069face 3921 }
3922 }
3923 }
60ecc450 3924
9069face 3925 if (ACCUMULATE_OUTGOING_ARGS)
3926 {
3927 /* The save/restore code in store_one_arg handles all
3928 cases except one: a constructor call (including a C
3929 function returning a BLKmode struct) to initialize
3930 an argument. */
3931 if (stack_arg_under_construction)
3932 {
63c68695 3933 rtx push_size
e0deb08c 3934 = (gen_int_mode
3935 (adjusted_args_size.constant
3936 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3937 : TREE_TYPE (fndecl))
3938 ? 0 : reg_parm_stack_space), Pmode));
9069face 3939 if (old_stack_level == 0)
3940 {
e9c97615 3941 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 3942 old_stack_pointer_delta = stack_pointer_delta;
3943 old_pending_adj = pending_stack_adjust;
3944 pending_stack_adjust = 0;
3945 /* stack_arg_under_construction says whether a stack
3946 arg is being constructed at the old stack level.
3947 Pushing the stack gets a clean outgoing argument
3948 block. */
3949 old_stack_arg_under_construction
3950 = stack_arg_under_construction;
3951 stack_arg_under_construction = 0;
3952 /* Make a new map for the new argument list. */
dd045aee 3953 free (stack_usage_map_buf);
43959b95 3954 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3955 stack_usage_map = stack_usage_map_buf;
9069face 3956 highest_outgoing_arg_in_use = 0;
e0deb08c 3957 stack_usage_watermark = HOST_WIDE_INT_M1U;
4448f543 3958 }
990495a7 3959 /* We can pass TRUE as the 4th argument because we just
3960 saved the stack pointer and will restore it right after
3961 the call. */
2b34677f 3962 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3963 -1, true);
60ecc450 3964 }
a3585b90 3965
9069face 3966 /* If argument evaluation might modify the stack pointer,
3967 copy the address of the argument list to a register. */
3968 for (i = 0; i < num_actuals; i++)
3969 if (args[i].pass_on_stack)
3970 {
3971 argblock = copy_addr_to_reg (argblock);
3972 break;
3973 }
3974 }
4c9e08a4 3975
60ecc450 3976 compute_argument_addresses (args, argblock, num_actuals);
a3585b90 3977
2d298c93 3978 /* Stack is properly aligned, pops can't safely be deferred during
3979 the evaluation of the arguments. */
3980 NO_DEFER_POP;
3981
3a12804f 3982 /* Precompute all register parameters. It isn't safe to compute
3983 anything once we have started filling any specific hard regs.
3984 TLS symbols sometimes need a call to resolve. Precompute
3985 register parameters before any stack pointer manipulation
3986 to avoid unaligned stack in the called function. */
3987 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3988
2d298c93 3989 OK_DEFER_POP;
3990
bf29c577 3991 /* Perform stack alignment before the first push (the last arg). */
3992 if (argblock == 0
e0deb08c 3993 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
3994 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
ff92623c 3995 {
60ecc450 3996 /* When the stack adjustment is pending, we get better code
3997 by combining the adjustments. */
e0deb08c 3998 if (maybe_ne (pending_stack_adjust, 0)
3999 && ! inhibit_defer_pop
4000 && (combine_pending_stack_adjustment_and_call
4001 (&pending_stack_adjust,
4002 unadjusted_args_size,
4003 &adjusted_args_size,
4004 preferred_unit_stack_boundary)))
4005 do_pending_stack_adjust ();
60ecc450 4006 else if (argblock == 0)
e0deb08c 4007 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4008 - unadjusted_args_size,
4009 Pmode));
60ecc450 4010 }
fa4f1f09 4011 /* Now that the stack is properly aligned, pops can't safely
4012 be deferred during the evaluation of the arguments. */
4013 NO_DEFER_POP;
66d433c7 4014
990495a7 4015 /* Record the maximum pushed stack space size. We need to delay
4016 doing it this far to take into account the optimization done
4017 by combine_pending_stack_adjustment_and_call. */
8c0dd614 4018 if (flag_stack_usage_info
990495a7 4019 && !ACCUMULATE_OUTGOING_ARGS
4020 && pass
4021 && adjusted_args_size.var == 0)
4022 {
e0deb08c 4023 poly_int64 pushed = (adjusted_args_size.constant
4024 + pending_stack_adjust);
4025 current_function_pushed_stack_size
4026 = upper_bound (current_function_pushed_stack_size, pushed);
990495a7 4027 }
4028
95672afe 4029 funexp = rtx_for_function_call (fndecl, addr);
66d433c7 4030
c2f47e15 4031 if (CALL_EXPR_STATIC_CHAIN (exp))
4032 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4ee9c684 4033 else
4034 static_chain_value = 0;
4035
4448f543 4036#ifdef REG_PARM_STACK_SPACE
60ecc450 4037 /* Save the fixed argument area if it's part of the caller's frame and
4038 is clobbered by argument setup for this call. */
02510658 4039 if (ACCUMULATE_OUTGOING_ARGS && pass)
4448f543 4040 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4041 &low_to_save, &high_to_save);
41332f48 4042#endif
66d433c7 4043
60ecc450 4044 /* Now store (and compute if necessary) all non-register parms.
4045 These come before register parms, since they can require block-moves,
4046 which could clobber the registers used for register parms.
4047 Parms which have partial registers are not stored here,
4048 but we do preallocate space here if they want that. */
66d433c7 4049
60ecc450 4050 for (i = 0; i < num_actuals; i++)
eb940a48 4051 {
1e42d5c6 4052 if (args[i].reg == 0 || args[i].pass_on_stack)
eb940a48 4053 {
3663becd 4054 rtx_insn *before_arg = get_last_insn ();
eb940a48 4055
ba83222c 4056 /* We don't allow passing huge (> 2^30 B) arguments
4057 by value. It would cause an overflow later on. */
e0deb08c 4058 if (constant_lower_bound (adjusted_args_size.constant)
ba83222c 4059 >= (1 << (HOST_BITS_PER_INT - 2)))
4060 {
4061 sorry ("passing too large argument on stack");
4062 continue;
4063 }
4064
eb940a48 4065 if (store_one_arg (&args[i], argblock, flags,
4066 adjusted_args_size.var != 0,
4067 reg_parm_stack_space)
4068 || (pass == 0
4069 && check_sibcall_argument_overlap (before_arg,
4070 &args[i], 1)))
4071 sibcall_failure = 1;
4072 }
4073
4143d08b 4074 if (args[i].stack)
b4eeceb9 4075 call_fusage
4076 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4077 gen_rtx_USE (VOIDmode, args[i].stack),
4078 call_fusage);
eb940a48 4079 }
60ecc450 4080
4081 /* If we have a parm that is passed in registers but not in memory
4082 and whose alignment does not permit a direct copy into registers,
4083 make a group of pseudos that correspond to each register that we
4084 will later fill. */
4085 if (STRICT_ALIGNMENT)
4086 store_unaligned_arguments_into_pseudos (args, num_actuals);
4087
4088 /* Now store any partially-in-registers parm.
4089 This is the last place a block-move can happen. */
4090 if (reg_parm_seen)
4091 for (i = 0; i < num_actuals; i++)
4092 if (args[i].partial != 0 && ! args[i].pass_on_stack)
7ecc63d3 4093 {
3663becd 4094 rtx_insn *before_arg = get_last_insn ();
7ecc63d3 4095
a95e5776 4096 /* On targets with weird calling conventions (e.g. PA) it's
4097 hard to ensure that all cases of argument overlap between
4098 stack and registers work. Play it safe and bail out. */
4099 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4100 {
4101 sibcall_failure = 1;
4102 break;
4103 }
4104
57679d39 4105 if (store_one_arg (&args[i], argblock, flags,
4106 adjusted_args_size.var != 0,
4107 reg_parm_stack_space)
4108 || (pass == 0
4109 && check_sibcall_argument_overlap (before_arg,
42b11544 4110 &args[i], 1)))
7ecc63d3 4111 sibcall_failure = 1;
4112 }
66d433c7 4113
53597a55 4114 bool any_regs = false;
4115 for (i = 0; i < num_actuals; i++)
4116 if (args[i].reg != NULL_RTX)
4117 {
4118 any_regs = true;
4119 targetm.calls.call_args (args[i].reg, funtype);
4120 }
4121 if (!any_regs)
4122 targetm.calls.call_args (pc_rtx, funtype);
4123
4124 /* Figure out the register where the value, if any, will come back. */
4125 valreg = 0;
53597a55 4126 if (TYPE_MODE (rettype) != VOIDmode
4127 && ! structure_value_addr)
4128 {
4129 if (pcc_struct_value)
1e42d5c6 4130 valreg = hard_function_value (build_pointer_type (rettype),
4131 fndecl, NULL, (pass == 0));
53597a55 4132 else
1e42d5c6 4133 valreg = hard_function_value (rettype, fndecl, fntype,
4134 (pass == 0));
53597a55 4135
4136 /* If VALREG is a PARALLEL whose first member has a zero
4137 offset, use that. This is for targets such as m68k that
4138 return the same value in multiple places. */
4139 if (GET_CODE (valreg) == PARALLEL)
4140 {
4141 rtx elem = XVECEXP (valreg, 0, 0);
4142 rtx where = XEXP (elem, 0);
4143 rtx offset = XEXP (elem, 1);
4144 if (offset == const0_rtx
4145 && GET_MODE (where) == GET_MODE (valreg))
4146 valreg = where;
4147 }
4148 }
4149
60ecc450 4150 /* If register arguments require space on the stack and stack space
4151 was not preallocated, allocate stack space here for arguments
4152 passed in registers. */
fa20f865 4153 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 4154 && !ACCUMULATE_OUTGOING_ARGS
c87678e4 4155 && must_preallocate == 0 && reg_parm_stack_space > 0)
60ecc450 4156 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
985adbca 4157
60ecc450 4158 /* Pass the function the address in which to return a
4159 structure value. */
4160 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4161 {
0d568ddf 4162 structure_value_addr
85d654dd 4163 = convert_memory_address (Pmode, structure_value_addr);
45550790 4164 emit_move_insn (struct_value,
60ecc450 4165 force_reg (Pmode,
4166 force_operand (structure_value_addr,
4167 NULL_RTX)));
4168
8ad4c111 4169 if (REG_P (struct_value))
45550790 4170 use_reg (&call_fusage, struct_value);
60ecc450 4171 }
02c736f4 4172
c0e7e9f7 4173 after_args = get_last_insn ();
88f80691 4174 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4175 static_chain_value, &call_fusage,
4176 reg_parm_seen, flags);
e726704c 4177
42b11544 4178 load_register_parameters (args, num_actuals, &call_fusage, flags,
4179 pass == 0, &sibcall_failure);
c87678e4 4180
60ecc450 4181 /* Save a pointer to the last insn before the call, so that we can
4182 later safely search backwards to find the CALL_INSN. */
4183 before_call = get_last_insn ();
66d433c7 4184
7a8d641b 4185 /* Set up next argument register. For sibling calls on machines
4186 with register windows this should be the incoming register. */
7a8d641b 4187 if (pass == 0)
39cba157 4188 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
f387af4f 4189 VOIDmode,
4190 void_type_node,
4191 true);
7a8d641b 4192 else
39cba157 4193 next_arg_reg = targetm.calls.function_arg (args_so_far,
f387af4f 4194 VOIDmode, void_type_node,
4195 true);
7a8d641b 4196
c8010b80 4197 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4198 {
4199 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
bf29c577 4200 arg_nr = num_actuals - arg_nr - 1;
3d38d682 4201 if (arg_nr >= 0
4202 && arg_nr < num_actuals
4203 && args[arg_nr].reg
c8010b80 4204 && valreg
4205 && REG_P (valreg)
4206 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4207 call_fusage
4208 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
d1f9b275 4209 gen_rtx_SET (valreg, args[arg_nr].reg),
c8010b80 4210 call_fusage);
4211 }
60ecc450 4212 /* All arguments and registers used for the call must be set up by
4213 now! */
4214
481feae3 4215 /* Stack must be properly aligned now. */
231bd014 4216 gcc_assert (!pass
e0deb08c 4217 || multiple_p (stack_pointer_delta,
4218 preferred_unit_stack_boundary));
fa4f1f09 4219
60ecc450 4220 /* Generate the actual call instruction. */
4ee9c684 4221 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
0e0be288 4222 adjusted_args_size.constant, struct_value_size,
7a8d641b 4223 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
39cba157 4224 flags, args_so_far);
60ecc450 4225
fcf56aaf 4226 if (flag_ipa_ra)
2e3b0d0f 4227 {
3663becd 4228 rtx_call_insn *last;
4229 rtx datum = NULL_RTX;
2e3b0d0f 4230 if (fndecl != NULL_TREE)
4231 {
4232 datum = XEXP (DECL_RTL (fndecl), 0);
4233 gcc_assert (datum != NULL_RTX
4234 && GET_CODE (datum) == SYMBOL_REF);
4235 }
4236 last = last_call_insn ();
4237 add_reg_note (last, REG_CALL_DECL, datum);
4238 }
4239
c0e7e9f7 4240 /* If the call setup or the call itself overlaps with anything
4241 of the argument setup we probably clobbered our call address.
4242 In that case we can't do sibcalls. */
4243 if (pass == 0
4244 && check_sibcall_argument_overlap (after_args, 0, 0))
4245 sibcall_failure = 1;
4246
05d18e8b 4247 /* If a non-BLKmode value is returned at the most significant end
4248 of a register, shift the register right by the appropriate amount
4249 and update VALREG accordingly. BLKmode values are handled by the
4250 group load/store machinery below. */
4251 if (!structure_value_addr
4252 && !pcc_struct_value
d8ef55fc 4253 && TYPE_MODE (rettype) != VOIDmode
16c9337c 4254 && TYPE_MODE (rettype) != BLKmode
d8ef55fc 4255 && REG_P (valreg)
16c9337c 4256 && targetm.calls.return_in_msb (rettype))
05d18e8b 4257 {
16c9337c 4258 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
05d18e8b 4259 sibcall_failure = 1;
16c9337c 4260 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
05d18e8b 4261 }
4262
2dd6f9ed 4263 if (pass && (flags & ECF_MALLOC))
60ecc450 4264 {
4265 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3663becd 4266 rtx_insn *last, *insns;
60ecc450 4267
c87678e4 4268 /* The return value from a malloc-like function is a pointer. */
16c9337c 4269 if (TREE_CODE (rettype) == POINTER_TYPE)
10836fcc 4270 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
60ecc450 4271
4272 emit_move_insn (temp, valreg);
4273
4274 /* The return value from a malloc-like function cannot alias
4275 anything else. */
4276 last = get_last_insn ();
a1ddb869 4277 add_reg_note (last, REG_NOALIAS, temp);
60ecc450 4278
4279 /* Write out the sequence. */
4280 insns = get_insns ();
4281 end_sequence ();
31d3e01c 4282 emit_insn (insns);
60ecc450 4283 valreg = temp;
4284 }
66d433c7 4285
3072d30e 4286 /* For calls to `setjmp', etc., inform
4287 function.c:setjmp_warnings that it should complain if
4288 nonvolatile values are live. For functions that cannot
4289 return, inform flow that control does not fall through. */
66d433c7 4290
4fec1d6c 4291 if ((flags & ECF_NORETURN) || pass == 0)
02c736f4 4292 {
9239aee6 4293 /* The barrier must be emitted
60ecc450 4294 immediately after the CALL_INSN. Some ports emit more
4295 than just a CALL_INSN above, so we must search for it here. */
66d433c7 4296
3663becd 4297 rtx_insn *last = get_last_insn ();
6d7dc5b9 4298 while (!CALL_P (last))
60ecc450 4299 {
4300 last = PREV_INSN (last);
4301 /* There was no CALL_INSN? */
231bd014 4302 gcc_assert (last != before_call);
60ecc450 4303 }
66d433c7 4304
9239aee6 4305 emit_barrier_after (last);
20f5f6d0 4306
b494d193 4307 /* Stack adjustments after a noreturn call are dead code.
4308 However when NO_DEFER_POP is in effect, we must preserve
4309 stack_pointer_delta. */
4310 if (inhibit_defer_pop == 0)
4311 {
4312 stack_pointer_delta = old_stack_allocated;
4313 pending_stack_adjust = 0;
4314 }
60ecc450 4315 }
66d433c7 4316
60ecc450 4317 /* If value type not void, return an rtx for the value. */
66d433c7 4318
16c9337c 4319 if (TYPE_MODE (rettype) == VOIDmode
60ecc450 4320 || ignore)
5edaabad 4321 target = const0_rtx;
60ecc450 4322 else if (structure_value_addr)
4323 {
e16ceb8e 4324 if (target == 0 || !MEM_P (target))
60ecc450 4325 {
f7c44134 4326 target
16c9337c 4327 = gen_rtx_MEM (TYPE_MODE (rettype),
4328 memory_address (TYPE_MODE (rettype),
f7c44134 4329 structure_value_addr));
16c9337c 4330 set_mem_attributes (target, rettype, 1);
60ecc450 4331 }
4332 }
4333 else if (pcc_struct_value)
566d850a 4334 {
60ecc450 4335 /* This is the special C++ case where we need to
4336 know what the true target was. We take care to
4337 never use this value more than once in one expression. */
16c9337c 4338 target = gen_rtx_MEM (TYPE_MODE (rettype),
60ecc450 4339 copy_to_reg (valreg));
16c9337c 4340 set_mem_attributes (target, rettype, 1);
566d850a 4341 }
60ecc450 4342 /* Handle calls that return values in multiple non-contiguous locations.
4343 The Irix 6 ABI has examples of this. */
4344 else if (GET_CODE (valreg) == PARALLEL)
4345 {
4ee9c684 4346 if (target == 0)
2d0fd66d 4347 target = emit_group_move_into_temps (valreg);
5bd5c1c2 4348 else if (rtx_equal_p (target, valreg))
4349 ;
4350 else if (GET_CODE (target) == PARALLEL)
4351 /* Handle the result of an emit_group_move_into_temps
4352 call in the previous pass. */
4353 emit_group_move (target, valreg);
4354 else
16c9337c 4355 emit_group_store (target, valreg, rettype,
4356 int_size_in_bytes (rettype));
60ecc450 4357 }
4358 else if (target
16c9337c 4359 && GET_MODE (target) == TYPE_MODE (rettype)
60ecc450 4360 && GET_MODE (target) == GET_MODE (valreg))
4361 {
aadbaa40 4362 bool may_overlap = false;
4363
360738f1 4364 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4365 reg to a plain register. */
90af1361 4366 if (!REG_P (target) || HARD_REGISTER_P (target))
4367 valreg = avoid_likely_spilled_reg (valreg);
360738f1 4368
aadbaa40 4369 /* If TARGET is a MEM in the argument area, and we have
4370 saved part of the argument area, then we can't store
4371 directly into TARGET as it may get overwritten when we
4372 restore the argument save area below. Don't work too
4373 hard though and simply force TARGET to a register if it
4374 is a MEM; the optimizer is quite likely to sort it out. */
4375 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4376 for (i = 0; i < num_actuals; i++)
4377 if (args[i].save_area)
4378 {
4379 may_overlap = true;
4380 break;
4381 }
dbe1f550 4382
aadbaa40 4383 if (may_overlap)
4384 target = copy_to_reg (valreg);
4385 else
4386 {
4387 /* TARGET and VALREG cannot be equal at this point
4388 because the latter would not have
4389 REG_FUNCTION_VALUE_P true, while the former would if
4390 it were referring to the same register.
4391
4392 If they refer to the same register, this move will be
4393 a no-op, except when function inlining is being
4394 done. */
4395 emit_move_insn (target, valreg);
4396
4397 /* If we are setting a MEM, this code must be executed.
4398 Since it is emitted after the call insn, sibcall
4399 optimization cannot be performed in that case. */
4400 if (MEM_P (target))
4401 sibcall_failure = 1;
4402 }
60ecc450 4403 }
60ecc450 4404 else
90af1361 4405 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
66d433c7 4406
3b2411a8 4407 /* If we promoted this return value, make the proper SUBREG.
4408 TARGET might be const0_rtx here, so be careful. */
4409 if (REG_P (target)
16c9337c 4410 && TYPE_MODE (rettype) != BLKmode
4411 && GET_MODE (target) != TYPE_MODE (rettype))
45550790 4412 {
16c9337c 4413 tree type = rettype;
3b2411a8 4414 int unsignedp = TYPE_UNSIGNED (type);
3754d046 4415 machine_mode pmode;
3b2411a8 4416
4417 /* Ensure we promote as expected, and get the new unsignedness. */
4418 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4419 funtype, 1);
4420 gcc_assert (GET_MODE (target) == pmode);
4421
9edf7ea8 4422 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4423 GET_MODE (target));
3b2411a8 4424 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4425 SUBREG_PROMOTED_VAR_P (target) = 1;
e8629f9e 4426 SUBREG_PROMOTED_SET (target, unsignedp);
45550790 4427 }
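      /* For instance (illustrative): if RETTYPE is a 16-bit integer
	 type that the ABI promotes to a full word, the code above
	 rewraps the word-mode return register as

	     (subreg:HI (reg:SI <valreg>) <lowpart offset>)

	 with SUBREG_PROMOTED_VAR_P set, so later expansion knows the
	 upper bits already hold the correct extension.  */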
23eb5fa6 4428
60ecc450 4429 /* If size of args is variable or this was a constructor call for a stack
4430 argument, restore saved stack-pointer value. */
66d433c7 4431
ff3ae375 4432 if (old_stack_level)
60ecc450 4433 {
3663becd 4434 rtx_insn *prev = get_last_insn ();
dfe00a8f 4435
e9c97615 4436 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9069face 4437 stack_pointer_delta = old_stack_pointer_delta;
dfe00a8f 4438
897445c7 4439 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
dfe00a8f 4440
60ecc450 4441 pending_stack_adjust = old_pending_adj;
80f06481 4442 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
60ecc450 4443 stack_arg_under_construction = old_stack_arg_under_construction;
4444 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4445 stack_usage_map = initial_stack_usage_map;
e0deb08c 4446 stack_usage_watermark = initial_stack_usage_watermark;
60ecc450 4447 sibcall_failure = 1;
4448 }
02510658 4449 else if (ACCUMULATE_OUTGOING_ARGS && pass)
60ecc450 4450 {
66d433c7 4451#ifdef REG_PARM_STACK_SPACE
60ecc450 4452 if (save_area)
6e96b626 4453 restore_fixed_argument_area (save_area, argblock,
4454 high_to_save, low_to_save);
41332f48 4455#endif
66d433c7 4456
60ecc450 4457 /* If we saved any argument areas, restore them. */
4458 for (i = 0; i < num_actuals; i++)
4459 if (args[i].save_area)
4460 {
3754d046 4461 machine_mode save_mode = GET_MODE (args[i].save_area);
60ecc450 4462 rtx stack_area
4463 = gen_rtx_MEM (save_mode,
4464 memory_address (save_mode,
4465 XEXP (args[i].stack_slot, 0)));
4466
4467 if (save_mode != BLKmode)
4468 emit_move_insn (stack_area, args[i].save_area);
4469 else
0378dbdc 4470 emit_block_move (stack_area, args[i].save_area,
e0deb08c 4471 (gen_int_mode
4472 (args[i].locate.size.constant, Pmode)),
0378dbdc 4473 BLOCK_OP_CALL_PARM);
60ecc450 4474 }
66d433c7 4475
60ecc450 4476 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4477 stack_usage_map = initial_stack_usage_map;
e0deb08c 4478 stack_usage_watermark = initial_stack_usage_watermark;
60ecc450 4479 }
66d433c7 4480
97354ae4 4481 /* If this was alloca, record the new stack level. */
4482 if (flags & ECF_MAY_BE_ALLOCA)
4483 record_new_stack_level ();
66d433c7 4484
60ecc450 4485 /* Free up storage we no longer need. */
4486 for (i = 0; i < num_actuals; ++i)
dd045aee 4487 free (args[i].aligned_regs);
60ecc450 4488
53597a55 4489 targetm.calls.end_call_args ();
4490
60ecc450 4491 insns = get_insns ();
4492 end_sequence ();
4493
4494 if (pass == 0)
4495 {
4496 tail_call_insns = insns;
4497
60ecc450 4498 /* Restore the pending stack adjustment now that we have
4499 finished generating the sibling call sequence. */
91b70175 4500
b6d206a2 4501 restore_pending_stack_adjust (&save);
0e0be288 4502
4503 /* Prepare arg structure for next iteration. */
c87678e4 4504 for (i = 0; i < num_actuals; i++)
0e0be288 4505 {
4506 args[i].value = 0;
4507 args[i].aligned_regs = 0;
4508 args[i].stack = 0;
4509 }
7ecc63d3 4510
4511 sbitmap_free (stored_args_map);
3663becd 4512 internal_arg_pointer_exp_state.scan_start = NULL;
f1f41a6c 4513 internal_arg_pointer_exp_state.cache.release ();
60ecc450 4514 }
4515 else
9069face 4516 {
4517 normal_call_insns = insns;
4518
4519 /* Verify that we've deallocated all the stack we used. */
4fec1d6c 4520 gcc_assert ((flags & ECF_NORETURN)
e0deb08c 4521 || known_eq (old_stack_allocated,
4522 stack_pointer_delta
4523 - pending_stack_adjust));
9069face 4524 }
ae8d6151 4525
4526 /* If something prevents making this a sibling call,
4527 zero out the sequence. */
4528 if (sibcall_failure)
3663becd 4529 tail_call_insns = NULL;
4ee9c684 4530 else
4531 break;
60ecc450 4532 }
4533
365db11e 4534 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4ee9c684 4535 arguments too, as the argument area is now clobbered by the call. */
4536 if (tail_call_insns)
60ecc450 4537 {
4ee9c684 4538 emit_insn (tail_call_insns);
18d50ae6 4539 crtl->tail_call_emit = true;
60ecc450 4540 }
4541 else
b4a61e77 4542 {
4543 emit_insn (normal_call_insns);
4544 if (try_tail_call)
4545 /* Ideally we'd emit a message for all of the ways that it could
4546 have failed. */
4547 maybe_complain_about_tail_call (exp, "tail call production failed");
4548 }
66d433c7 4549
60ecc450 4550 currently_expanding_call--;
6d801f27 4551
dd045aee 4552 free (stack_usage_map_buf);
1f303606 4553 free (args);
66d433c7 4554 return target;
4555}
915e81b8 4556
4ee9c684 4557/* A sibling call sequence invalidates any REG_EQUIV notes made for
4558 this function's incoming arguments.
4559
4560 At the start of RTL generation we know the only REG_EQUIV notes
0a227ed5 4561 in the rtl chain are those for incoming arguments, so we can look
4562 for REG_EQUIV notes between the start of the function and the
4563 NOTE_INSN_FUNCTION_BEG.
4ee9c684 4564
4565 This is (slight) overkill. We could keep track of the highest
4566 argument we clobber and be more selective in removing notes, but it
4567 does not seem to be worth the effort. */
0a227ed5 4568
4ee9c684 4569void
4570fixup_tail_calls (void)
4571{
3663becd 4572 rtx_insn *insn;
0a227ed5 4573
4574 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4575 {
750a330e 4576 rtx note;
4577
0a227ed5 4578 /* There are never REG_EQUIV notes for the incoming arguments
4579 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4580 if (NOTE_P (insn)
ad4583d9 4581 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
0a227ed5 4582 break;
4583
750a330e 4584 note = find_reg_note (insn, REG_EQUIV, 0);
4585 if (note)
4586 remove_note (insn, note);
4587 note = find_reg_note (insn, REG_EQUIV, 0);
4588 gcc_assert (!note);
0a227ed5 4589 }
4ee9c684 4590}
4591
915e81b8 4592/* Traverse a list of TYPES and expand all complex types into their
4593 components. */
5ab29745 4594static tree
915e81b8 4595split_complex_types (tree types)
4596{
4597 tree p;
4598
92d40bc4 4599 /* Before allocating memory, check for the common case of no complex types. */
4600 for (p = types; p; p = TREE_CHAIN (p))
4601 {
4602 tree type = TREE_VALUE (p);
4603 if (TREE_CODE (type) == COMPLEX_TYPE
4604 && targetm.calls.split_complex_arg (type))
a0c938f0 4605 goto found;
92d40bc4 4606 }
4607 return types;
4608
4609 found:
915e81b8 4610 types = copy_list (types);
4611
4612 for (p = types; p; p = TREE_CHAIN (p))
4613 {
4614 tree complex_type = TREE_VALUE (p);
4615
92d40bc4 4616 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4617 && targetm.calls.split_complex_arg (complex_type))
915e81b8 4618 {
4619 tree next, imag;
4620
4621 /* Rewrite the complex type with its component type. */
4622 TREE_VALUE (p) = TREE_TYPE (complex_type);
4623 next = TREE_CHAIN (p);
4624
4625 /* Add another component type for the imaginary part. */
4626 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4627 TREE_CHAIN (p) = imag;
4628 TREE_CHAIN (imag) = next;
4629
4630 /* Skip the newly created node. */
4631 p = TREE_CHAIN (p);
4632 }
4633 }
4634
4635 return types;
4636}
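
/* A minimal standalone sketch (not compiled in) of the splice done by
   split_complex_types, using a plain singly linked list in place of
   GCC's TREE_LIST nodes; every name below is hypothetical, and the
   copy_list copy-on-write step is omitted.  */
#if 0
#include <stdlib.h>

struct type_node
{
  int is_complex;		/* Stands in for the COMPLEX_TYPE test.  */
  int component;		/* Stands in for TREE_TYPE (type).  */
  struct type_node *next;	/* Stands in for TREE_CHAIN.  */
};

static void
split_complex_sketch (struct type_node *p)
{
  for (; p; p = p->next)
    if (p->is_complex)
      {
	/* Rewrite the node as its component type.  */
	p->is_complex = 0;

	/* Splice in a second node for the imaginary part.  */
	struct type_node *imag
	  = (struct type_node *) calloc (1, sizeof *imag);
	if (!imag)
	  return;
	imag->component = p->component;
	imag->next = p->next;
	p->next = imag;

	/* Skip the newly created node.  */
	p = imag;
      }
}
#endif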
66d433c7 4637\f
9e9e5c15 4638/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4639 for a value of mode OUTMODE,
4640 with NARGS different arguments, passed as ARGS.
4641 Store the return value if RETVAL is nonzero: store it in VALUE if
4642 VALUE is nonnull, otherwise pick a convenient location. In either
4643 case return the location of the stored value.
2a631e19 4644
9e9e5c15 4645 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4646 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4647 other types of library calls. */
4648
4649rtx
4c9e08a4 4650emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4651 enum libcall_type fn_type,
9e9e5c15 4652 machine_mode outmode, int nargs, rtx_mode_t *args)
b39693dd 4653{
9bdaf1ba 4654 /* Total size in bytes of all the stack-parms scanned so far. */
4655 struct args_size args_size;
4656 /* Size of arguments before any adjustments (such as rounding). */
4657 struct args_size original_args_size;
19cb6b50 4658 int argnum;
9bdaf1ba 4659 rtx fun;
22c61100 4660 /* Todo, choose the correct decl type of orgfun. Sadly this information
4661 isn't present here, so we default to native calling abi here. */
60e2260d 4662 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
fa20f865 4663 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
9bdaf1ba 4664 int count;
9bdaf1ba 4665 rtx argblock = 0;
39cba157 4666 CUMULATIVE_ARGS args_so_far_v;
4667 cumulative_args_t args_so_far;
c87678e4 4668 struct arg
4669 {
4670 rtx value;
3754d046 4671 machine_mode mode;
c87678e4 4672 rtx reg;
4673 int partial;
241399f6 4674 struct locate_and_pad_arg_data locate;
c87678e4 4675 rtx save_area;
4676 };
9bdaf1ba 4677 struct arg *argvec;
4678 int old_inhibit_defer_pop = inhibit_defer_pop;
4679 rtx call_fusage = 0;
4680 rtx mem_value = 0;
16204096 4681 rtx valreg;
9bdaf1ba 4682 int pcc_struct_value = 0;
52acb7ae 4683 poly_int64 struct_value_size = 0;
df4b504c 4684 int flags;
9bdaf1ba 4685 int reg_parm_stack_space = 0;
e0deb08c 4686 poly_int64 needed;
3663becd 4687 rtx_insn *before_call;
8700bf9e 4688 bool have_push_fusage;
771d21fa 4689 tree tfom; /* type_for_mode (outmode, 0) */
9bdaf1ba 4690
4448f543 4691#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 4692 /* Define the boundary of the register parm stack space that needs to be
4693 saved, if any. */
75a70cf9 4694 int low_to_save = 0, high_to_save = 0;
c87678e4 4695 rtx save_area = 0; /* Place that it is saved. */
9bdaf1ba 4696#endif
4697
9bdaf1ba 4698 /* Record the initial stack usage so it can be restored before returning. */
e0deb08c 4699 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
9bdaf1ba 4700 char *initial_stack_usage_map = stack_usage_map;
e0deb08c 4701 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
a331ea1b 4702 char *stack_usage_map_buf = NULL;
9bdaf1ba 4703
45550790 4704 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4705
9bdaf1ba 4706#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 4707 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
9bdaf1ba 4708#endif
4709
1c1a1b9a 4710 /* By default, library functions cannot throw. */
df4b504c 4711 flags = ECF_NOTHROW;
4712
ab7ccfa2 4713 switch (fn_type)
4714 {
4715 case LCT_NORMAL:
2a0c81bf 4716 break;
ab7ccfa2 4717 case LCT_CONST:
2a0c81bf 4718 flags |= ECF_CONST;
4719 break;
ab7ccfa2 4720 case LCT_PURE:
2a0c81bf 4721 flags |= ECF_PURE;
ab7ccfa2 4722 break;
ab7ccfa2 4723 case LCT_NORETURN:
4724 flags |= ECF_NORETURN;
4725 break;
4726 case LCT_THROW:
1c1a1b9a 4727 flags &= ~ECF_NOTHROW;
ab7ccfa2 4728 break;
0ff18307 4729 case LCT_RETURNS_TWICE:
4730 flags = ECF_RETURNS_TWICE;
4731 break;
ab7ccfa2 4732 }
9bdaf1ba 4733 fun = orgfun;
4734
9bdaf1ba 4735 /* Ensure current function's preferred stack boundary is at least
4736 what we need. */
edb7afe8 4737 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4738 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
9bdaf1ba 4739
4740 /* If this kind of value comes back in memory,
4741 decide where in memory it should come back. */
771d21fa 4742 if (outmode != VOIDmode)
9bdaf1ba 4743 {
dc24ddbd 4744 tfom = lang_hooks.types.type_for_mode (outmode, 0);
45550790 4745 if (aggregate_value_p (tfom, 0))
771d21fa 4746 {
9bdaf1ba 4747#ifdef PCC_STATIC_STRUCT_RETURN
771d21fa 4748 rtx pointer_reg
46b3ff29 4749 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
771d21fa 4750 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4751 pcc_struct_value = 1;
4752 if (value == 0)
4753 value = gen_reg_rtx (outmode);
9bdaf1ba 4754#else /* not PCC_STATIC_STRUCT_RETURN */
771d21fa 4755 struct_value_size = GET_MODE_SIZE (outmode);
e16ceb8e 4756 if (value != 0 && MEM_P (value))
771d21fa 4757 mem_value = value;
4758 else
0ab48139 4759 mem_value = assign_temp (tfom, 1, 1);
9bdaf1ba 4760#endif
771d21fa 4761 /* This call returns a big structure. */
2dd6f9ed 4762 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
771d21fa 4763 }
9bdaf1ba 4764 }
771d21fa 4765 else
4766 tfom = void_type_node;
9bdaf1ba 4767
4768 /* ??? Unfinished: must pass the memory address as an argument. */
4769
4770 /* Copy all the libcall arguments out of the ARGS array
4771 and into a vector ARGVEC.
4772
4773 Compute how to pass each argument. We only support a very small subset
4774 of the full argument passing conventions to limit complexity here since
4775 library functions shouldn't have many args. */
4776
364c0c59 4777 argvec = XALLOCAVEC (struct arg, nargs + 1);
f0af5a88 4778 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
9bdaf1ba 4779
e1efd914 4780#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
39cba157 4781 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
e1efd914 4782#else
39cba157 4783 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
e1efd914 4784#endif
39cba157 4785 args_so_far = pack_cumulative_args (&args_so_far_v);
9bdaf1ba 4786
4787 args_size.constant = 0;
4788 args_size.var = 0;
4789
4790 count = 0;
4791
4792 push_temp_slots ();
4793
4794 /* If there's a structure value address to be passed,
4795 either pass it in the special place, or pass it as an extra argument. */
45550790 4796 if (mem_value && struct_value == 0 && ! pcc_struct_value)
9bdaf1ba 4797 {
4798 rtx addr = XEXP (mem_value, 0);
a0c938f0 4799
9bdaf1ba 4800 nargs++;
4801
a56c46d2 4802 /* Make sure it is a reasonable operand for a move or push insn. */
4803 if (!REG_P (addr) && !MEM_P (addr)
ca316360 4804 && !(CONSTANT_P (addr)
4805 && targetm.legitimate_constant_p (Pmode, addr)))
a56c46d2 4806 addr = force_operand (addr, NULL_RTX);
4807
9bdaf1ba 4808 argvec[count].value = addr;
4809 argvec[count].mode = Pmode;
4810 argvec[count].partial = 0;
4811
39cba157 4812 argvec[count].reg = targetm.calls.function_arg (args_so_far,
f387af4f 4813 Pmode, NULL_TREE, true);
39cba157 4814 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
f054eb3c 4815 NULL_TREE, 1) == 0);
9bdaf1ba 4816
4817 locate_and_pad_parm (Pmode, NULL_TREE,
2e735c0d 4818#ifdef STACK_PARMS_IN_REG_PARM_AREA
a0c938f0 4819 1,
2e735c0d 4820#else
4821 argvec[count].reg != 0,
4822#endif
2e090bf6 4823 reg_parm_stack_space, 0,
4824 NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 4825
9bdaf1ba 4826 if (argvec[count].reg == 0 || argvec[count].partial != 0
4827 || reg_parm_stack_space > 0)
241399f6 4828 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 4829
39cba157 4830 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
9bdaf1ba 4831
4832 count++;
4833 }
4834
9e9e5c15 4835 for (unsigned int i = 0; count < nargs; i++, count++)
9bdaf1ba 4836 {
9e9e5c15 4837 rtx val = args[i].first;
4838 machine_mode mode = args[i].second;
adaf4ef0 4839 int unsigned_p = 0;
9bdaf1ba 4840
4841 /* We cannot convert the arg value to the mode the library wants here;
4842 we must do it earlier, where we know the signedness of the arg. */
231bd014 4843 gcc_assert (mode != BLKmode
4844 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
9bdaf1ba 4845
a56c46d2 4846 /* Make sure it is a reasonable operand for a move or push insn. */
4847 if (!REG_P (val) && !MEM_P (val)
ca316360 4848 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
a56c46d2 4849 val = force_operand (val, NULL_RTX);
4850
39cba157 4851 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
9bdaf1ba 4852 {
ddaf7ad3 4853 rtx slot;
13f08ee7 4854 int must_copy
39cba157 4855 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
ddaf7ad3 4856
9c2a0c05 4857 /* If this was a CONST function, it is now PURE because it
4858 reads memory. */
5096b8b0 4859 if (flags & ECF_CONST)
4860 {
4861 flags &= ~ECF_CONST;
4862 flags |= ECF_PURE;
4863 }
4864
590c3166 4865 if (MEM_P (val) && !must_copy)
006e2d5a 4866 {
4867 tree val_expr = MEM_EXPR (val);
4868 if (val_expr)
4869 mark_addressable (val_expr);
4870 slot = val;
4871 }
41dc12b4 4872 else
ddaf7ad3 4873 {
dc24ddbd 4874 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
0ab48139 4875 1, 1);
ddaf7ad3 4876 emit_move_insn (slot, val);
4877 }
387bc205 4878
a683e787 4879 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4880 gen_rtx_USE (VOIDmode, slot),
4881 call_fusage);
ddaf7ad3 4882 if (must_copy)
4883 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4884 gen_rtx_CLOBBER (VOIDmode,
4885 slot),
4886 call_fusage);
4887
9bdaf1ba 4888 mode = Pmode;
ddaf7ad3 4889 val = force_operand (XEXP (slot, 0), NULL_RTX);
9bdaf1ba 4890 }
9bdaf1ba 4891
adaf4ef0 4892 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
9bdaf1ba 4893 argvec[count].mode = mode;
adaf4ef0 4894 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
39cba157 4895 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
f387af4f 4896 NULL_TREE, true);
9bdaf1ba 4897
9bdaf1ba 4898 argvec[count].partial
39cba157 4899 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
9bdaf1ba 4900
11fb947f 4901 if (argvec[count].reg == 0
4902 || argvec[count].partial != 0
4903 || reg_parm_stack_space > 0)
4904 {
4905 locate_and_pad_parm (mode, NULL_TREE,
2e735c0d 4906#ifdef STACK_PARMS_IN_REG_PARM_AREA
11fb947f 4907 1,
2e735c0d 4908#else
11fb947f 4909 argvec[count].reg != 0,
4910#endif
2e090bf6 4911 reg_parm_stack_space, argvec[count].partial,
11fb947f 4912 NULL_TREE, &args_size, &argvec[count].locate);
4913 args_size.constant += argvec[count].locate.size.constant;
4914 gcc_assert (!argvec[count].locate.size.var);
4915 }
4916#ifdef BLOCK_REG_PADDING
4917 else
4918 /* The argument is passed entirely in registers. See at which
4919 end it should be padded. */
4920 argvec[count].locate.where_pad =
4921 BLOCK_REG_PADDING (mode, NULL_TREE,
52acb7ae 4922 known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
2e735c0d 4923#endif
9bdaf1ba 4924
39cba157 4925 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
9bdaf1ba 4926 }
9bdaf1ba 4927
9bdaf1ba 4928 /* If this machine requires an external definition for library
4929 functions, write one out. */
4930 assemble_external_libcall (fun);
4931
4932 original_args_size = args_size;
e0deb08c 4933 args_size.constant = (aligned_upper_bound (args_size.constant
4934 + stack_pointer_delta,
4935 STACK_BYTES)
4936 - stack_pointer_delta);
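  /* For example (hypothetical numbers): with STACK_BYTES == 16 and
     stack_pointer_delta == 8, a 20-byte argument block rounds up as
     8 + 20 = 28 -> 32, so args_size.constant becomes 24 and the stack
     pointer is 16-byte aligned again once the arguments are pushed.  */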
9bdaf1ba 4937
e0deb08c 4938 args_size.constant = upper_bound (args_size.constant,
4939 reg_parm_stack_space);
9bdaf1ba 4940
fa20f865 4941 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 4942 args_size.constant -= reg_parm_stack_space;
9bdaf1ba 4943
e0deb08c 4944 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4945 args_size.constant);
9bdaf1ba 4946
8c0dd614 4947 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
990495a7 4948 {
e0deb08c 4949 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4950 current_function_pushed_stack_size
4951 = upper_bound (current_function_pushed_stack_size, pushed);
990495a7 4952 }
4953
4448f543 4954 if (ACCUMULATE_OUTGOING_ARGS)
4955 {
4956 /* Since the stack pointer will never be pushed, it is possible for
4957 the evaluation of a parm to clobber something we have already
4958 written to the stack. Since most function calls on RISC machines
4959 do not use the stack, this is uncommon, but must work correctly.
9bdaf1ba 4960
4448f543 4961 Therefore, we save any area of the stack that was already written
4962 and that we are using. Here we set up to do this by making a new
4963 stack usage map from the old one.
9bdaf1ba 4964
4448f543 4965 Another approach might be to try to reorder the argument
4966 evaluations to avoid this conflicting stack usage. */
9bdaf1ba 4967
4448f543 4968 needed = args_size.constant;
9bdaf1ba 4969
4448f543 4970 /* Since we will be writing into the entire argument area, the
4971 map must be allocated for its entire size, not just the part that
4972 is the responsibility of the caller. */
fa20f865 4973 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 4974 needed += reg_parm_stack_space;
9bdaf1ba 4975
e0deb08c 4976 poly_int64 limit = needed;
ccccd62c 4977 if (ARGS_GROW_DOWNWARD)
e0deb08c 4978 limit += 1;
4979
4980 /* For polynomial sizes, this is the maximum possible size needed
4981 for arguments with a constant size and offset. */
4982 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4983 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4984 const_limit);
ccccd62c 4985
4c36ffe6 4986 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 4987 stack_usage_map = stack_usage_map_buf;
9bdaf1ba 4988
4448f543 4989 if (initial_highest_arg_in_use)
8e547276 4990 memcpy (stack_usage_map, initial_stack_usage_map,
4991 initial_highest_arg_in_use);
9bdaf1ba 4992
4448f543 4993 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 4994 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 4995 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4996 needed = 0;
9bdaf1ba 4997
9c0a756f 4998 /* We must be careful to use virtual regs before they're instantiated,
a0c938f0 4999 and real regs afterwards. Loop optimization, for example, can create
9c0a756f 5000 new libcalls after we've instantiated the virtual regs, and if we
5001 use virtuals anyway, they won't match the rtl patterns. */
9bdaf1ba 5002
9c0a756f 5003 if (virtuals_instantiated)
29c05e22 5004 argblock = plus_constant (Pmode, stack_pointer_rtx,
5005 STACK_POINTER_OFFSET);
9c0a756f 5006 else
5007 argblock = virtual_outgoing_args_rtx;
4448f543 5008 }
5009 else
5010 {
5011 if (!PUSH_ARGS)
e0deb08c 5012 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
4448f543 5013 }
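
  /* A sketch (not compiled in) of what the stack_usage_map set up
     above records: one byte per byte of outgoing argument space,
     nonzero once a store to that offset has been emitted.  A later
     argument whose slot overlaps an in-use region then knows it must
     save the old contents and restore them after the call.  All names
     below are illustrative only.  */
#if 0
  char map[64];
  memset (map, 0, sizeof map);		/* Nothing written yet.  */
  memset (map + 8, 1, 16);		/* First arg stored at [8, 24).  */
  bool overlap = false;
  for (int i = 16; i < 32; i++)		/* Next arg wants [16, 32).  */
    overlap |= map[i] != 0;		/* [16, 24) is already in use.  */
#endif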
9bdaf1ba 5014
bf29c577 5015 /* We push args individually in reverse order, performing stack alignment
9bdaf1ba 5016 before the first push (the last arg). */
bf29c577 5017 if (argblock == 0)
e0deb08c 5018 anti_adjust_stack (gen_int_mode (args_size.constant
5019 - original_args_size.constant,
5020 Pmode));
9bdaf1ba 5021
bf29c577 5022 argnum = nargs - 1;
9bdaf1ba 5023
4448f543 5024#ifdef REG_PARM_STACK_SPACE
5025 if (ACCUMULATE_OUTGOING_ARGS)
5026 {
5027 /* The argument list is the property of the called routine, which
5028 may clobber it. If the fixed area has been used for previous
6e96b626 5029 parameters, we must save and restore it. */
5030 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5031 &low_to_save, &high_to_save);
9bdaf1ba 5032 }
5033#endif
c87678e4 5034
53597a55 5035 /* When expanding a normal call, args are stored in push order,
5036 which is the reverse of what we have here. */
5037 bool any_regs = false;
5038 for (int i = nargs; i-- > 0; )
5039 if (argvec[i].reg != NULL_RTX)
5040 {
5041 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5042 any_regs = true;
5043 }
5044 if (!any_regs)
5045 targetm.calls.call_args (pc_rtx, NULL_TREE);
5046
9bdaf1ba 5047 /* Push the args that need to be pushed. */
5048
8700bf9e 5049 have_push_fusage = false;
5050
9bdaf1ba 5051 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5052 are to be pushed. */
bf29c577 5053 for (count = 0; count < nargs; count++, argnum--)
9bdaf1ba 5054 {
3754d046 5055 machine_mode mode = argvec[argnum].mode;
19cb6b50 5056 rtx val = argvec[argnum].value;
9bdaf1ba 5057 rtx reg = argvec[argnum].reg;
5058 int partial = argvec[argnum].partial;
c2fd5e89 5059 unsigned int parm_align = argvec[argnum].locate.boundary;
e0deb08c 5060 poly_int64 lower_bound = 0, upper_bound = 0;
9bdaf1ba 5061
5062 if (! (reg != 0 && partial == 0))
5063 {
4143d08b 5064 rtx use;
5065
4448f543 5066 if (ACCUMULATE_OUTGOING_ARGS)
5067 {
02510658 5068 /* If this is being stored into a pre-allocated, fixed-size,
5069 stack area, save any previous data at that location. */
9bdaf1ba 5070
ccccd62c 5071 if (ARGS_GROW_DOWNWARD)
5072 {
5073 /* stack_slot is negative, but we want to index stack_usage_map
5074 with positive values. */
5075 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5076 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5077 }
5078 else
5079 {
5080 lower_bound = argvec[argnum].locate.slot_offset.constant;
5081 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5082 }
9bdaf1ba 5083
e0deb08c 5084 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5085 reg_parm_stack_space))
4448f543 5086 {
241399f6 5087 /* We need to make a save area. */
e0deb08c 5088 poly_uint64 size
241399f6 5089 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3754d046 5090 machine_mode save_mode
517be012 5091 = int_mode_for_size (size, 1).else_blk ();
241399f6 5092 rtx adr
29c05e22 5093 = plus_constant (Pmode, argblock,
241399f6 5094 argvec[argnum].locate.offset.constant);
4448f543 5095 rtx stack_area
241399f6 5096 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4448f543 5097
f9c6a9c3 5098 if (save_mode == BLKmode)
5099 {
5100 argvec[argnum].save_area
5101 = assign_stack_temp (BLKmode,
0ab48139 5102 argvec[argnum].locate.size.constant);
f9c6a9c3 5104
d2b9158b 5105 emit_block_move (validize_mem
5106 (copy_rtx (argvec[argnum].save_area)),
a0c938f0 5107 stack_area,
e0deb08c 5108 (gen_int_mode
5109 (argvec[argnum].locate.size.constant,
5110 Pmode)),
f9c6a9c3 5111 BLOCK_OP_CALL_PARM);
5112 }
5113 else
5114 {
5115 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5116
5117 emit_move_insn (argvec[argnum].save_area, stack_area);
5118 }
4448f543 5119 }
9bdaf1ba 5120 }
325d1c45 5121
c2fd5e89 5122 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
0378dbdc 5123 partial, reg, 0, argblock,
e0deb08c 5124 (gen_int_mode
5125 (argvec[argnum].locate.offset.constant, Pmode)),
241399f6 5126 reg_parm_stack_space,
a95e5776 5127 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
9bdaf1ba 5128
9bdaf1ba 5129 /* Now mark the segment we just used. */
4448f543 5130 if (ACCUMULATE_OUTGOING_ARGS)
e0deb08c 5131 mark_stack_region_used (lower_bound, upper_bound);
9bdaf1ba 5132
5133 NO_DEFER_POP;
2eb9302a 5134
4143d08b 5135 /* Indicate argument access so that alias.c knows that these
5136 values are live. */
5137 if (argblock)
29c05e22 5138 use = plus_constant (Pmode, argblock,
4143d08b 5139 argvec[argnum].locate.offset.constant);
8700bf9e 5140 else if (have_push_fusage)
5141 continue;
4143d08b 5142 else
8700bf9e 5143 {
5144 /* When arguments are pushed, trying to tell alias.c where
5145 exactly this argument is won't work, because the
5146 auto-increment causes confusion. So we merely indicate
5147 that we access something with a known mode somewhere on
5148 the stack. */
5149 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5150 gen_rtx_SCRATCH (Pmode));
5151 have_push_fusage = true;
5152 }
4143d08b 5153 use = gen_rtx_MEM (argvec[argnum].mode, use);
5154 use = gen_rtx_USE (VOIDmode, use);
5155 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
9bdaf1ba 5156 }
5157 }
5158
bf29c577 5159 argnum = nargs - 1;
9bdaf1ba 5160
82c7907c 5161 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
9bdaf1ba 5162
5163 /* Now load any reg parms into their regs. */
5164
5165 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5166 are to be pushed. */
bf29c577 5167 for (count = 0; count < nargs; count++, argnum--)
9bdaf1ba 5168 {
3754d046 5169 machine_mode mode = argvec[argnum].mode;
19cb6b50 5170 rtx val = argvec[argnum].value;
9bdaf1ba 5171 rtx reg = argvec[argnum].reg;
5172 int partial = argvec[argnum].partial;
37cd19a4 5173
9bdaf1ba 5174 /* Handle calls that pass values in multiple non-contiguous
5175 locations. The PA64 has examples of this for library calls. */
5176 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bec917cc 5177 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
9bdaf1ba 5178 else if (reg != 0 && partial == 0)
37cd19a4 5179 {
5180 emit_move_insn (reg, val);
5181#ifdef BLOCK_REG_PADDING
52acb7ae 5182 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
37cd19a4 5183
5184 /* Copied from load_register_parameters. */
5185
5186 /* Handle the case where we have a value that needs shifting up to
5187 the msb, e.g. a QImode value padded upward on a BYTES_BIG_ENDIAN
5188 machine: with 4-byte words it must be shifted left 24 bits. */
52acb7ae 5189 if (known_lt (size, UNITS_PER_WORD)
37cd19a4 5190 && (argvec[argnum].locate.where_pad
d7ab0e3d 5191 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
37cd19a4 5192 {
5193 rtx x;
52acb7ae 5194 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
37cd19a4 5195
5196 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5197 report the whole reg as used. Strictly speaking, the
5198 call only uses SIZE bytes at the msb end, but it doesn't
5199 seem worth generating rtl to say that. */
5200 reg = gen_rtx_REG (word_mode, REGNO (reg));
5201 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5202 if (x != reg)
5203 emit_move_insn (reg, x);
5204 }
5205#endif
5206 }
9bdaf1ba 5207
5208 NO_DEFER_POP;
5209 }
5210
9bdaf1ba 5211 /* Any regs containing parms remain in use through the call. */
5212 for (count = 0; count < nargs; count++)
5213 {
5214 rtx reg = argvec[count].reg;
5215 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5216 use_group_regs (&call_fusage, reg);
5217 else if (reg != 0)
6c6f16e5 5218 {
5219 int partial = argvec[count].partial;
5220 if (partial)
5221 {
5222 int nregs;
5223 gcc_assert (partial % UNITS_PER_WORD == 0);
5224 nregs = partial / UNITS_PER_WORD;
5225 use_regs (&call_fusage, REGNO (reg), nregs);
5226 }
5227 else
5228 use_reg (&call_fusage, reg);
5229 }
9bdaf1ba 5230 }
5231
5232 /* Pass the function the address in which to return a structure value. */
45550790 5233 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
9bdaf1ba 5234 {
45550790 5235 emit_move_insn (struct_value,
9bdaf1ba 5236 force_reg (Pmode,
5237 force_operand (XEXP (mem_value, 0),
5238 NULL_RTX)));
8ad4c111 5239 if (REG_P (struct_value))
45550790 5240 use_reg (&call_fusage, struct_value);
9bdaf1ba 5241 }
5242
5243 /* Don't allow popping to be deferred, since then
5244 cse'ing of library calls could delete a call and leave the pop. */
5245 NO_DEFER_POP;
16204096 5246 valreg = (mem_value == 0 && outmode != VOIDmode
578d1295 5247 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
9bdaf1ba 5248
481feae3 5249 /* Stack must be properly aligned now. */
e0deb08c 5250 gcc_assert (multiple_p (stack_pointer_delta,
5251 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
fa4f1f09 5252
644c283b 5253 before_call = get_last_insn ();
5254
9bdaf1ba 5255 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5256 will set inhibit_defer_pop to that value. */
20f7032f 5257 /* The return type is needed to decide how many bytes the function pops.
5258 Signedness plays no role in that, so for simplicity, we pretend it's
5259 always signed. We also assume that the list of arguments passed has
5260 no impact, so we pretend it is unknown. */
9bdaf1ba 5261
4ee9c684 5262 emit_call_1 (fun, NULL,
c87678e4 5263 get_identifier (XSTR (orgfun, 0)),
771d21fa 5264 build_function_type (tfom, NULL_TREE),
c87678e4 5265 original_args_size.constant, args_size.constant,
9bdaf1ba 5266 struct_value_size,
39cba157 5267 targetm.calls.function_arg (args_so_far,
f387af4f 5268 VOIDmode, void_type_node, true),
16204096 5269 valreg,
39cba157 5270 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
9bdaf1ba 5271
fcf56aaf 5272 if (flag_ipa_ra)
2e3b0d0f 5273 {
9ed997be 5274 rtx datum = orgfun;
2e3b0d0f 5275 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
9ed997be 5276 rtx_call_insn *last = last_call_insn ();
2e3b0d0f 5277 add_reg_note (last, REG_CALL_DECL, datum);
5278 }
5279
37cd19a4 5280 /* Right-shift returned value if necessary. */
5281 if (!pcc_struct_value
5282 && TYPE_MODE (tfom) != BLKmode
5283 && targetm.calls.return_in_msb (tfom))
5284 {
5285 shift_return_value (TYPE_MODE (tfom), false, valreg);
5286 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5287 }
5288
53597a55 5289 targetm.calls.end_call_args ();
5290
3072d30e 5291 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5292 that it should complain if nonvolatile values are live. For
5293 functions that cannot return, inform flow that control does not
5294 fall through. */
4fec1d6c 5295 if (flags & ECF_NORETURN)
644c283b 5296 {
9239aee6 5297 /* The barrier note must be emitted
644c283b 5298 immediately after the CALL_INSN. Some ports emit more than
5299 just a CALL_INSN above, so we must search for it here. */
3663becd 5300 rtx_insn *last = get_last_insn ();
6d7dc5b9 5301 while (!CALL_P (last))
644c283b 5302 {
5303 last = PREV_INSN (last);
5304 /* There was no CALL_INSN? */
231bd014 5305 gcc_assert (last != before_call);
644c283b 5306 }
5307
9239aee6 5308 emit_barrier_after (last);
644c283b 5309 }
5310
43926c6a 5311 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5312 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5313 if (flags & ECF_NOTHROW)
5314 {
3663becd 5315 rtx_insn *last = get_last_insn ();
43926c6a 5316 while (!CALL_P (last))
5317 {
5318 last = PREV_INSN (last);
5319 /* There was no CALL_INSN? */
5320 gcc_assert (last != before_call);
5321 }
5322
5323 make_reg_eh_region_note_nothrow_nononlocal (last);
5324 }
5325
9bdaf1ba 5326 /* Now restore inhibit_defer_pop to its actual original value. */
5327 OK_DEFER_POP;
5328
5329 pop_temp_slots ();
5330
5331 /* Copy the value to the right place. */
20f7032f 5332 if (outmode != VOIDmode && retval)
9bdaf1ba 5333 {
5334 if (mem_value)
5335 {
5336 if (value == 0)
5337 value = mem_value;
5338 if (value != mem_value)
5339 emit_move_insn (value, mem_value);
5340 }
40651bac 5341 else if (GET_CODE (valreg) == PARALLEL)
5342 {
5343 if (value == 0)
5344 value = gen_reg_rtx (outmode);
4c3a0ea5 5345 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
40651bac 5346 }
9bdaf1ba 5347 else
4e1a3169 5348 {
3b2411a8 5349 /* Convert to the proper mode if a promotion has been active. */
4e1a3169 5350 if (GET_MODE (valreg) != outmode)
5351 {
5352 int unsignedp = TYPE_UNSIGNED (tfom);
5353
3b2411a8 5354 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5355 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4e1a3169 5356 == GET_MODE (valreg));
4e1a3169 5357 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5358 }
5359
5360 if (value != 0)
5361 emit_move_insn (value, valreg);
5362 else
5363 value = valreg;
5364 }
9bdaf1ba 5365 }
5366
4448f543 5367 if (ACCUMULATE_OUTGOING_ARGS)
9bdaf1ba 5368 {
4448f543 5369#ifdef REG_PARM_STACK_SPACE
5370 if (save_area)
6e96b626 5371 restore_fixed_argument_area (save_area, argblock,
5372 high_to_save, low_to_save);
9bdaf1ba 5373#endif
c87678e4 5374
4448f543 5375 /* If we saved any argument areas, restore them. */
5376 for (count = 0; count < nargs; count++)
5377 if (argvec[count].save_area)
5378 {
3754d046 5379 machine_mode save_mode = GET_MODE (argvec[count].save_area);
29c05e22 5380 rtx adr = plus_constant (Pmode, argblock,
241399f6 5381 argvec[count].locate.offset.constant);
5382 rtx stack_area = gen_rtx_MEM (save_mode,
5383 memory_address (save_mode, adr));
4448f543 5384
f9c6a9c3 5385 if (save_mode == BLKmode)
5386 emit_block_move (stack_area,
d2b9158b 5387 validize_mem
5388 (copy_rtx (argvec[count].save_area)),
e0deb08c 5389 (gen_int_mode
5390 (argvec[count].locate.size.constant, Pmode)),
f9c6a9c3 5391 BLOCK_OP_CALL_PARM);
5392 else
5393 emit_move_insn (stack_area, argvec[count].save_area);
4448f543 5394 }
9bdaf1ba 5395
4448f543 5396 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5397 stack_usage_map = initial_stack_usage_map;
e0deb08c 5398 stack_usage_watermark = initial_stack_usage_watermark;
4448f543 5399 }
b39693dd 5400
dd045aee 5401 free (stack_usage_map_buf);
a331ea1b 5402
20f7032f 5403 return value;
5404
5405}
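
/* A rough, hypothetical sketch of the interface documented above.
   Ordinary callers go through the emit_library_call and
   emit_library_call_value wrappers rather than this worker directly;
   op0 and op1 are placeholder DImode rtxes and "__example_div" is not
   a real libgcc routine.  */
#if 0
static rtx
example_libcall (rtx op0, rtx op1)
{
  rtx fun = gen_rtx_SYMBOL_REF (Pmode, "__example_div");
  rtx_mode_t args[2] = { rtx_mode_t (op0, DImode),
			 rtx_mode_t (op1, DImode) };
  return emit_library_call_value_1 (1, fun, NULL_RTX, LCT_NORMAL,
				    DImode, 2, args);
}
#endif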
5406\f
058a1b7a 5407
66d433c7 5408/* Store a single argument for a function call
5409 into the register or memory area where it must be passed.
5410 *ARG describes the argument value and where to pass it.
5411
5412 ARGBLOCK is the address of the stack-block for all the arguments,
f9e15121 5413 or 0 on a machine where arguments are pushed individually.
66d433c7 5414
5415 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
c87678e4 5416 so we must be careful about how the stack is used.
66d433c7 5417
5418 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5419 argument stack. Under ACCUMULATE_OUTGOING_ARGS this indicates
5420 that we need not worry about saving and restoring the stack.
5421
57679d39 5422 FLAGS is a bitmask of ECF_* flags describing the call.
c87678e4 5423
d10cfa8d 5424 Return nonzero if this arg should cause sibcall failure,
57679d39 5425 zero otherwise. */
66d433c7 5426
57679d39 5427static int
4c9e08a4 5428store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5429 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
66d433c7 5430{
19cb6b50 5431 tree pval = arg->tree_value;
66d433c7 5432 rtx reg = 0;
5433 int partial = 0;
e0deb08c 5434 poly_int64 used = 0;
5435 poly_int64 lower_bound = 0, upper_bound = 0;
57679d39 5436 int sibcall_failure = 0;
66d433c7 5437
5438 if (TREE_CODE (pval) == ERROR_MARK)
57679d39 5439 return 1;
66d433c7 5440
1b117c60 5441 /* Push a new temporary level for any temporaries we make for
5442 this argument. */
5443 push_temp_slots ();
5444
02510658 5445 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
66d433c7 5446 {
4448f543 5447 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5448 save any previous data at that location. */
5449 if (argblock && ! variable_size && arg->stack)
5450 {
ccccd62c 5451 if (ARGS_GROW_DOWNWARD)
5452 {
5453 /* stack_slot is negative, but we want to index stack_usage_map
5454 with positive values. */
5455 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
e0deb08c 5456 {
5457 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5458 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5459 }
ccccd62c 5460 else
5461 upper_bound = 0;
66d433c7 5462
ccccd62c 5463 lower_bound = upper_bound - arg->locate.size.constant;
5464 }
4448f543 5465 else
ccccd62c 5466 {
5467 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
e0deb08c 5468 {
5469 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5470 lower_bound = rtx_to_poly_int64 (offset);
5471 }
ccccd62c 5472 else
5473 lower_bound = 0;
66d433c7 5474
ccccd62c 5475 upper_bound = lower_bound + arg->locate.size.constant;
5476 }
66d433c7 5477
e0deb08c 5478 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5479 reg_parm_stack_space))
66d433c7 5480 {
241399f6 5481 /* We need to make a save area. */
e0deb08c 5482 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
517be012 5483 machine_mode save_mode
5484 = int_mode_for_size (size, 1).else_blk ();
241399f6 5485 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5486 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4448f543 5487
5488 if (save_mode == BLKmode)
5489 {
9f495e8d 5490 arg->save_area
5491 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
4448f543 5492 preserve_temp_slots (arg->save_area);
d2b9158b 5493 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5494 stack_area,
e0deb08c 5495 (gen_int_mode
5496 (arg->locate.size.constant, Pmode)),
0378dbdc 5497 BLOCK_OP_CALL_PARM);
4448f543 5498 }
5499 else
5500 {
5501 arg->save_area = gen_reg_rtx (save_mode);
5502 emit_move_insn (arg->save_area, stack_area);
5503 }
66d433c7 5504 }
5505 }
5506 }
b3caaea3 5507
66d433c7 5508 /* If this isn't going to be placed on both the stack and in registers,
5509 set up the register and number of words. */
5510 if (! arg->pass_on_stack)
04d6fcf8 5511 {
5512 if (flags & ECF_SIBCALL)
5513 reg = arg->tail_call_reg;
5514 else
5515 reg = arg->reg;
5516 partial = arg->partial;
5517 }
66d433c7 5518
231bd014 5519 /* Being passed entirely in a register. We shouldn't be called in
5520 this case. */
5521 gcc_assert (reg == 0 || partial != 0);
a0c938f0 5522
f28c7a75 5523 /* If this arg needs special alignment, don't load the registers
5524 here. */
5525 if (arg->n_aligned_regs != 0)
5526 reg = 0;
c87678e4 5527
f28c7a75 5528 /* If this is being passed partially in a register, we can't evaluate
66d433c7 5529 it directly into its stack slot. Otherwise, we can. */
5530 if (arg->value == 0)
f848041f 5531 {
f848041f 5532 /* stack_arg_under_construction is nonzero if a function argument is
5533 being evaluated directly into the outgoing argument list and
5534 expand_call must take special action to preserve the argument list
5535 if it is called recursively.
5536
5537 For scalar function arguments stack_usage_map is sufficient to
5538 determine which stack slots must be saved and restored. Scalar
5539 arguments in general have pass_on_stack == 0.
5540
5541 If this argument is initialized by a function which takes the
5542 address of the argument (a C++ constructor or a C function
5543 returning a BLKmode structure), then stack_usage_map is
5544 insufficient and expand_call must push the stack around the
5545 function call. Such arguments have pass_on_stack == 1.
5546
5547 Note that it is always safe to set stack_arg_under_construction,
5548 but this generates suboptimal code if set when not needed. */
5549
5550 if (arg->pass_on_stack)
5551 stack_arg_under_construction++;
4448f543 5552
7dbf1af4 5553 arg->value = expand_expr (pval,
5554 (partial
5555 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5556 ? NULL_RTX : arg->stack,
a35a63ff 5557 VOIDmode, EXPAND_STACK_PARM);
1c0c37a5 5558
5559 /* If we are promoting object (or for any other reason) the mode
5560 doesn't agree, convert the mode. */
5561
1560ef8f 5562 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5563 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5564 arg->value, arg->unsignedp);
1c0c37a5 5565
f848041f 5566 if (arg->pass_on_stack)
5567 stack_arg_under_construction--;
f848041f 5568 }
66d433c7 5569
63864e1c 5570 /* Check for overlap with already clobbered argument area. */
ff6c0ab2 5571 if ((flags & ECF_SIBCALL)
5572 && MEM_P (arg->value)
e0deb08c 5573 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5574 arg->locate.size.constant))
ff6c0ab2 5575 sibcall_failure = 1;
63864e1c 5576
66d433c7 5577 /* Don't allow anything left on stack from computation
5578 of argument to alloca. */
02510658 5579 if (flags & ECF_MAY_BE_ALLOCA)
66d433c7 5580 do_pending_stack_adjust ();
5581
5582 if (arg->value == arg->stack)
8a06f2d4 5583 /* If the value is already in the stack slot, we are done. */
5584 ;
1c0c37a5 5585 else if (arg->mode != BLKmode)
66d433c7 5586 {
851fc2b3 5587 unsigned int parm_align;
66d433c7 5588
5589 /* Argument is a scalar, not entirely passed in registers.
5590 (If part is passed in registers, arg->partial says how much
5591 and emit_push_insn will take care of putting it there.)
c87678e4 5592
66d433c7 5593 Push it, and if its size is less than the
5594 amount of space allocated to it,
5595 also bump stack pointer by the additional space.
5596 Note that in C the default argument promotions
5597 will prevent such mismatches. */
5598
adbaa93b 5599 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5600 ? 0 : GET_MODE_SIZE (arg->mode));
532d84ff 5601
66d433c7 5602 /* Compute how much space the push instruction will push.
5603 On many machines, pushing a byte will advance the stack
5604 pointer by a halfword. */
5605#ifdef PUSH_ROUNDING
5606 size = PUSH_ROUNDING (size);
5607#endif
5608 used = size;
5609
5610 /* Compute how much space the argument should get:
5611 round up to a multiple of the alignment for arguments. */
d7ab0e3d 5612 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5613 != PAD_NONE)
adbaa93b 5614 /* At the moment we don't (need to) support ABIs for which the
5615 padding isn't known at compile time. In principle it should
5616 be easy to add though. */
5617 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
66d433c7 5618
851fc2b3 5619 /* Compute the alignment of the pushed argument. */
5620 parm_align = arg->locate.boundary;
d7ab0e3d 5621 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5622 == PAD_DOWNWARD)
851fc2b3 5623 {
e0deb08c 5624 poly_int64 pad = used - size;
5625 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5626 if (pad_align != 0)
5627 parm_align = MIN (parm_align, pad_align);
851fc2b3 5628 }
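      /* For instance (hypothetical numbers): a 5-byte value padded
	 downward in an 8-byte slot leaves pad == 3, whose guaranteed
	 alignment is only 1 byte, so parm_align drops to 8 bits.  */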
5629
66d433c7 5630 /* This isn't already where we want it on the stack, so put it there.
5631 This can either be done with push or copy insns. */
e0deb08c 5632 if (maybe_ne (used, 0)
532d84ff 5633 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5634 NULL_RTX, parm_align, partial, reg, used - size,
5635 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5636 reg_parm_stack_space,
5637 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
a95e5776 5638 sibcall_failure = 1;
d5c9a99f 5639
5640 /* Unless this is a partially-in-register argument, the argument is now
5641 in the stack. */
5642 if (partial == 0)
5643 arg->value = arg->stack;
66d433c7 5644 }
5645 else
5646 {
5647 /* BLKmode, at least partly to be pushed. */
5648
cf78c9ff 5649 unsigned int parm_align;
e0deb08c 5650 poly_int64 excess;
66d433c7 5651 rtx size_rtx;
5652
5653 /* Pushing a nonscalar.
5654 If part is passed in registers, PARTIAL says how much
5655 and emit_push_insn will take care of putting it there. */
5656
5657 /* Round its size up to a multiple
5658 of the allocation unit for arguments. */
5659
241399f6 5660 if (arg->locate.size.var != 0)
66d433c7 5661 {
5662 excess = 0;
241399f6 5663 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
66d433c7 5664 }
5665 else
5666 {
f054eb3c 5667 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5668 for BLKmode is careful to avoid it. */
5669 excess = (arg->locate.size.constant
532d84ff 5670 - arg_int_size_in_bytes (TREE_TYPE (pval))
f054eb3c 5671 + partial);
532d84ff 5672 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
b9c74b4d 5673 NULL_RTX, TYPE_MODE (sizetype),
5674 EXPAND_NORMAL);
66d433c7 5675 }
5676
c5dc0c32 5677 parm_align = arg->locate.boundary;
cf78c9ff 5678
5679 /* When an argument is padded down, the block is aligned to
5680 PARM_BOUNDARY, but the actual argument isn't. */
d7ab0e3d 5681 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5682 == PAD_DOWNWARD)
cf78c9ff 5683 {
241399f6 5684 if (arg->locate.size.var)
cf78c9ff 5685 parm_align = BITS_PER_UNIT;
e0deb08c 5686 else
cf78c9ff 5687 {
e0deb08c 5688 unsigned int excess_align
5689 = known_alignment (excess) * BITS_PER_UNIT;
5690 if (excess_align != 0)
5691 parm_align = MIN (parm_align, excess_align);
cf78c9ff 5692 }
5693 }
5694
e16ceb8e 5695 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
57679d39 5696 {
5697 /* emit_push_insn might not work properly if arg->value and
241399f6 5698 argblock + arg->locate.offset areas overlap. */
57679d39 5699 rtx x = arg->value;
e0deb08c 5700 poly_int64 i = 0;
57679d39 5701
7e3747b0 5702 if (strip_offset (XEXP (x, 0), &i)
5703 == crtl->args.internal_arg_pointer)
57679d39 5704 {
c62f411b 5705 /* arg.locate doesn't contain the pretend_args_size offset,
5706 it's part of argblock. Ensure we don't count it in I. */
5707 if (STACK_GROWS_DOWNWARD)
5708 i -= crtl->args.pretend_args_size;
5709 else
5710 i += crtl->args.pretend_args_size;
5711
21dda4ee 5712 /* expand_call should ensure this. */
231bd014 5713 gcc_assert (!arg->locate.offset.var
e0deb08c 5714 && arg->locate.size.var == 0);
5715 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
57679d39 5716
e0deb08c 5717 if (known_eq (arg->locate.offset.constant, i))
2ad152f7 5718 {
5719 /* Even though they appear to be at the same location,
5720 if part of the outgoing argument is in registers,
5721 they aren't really at the same location. Check for
5722 this by making sure that the incoming size is the
5723 same as the outgoing size. */
e0deb08c 5724 if (maybe_ne (arg->locate.size.constant, size_val))
57679d39 5725 sibcall_failure = 1;
5726 }
e0deb08c 5727 else if (maybe_in_range_p (arg->locate.offset.constant,
5728 i, size_val))
5729 sibcall_failure = 1;
5730 /* Use arg->locate.size.constant instead of size_rtx
5731 because we only care about the part of the argument
5732 on the stack. */
5733 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5734 arg->locate.size.constant))
5735 sibcall_failure = 1;
57679d39 5736 }
5737 }
5738
532d84ff 5739 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5740 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5741 parm_align, partial, reg, excess, argblock,
5742 ARGS_SIZE_RTX (arg->locate.offset),
5743 reg_parm_stack_space,
5744 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
66d433c7 5745
d5c9a99f 5746 /* Unless this is a partially-in-register argument, the argument is now
5747 in the stack.
66d433c7 5748
d5c9a99f 5749 ??? Unlike the case above, in which we want the actual
5750 address of the data, so that we can load it directly into a
5751 register, here we want the address of the stack slot, so that
5752 it's properly aligned for word-by-word copying or something
5753 like that. It's not clear that this is always correct. */
5754 if (partial == 0)
5755 arg->value = arg->stack_slot;
5756 }
b600a907 5757
5758 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5759 {
5760 tree type = TREE_TYPE (arg->tree_value);
5761 arg->parallel_value
5762 = emit_group_load_into_temps (arg->reg, arg->value, type,
5763 int_size_in_bytes (type));
5764 }
66d433c7 5765
a35a63ff 5766 /* Mark all slots this store used. */
5767 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5768 && argblock && ! variable_size && arg->stack)
e0deb08c 5769 mark_stack_region_used (lower_bound, upper_bound);
a35a63ff 5770
66d433c7 5771 /* Once we have pushed something, pops can't safely
5772 be deferred during the rest of the arguments. */
5773 NO_DEFER_POP;
5774
0ab48139 5775 /* Free any temporary slots made in processing this argument. */
1b117c60 5776 pop_temp_slots ();
57679d39 5777
5778 return sibcall_failure;
66d433c7 5779}
890f0c17 5780
0336f0f0 5781/* Nonzero if we do not know how to pass TYPE solely in registers. */
890f0c17 5782
0336f0f0 5783bool
3754d046 5784must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 5785 const_tree type)
0336f0f0 5786{
5787 if (!type)
5788 return false;
5789
5790 /* If the type has variable size... */
5791 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5792 return true;
890f0c17 5793
0336f0f0 5794 /* If the type is marked as addressable (it is required
5795 to be constructed into the stack)... */
5796 if (TREE_ADDRESSABLE (type))
5797 return true;
5798
5799 return false;
5800}
890f0c17 5801
0d568ddf 5802/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
0336f0f0 5803 takes trailing padding of a structure into account. */
5804/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
890f0c17 5805
5806bool
3754d046 5807must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
890f0c17 5808{
5809 if (!type)
dceaa0b1 5810 return false;
890f0c17 5811
5812 /* If the type has variable size... */
5813 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5814 return true;
5815
5816 /* If the type is marked as addressable (it is required
5817 to be constructed into the stack)... */
5818 if (TREE_ADDRESSABLE (type))
5819 return true;
5820
532d84ff 5821 if (TYPE_EMPTY_P (type))
5822 return false;
5823
890f0c17 5824 /* If the padding and mode of the type are such that a copy into
5825 a register would put it into the wrong part of the register. */
5826 if (mode == BLKmode
5827 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
d7ab0e3d 5828 && (targetm.calls.function_arg_padding (mode, type)
5829 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
890f0c17 5830 return true;
5831
5832 return false;
5833}
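
/* A self-contained sketch of the same trailing-padding test, with the
   tree accessors and target hooks replaced by plain parameters; all
   names below are hypothetical.  E.g. padding_forces_stack (6, 4, true,
   SKETCH_PAD_UPWARD) is true: a 6-byte BLKmode value does not fill
   whole 4-byte parameter slots, so a register copy would leave the
   value at the wrong end.  */
#if 0
#include <stdbool.h>

enum sketch_pad { SKETCH_PAD_NONE, SKETCH_PAD_UPWARD, SKETCH_PAD_DOWNWARD };

static bool
padding_forces_stack (int size_in_bytes, int parm_slot_bytes,
		      bool bytes_big_endian, enum sketch_pad padding)
{
  /* A value that fills whole parameter slots can be copied safely.  */
  if (size_in_bytes % parm_slot_bytes == 0)
    return false;

  /* Otherwise the copy misplaces the value when the padding sits at
     the end the register copy fills first.  */
  return padding == (bytes_big_endian ? SKETCH_PAD_UPWARD
				      : SKETCH_PAD_DOWNWARD);
}
#endif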
3f82fe35 5834
5835/* Tell the garbage collector about GTY markers in this source file. */
5836#include "gt-calls.h"