]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/calls.c
Daily bump.
[thirdparty/gcc.git] / gcc / calls.c
CommitLineData
51bbfa0c 1/* Convert function calls to rtl insns, for GNU C compiler.
cbe34bb5 2 Copyright (C) 1989-2017 Free Software Foundation, Inc.
51bbfa0c 3
1322177d 4This file is part of GCC.
51bbfa0c 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
51bbfa0c 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
51bbfa0c
RS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
51bbfa0c
RS
19
20#include "config.h"
670ee920 21#include "system.h"
4977bab6 22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5
AM
24#include "target.h"
25#include "rtl.h"
c7131fb2
AM
26#include "tree.h"
27#include "gimple.h"
957060b5 28#include "predict.h"
4d0cdd0c 29#include "memmodel.h"
957060b5
AM
30#include "tm_p.h"
31#include "stringpool.h"
32#include "expmed.h"
33#include "optabs.h"
957060b5
AM
34#include "emit-rtl.h"
35#include "cgraph.h"
36#include "diagnostic-core.h"
40e23961 37#include "fold-const.h"
d8a2d370
DN
38#include "stor-layout.h"
39#include "varasm.h"
2fb9a547 40#include "internal-fn.h"
36566b39
PK
41#include "dojump.h"
42#include "explow.h"
43#include "calls.h"
670ee920 44#include "expr.h"
d6f4ec51 45#include "output.h"
b0c48229 46#include "langhooks.h"
b2dd096b 47#include "except.h"
6fb5fa3c 48#include "dbgcnt.h"
e9f56944 49#include "rtl-iter.h"
d5e254e1 50#include "tree-chkp.h"
8bd9f164
MS
51#include "tree-vrp.h"
52#include "tree-ssanames.h"
d5e254e1 53#include "rtl-chkp.h"
8bd9f164 54#include "intl.h"
314e6352
ML
55#include "stringpool.h"
56#include "attribs.h"
76e048a8 57
c795bca9
BS
58/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
59#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
51bbfa0c
RS
60
/* Data structure and subroutines used within expand_call.  */

/* Per-argument bookkeeping used while expanding one call: where the
   argument lives (register, stack slot, or both), how it was promoted,
   and any temporaries needed to move it there.  One of these is filled
   in for each actual argument of the call being expanded.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds a number
     of a special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds an index of the parm the bounds are bound to.
     -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds an
     offset of a pointer in this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
122
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
   stack location's tail call argument has been already stored into the stack.
   This bitmap is used to prevent sibling call optimization if function tries
   to use parent's incoming argument slots when they have been already
   overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

/* Forward declarations for the static helpers defined later in this file.  */
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
51bbfa0c 181\f
51bbfa0c
RS
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern: test the low bit(s)
	     that distinguish a descriptor from a plain code address.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor: first word is the static chain, second
	     word is the code address.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer
	 which is of ptr_mode.  In this case, it should be converted into
	 address mode to be a valid address for a memory rtx pattern.
	 See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}
298
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call, funmem, pat;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg, GEN_INT (struct_value_size));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (n_popped > 0
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				GEN_INT (struct_value_size));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If a pop is needed, stack realign must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
549
25f0609b
BE
/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  /* NOTE(review): DECL_NAME dereferences FNDECL here, so the `fndecl &&'
     test in the condition below is dead — callers must pass a non-null
     decl.  Left as-is to preserve behavior.  */
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  /* The builtin forms of alloca are always alloca-like, regardless of
     the name-based heuristics above.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	flags |= ECF_MAY_BE_ALLOCA;
	break;
      default:
	break;
      }

  return flags;
}
625
e384e6b5
BS
626/* Similar to special_function_p; return a set of ERF_ flags for the
627 function FNDECL. */
628static int
629decl_return_flags (tree fndecl)
630{
631 tree attr;
632 tree type = TREE_TYPE (fndecl);
633 if (!type)
634 return 0;
635
636 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
637 if (!attr)
638 return 0;
639
640 attr = TREE_VALUE (TREE_VALUE (attr));
641 if (!attr || TREE_STRING_LENGTH (attr) < 1)
642 return 0;
643
644 switch (TREE_STRING_POINTER (attr)[0])
645 {
646 case '1':
647 case '2':
648 case '3':
649 case '4':
650 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
651
652 case 'm':
653 return ERF_NOALIAS;
654
655 case '.':
656 default:
657 return 0;
658 }
659}
660
bae802f9 661/* Return nonzero when FNDECL represents a call to setjmp. */
7393c642 662
f2d33f13 663int
6ea2b70d 664setjmp_call_p (const_tree fndecl)
f2d33f13 665{
275311c4
MP
666 if (DECL_IS_RETURNS_TWICE (fndecl))
667 return ECF_RETURNS_TWICE;
f2d33f13
JH
668 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
669}
670
726a989a 671
159e8ef0 672/* Return true if STMT may be an alloca call. */
726a989a
RB
673
674bool
159e8ef0 675gimple_maybe_alloca_call_p (const gimple *stmt)
726a989a
RB
676{
677 tree fndecl;
678
679 if (!is_gimple_call (stmt))
680 return false;
681
682 fndecl = gimple_call_fndecl (stmt);
683 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
684 return true;
685
686 return false;
687}
688
159e8ef0
BE
689/* Return true if STMT is a builtin alloca call. */
690
691bool
692gimple_alloca_call_p (const gimple *stmt)
693{
694 tree fndecl;
695
696 if (!is_gimple_call (stmt))
697 return false;
698
699 fndecl = gimple_call_fndecl (stmt);
700 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
701 switch (DECL_FUNCTION_CODE (fndecl))
702 {
703 case BUILT_IN_ALLOCA:
704 case BUILT_IN_ALLOCA_WITH_ALIGN:
705 return true;
706 default:
707 break;
708 }
709
710 return false;
711}
712
713/* Return true when exp contains a builtin alloca call. */
726a989a 714
c986baf6 715bool
6ea2b70d 716alloca_call_p (const_tree exp)
c986baf6 717{
2284b034 718 tree fndecl;
c986baf6 719 if (TREE_CODE (exp) == CALL_EXPR
2284b034 720 && (fndecl = get_callee_fndecl (exp))
159e8ef0
BE
721 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
722 switch (DECL_FUNCTION_CODE (fndecl))
723 {
724 case BUILT_IN_ALLOCA:
725 case BUILT_IN_ALLOCA_WITH_ALIGN:
726 return true;
727 default:
728 break;
729 }
730
c986baf6
JH
731 return false;
732}
733
0a35513e
AH
/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  /* Enumerate every transactional-memory builtin: commit/abort control,
     the TM memory intrinsics, and the per-type store/load/log forms.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}
796
/* Detect flags (function attributes) from the function decl or type node.
   EXP is either a FUNCTION_DECL (all attributes consulted) or a function
   TYPE (only type-level attributes such as const apply).  Returns a mask
   of ECF_* flags.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  /* A volatile function type means the function never returns; a
     never-returning const/pure function may still loop forever.  */
  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
866
f027e0a2
JM
867/* Detect flags from a CALL_EXPR. */
868
869int
fa233e34 870call_expr_flags (const_tree t)
f027e0a2
JM
871{
872 int flags;
873 tree decl = get_callee_fndecl (t);
874
875 if (decl)
876 flags = flags_from_decl_or_type (decl);
1691b2e1
TV
877 else if (CALL_EXPR_FN (t) == NULL_TREE)
878 flags = internal_fn_flags (CALL_EXPR_IFN (t));
f027e0a2
JM
879 else
880 {
4c640e26
EB
881 tree type = TREE_TYPE (CALL_EXPR_FN (t));
882 if (type && TREE_CODE (type) == POINTER_TYPE)
883 flags = flags_from_decl_or_type (TREE_TYPE (type));
f027e0a2
JM
884 else
885 flags = 0;
4c640e26
EB
886 if (CALL_EXPR_BY_DESCRIPTOR (t))
887 flags |= ECF_BY_DESCRIPTOR;
f027e0a2
JM
888 }
889
890 return flags;
891}
892
16a16ec7
AM
893/* Return true if TYPE should be passed by invisible reference. */
894
895bool
896pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
897 tree type, bool named_arg)
898{
899 if (type)
900 {
901 /* If this type contains non-trivial constructors, then it is
902 forbidden for the middle-end to create any new copies. */
903 if (TREE_ADDRESSABLE (type))
904 return true;
905
906 /* GCC post 3.4 passes *all* variable sized types by reference. */
907 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
908 return true;
909
910 /* If a record type should be passed the same as its first (and only)
911 member, use the type and mode of that member. */
912 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
913 {
914 type = TREE_TYPE (first_field (type));
915 mode = TYPE_MODE (type);
916 }
917 }
918
919 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
920 type, named_arg);
921}
922
923/* Return true if TYPE, which is passed by reference, should be callee
924 copied instead of caller copied. */
925
926bool
927reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
928 tree type, bool named_arg)
929{
930 if (type && TREE_ADDRESSABLE (type))
931 return false;
932 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
933 named_arg);
934}
935
936
20efdf74
JL
937/* Precompute all register parameters as described by ARGS, storing values
938 into fields within the ARGS array.
939
940 NUM_ACTUALS indicates the total number elements in the ARGS array.
941
942 Set REG_PARM_SEEN if we encounter a register parameter. */
943
944static void
27e29549
RH
945precompute_register_parameters (int num_actuals, struct arg_data *args,
946 int *reg_parm_seen)
20efdf74
JL
947{
948 int i;
949
950 *reg_parm_seen = 0;
951
952 for (i = 0; i < num_actuals; i++)
953 if (args[i].reg != 0 && ! args[i].pass_on_stack)
954 {
955 *reg_parm_seen = 1;
956
957 if (args[i].value == 0)
958 {
959 push_temp_slots ();
84217346 960 args[i].value = expand_normal (args[i].tree_value);
20efdf74
JL
961 preserve_temp_slots (args[i].value);
962 pop_temp_slots ();
20efdf74
JL
963 }
964
965 /* If we are to promote the function arg to a wider mode,
966 do it now. */
967
968 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
969 args[i].value
970 = convert_modes (args[i].mode,
971 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
972 args[i].value, args[i].unsignedp);
973
a7adbbcb
L
974 /* If the value is a non-legitimate constant, force it into a
975 pseudo now. TLS symbols sometimes need a call to resolve. */
976 if (CONSTANT_P (args[i].value)
977 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
978 args[i].value = force_reg (args[i].mode, args[i].value);
979
27e29549
RH
980 /* If we're going to have to load the value by parts, pull the
981 parts into pseudos. The part extraction process can involve
982 non-trivial computation. */
983 if (GET_CODE (args[i].reg) == PARALLEL)
984 {
985 tree type = TREE_TYPE (args[i].tree_value);
8df3dbb7 986 args[i].parallel_value
27e29549
RH
987 = emit_group_load_into_temps (args[i].reg, args[i].value,
988 type, int_size_in_bytes (type));
989 }
990
f725a3ec 991 /* If the value is expensive, and we are inside an appropriately
20efdf74
JL
992 short loop, put the value into a pseudo and then put the pseudo
993 into the hard reg.
994
995 For small register classes, also do this if this call uses
996 register parameters. This is to avoid reload conflicts while
997 loading the parameters registers. */
998
27e29549
RH
999 else if ((! (REG_P (args[i].value)
1000 || (GET_CODE (args[i].value) == SUBREG
1001 && REG_P (SUBREG_REG (args[i].value)))))
1002 && args[i].mode != BLKmode
e548c9df
AM
1003 && (set_src_cost (args[i].value, args[i].mode,
1004 optimize_insn_for_speed_p ())
1005 > COSTS_N_INSNS (1))
42db504c
SB
1006 && ((*reg_parm_seen
1007 && targetm.small_register_classes_for_mode_p (args[i].mode))
27e29549 1008 || optimize))
20efdf74
JL
1009 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1010 }
1011}
1012
f73ad30e 1013#ifdef REG_PARM_STACK_SPACE
20efdf74
JL
1014
1015 /* The argument list is the property of the called routine and it
1016 may clobber it. If the fixed area has been used for previous
1017 parameters, we must save and restore it. */
3bdf5ad1 1018
20efdf74 1019static rtx
d329e058 1020save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
20efdf74 1021{
b820d2b8
AM
1022 int low;
1023 int high;
20efdf74 1024
b820d2b8
AM
1025 /* Compute the boundary of the area that needs to be saved, if any. */
1026 high = reg_parm_stack_space;
6dad9361
TS
1027 if (ARGS_GROW_DOWNWARD)
1028 high += 1;
1029
b820d2b8
AM
1030 if (high > highest_outgoing_arg_in_use)
1031 high = highest_outgoing_arg_in_use;
20efdf74 1032
b820d2b8
AM
1033 for (low = 0; low < high; low++)
1034 if (stack_usage_map[low] != 0)
1035 {
1036 int num_to_save;
ef4bddc2 1037 machine_mode save_mode;
b820d2b8 1038 int delta;
0a81f074 1039 rtx addr;
b820d2b8
AM
1040 rtx stack_area;
1041 rtx save_area;
20efdf74 1042
b820d2b8
AM
1043 while (stack_usage_map[--high] == 0)
1044 ;
20efdf74 1045
b820d2b8
AM
1046 *low_to_save = low;
1047 *high_to_save = high;
1048
1049 num_to_save = high - low + 1;
20efdf74 1050
b820d2b8
AM
1051 /* If we don't have the required alignment, must do this
1052 in BLKmode. */
fffbab82
RS
1053 scalar_int_mode imode;
1054 if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1055 && (low & (MIN (GET_MODE_SIZE (imode),
1056 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1057 save_mode = imode;
1058 else
b820d2b8 1059 save_mode = BLKmode;
20efdf74 1060
6dad9361
TS
1061 if (ARGS_GROW_DOWNWARD)
1062 delta = -high;
1063 else
1064 delta = low;
1065
0a81f074
RS
1066 addr = plus_constant (Pmode, argblock, delta);
1067 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
8ac61af7 1068
b820d2b8
AM
1069 set_mem_align (stack_area, PARM_BOUNDARY);
1070 if (save_mode == BLKmode)
1071 {
9474e8ab 1072 save_area = assign_stack_temp (BLKmode, num_to_save);
b820d2b8
AM
1073 emit_block_move (validize_mem (save_area), stack_area,
1074 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1075 }
1076 else
1077 {
1078 save_area = gen_reg_rtx (save_mode);
1079 emit_move_insn (save_area, stack_area);
1080 }
8ac61af7 1081
b820d2b8
AM
1082 return save_area;
1083 }
1084
1085 return NULL_RTX;
20efdf74
JL
1086}
1087
1088static void
d329e058 1089restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
20efdf74 1090{
ef4bddc2 1091 machine_mode save_mode = GET_MODE (save_area);
b820d2b8 1092 int delta;
0a81f074 1093 rtx addr, stack_area;
b820d2b8 1094
6dad9361
TS
1095 if (ARGS_GROW_DOWNWARD)
1096 delta = -high_to_save;
1097 else
1098 delta = low_to_save;
1099
0a81f074
RS
1100 addr = plus_constant (Pmode, argblock, delta);
1101 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
b820d2b8 1102 set_mem_align (stack_area, PARM_BOUNDARY);
20efdf74
JL
1103
1104 if (save_mode != BLKmode)
1105 emit_move_insn (stack_area, save_area);
1106 else
44bb111a
RH
1107 emit_block_move (stack_area, validize_mem (save_area),
1108 GEN_INT (high_to_save - low_to_save + 1),
1109 BLOCK_OP_CALL_PARM);
20efdf74 1110}
19652adf 1111#endif /* REG_PARM_STACK_SPACE */
f725a3ec 1112
20efdf74
JL
1113/* If any elements in ARGS refer to parameters that are to be passed in
1114 registers, but not in memory, and whose alignment does not permit a
1115 direct copy into registers. Copy the values into a group of pseudos
f725a3ec 1116 which we will later copy into the appropriate hard registers.
8e6a59fe
MM
1117
1118 Pseudos for each unaligned argument will be stored into the array
1119 args[argnum].aligned_regs. The caller is responsible for deallocating
1120 the aligned_regs array if it is nonzero. */
1121
20efdf74 1122static void
d329e058 1123store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
20efdf74
JL
1124{
1125 int i, j;
f725a3ec 1126
20efdf74
JL
1127 for (i = 0; i < num_actuals; i++)
1128 if (args[i].reg != 0 && ! args[i].pass_on_stack
a7973050 1129 && GET_CODE (args[i].reg) != PARALLEL
20efdf74 1130 && args[i].mode == BLKmode
852d22b4
EB
1131 && MEM_P (args[i].value)
1132 && (MEM_ALIGN (args[i].value)
20efdf74
JL
1133 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1134 {
1135 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
6e985040 1136 int endian_correction = 0;
20efdf74 1137
78a52f11
RH
1138 if (args[i].partial)
1139 {
1140 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1141 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1142 }
1143 else
1144 {
1145 args[i].n_aligned_regs
1146 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1147 }
1148
5ed6ace5 1149 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
20efdf74 1150
6e985040
AM
1151 /* Structures smaller than a word are normally aligned to the
1152 least significant byte. On a BYTES_BIG_ENDIAN machine,
20efdf74
JL
1153 this means we must skip the empty high order bytes when
1154 calculating the bit offset. */
6e985040
AM
1155 if (bytes < UNITS_PER_WORD
1156#ifdef BLOCK_REG_PADDING
1157 && (BLOCK_REG_PADDING (args[i].mode,
1158 TREE_TYPE (args[i].tree_value), 1)
1159 == downward)
1160#else
1161 && BYTES_BIG_ENDIAN
1162#endif
1163 )
1164 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
20efdf74
JL
1165
1166 for (j = 0; j < args[i].n_aligned_regs; j++)
1167 {
1168 rtx reg = gen_reg_rtx (word_mode);
1169 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1170 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
20efdf74
JL
1171
1172 args[i].aligned_regs[j] = reg;
c6285bd7 1173 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
f96bf49a 1174 word_mode, word_mode, false, NULL);
20efdf74
JL
1175
1176 /* There is no need to restrict this code to loading items
1177 in TYPE_ALIGN sized hunks. The bitfield instructions can
1178 load up entire word sized registers efficiently.
1179
1180 ??? This may not be needed anymore.
1181 We use to emit a clobber here but that doesn't let later
1182 passes optimize the instructions we emit. By storing 0 into
1183 the register later passes know the first AND to zero out the
1184 bitfield being set in the register is unnecessary. The store
1185 of 0 will be deleted as will at least the first AND. */
1186
1187 emit_move_insn (reg, const0_rtx);
1188
1189 bytes -= bitsize / BITS_PER_UNIT;
1169e45d 1190 store_bit_field (reg, bitsize, endian_correction, 0, 0,
ee45a32d 1191 word_mode, word, false);
20efdf74
JL
1192 }
1193 }
1194}
1195
8bd9f164
MS
1196/* The limit set by -Walloc-larger-than=. */
1197static GTY(()) tree alloc_object_size_limit;
1198
1199/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1200 setting if the option is specified, or to the maximum object size if it
1201 is not. Return the initialized value. */
1202
1203static tree
1204alloc_max_size (void)
1205{
1206 if (!alloc_object_size_limit)
1207 {
1208 alloc_object_size_limit = TYPE_MAX_VALUE (ssizetype);
1209
c16880ef 1210 if (warn_alloc_size_limit)
8bd9f164 1211 {
c16880ef
MS
1212 char *end = NULL;
1213 errno = 0;
1214 unsigned HOST_WIDE_INT unit = 1;
1215 unsigned HOST_WIDE_INT limit
1216 = strtoull (warn_alloc_size_limit, &end, 10);
1217
1218 if (!errno)
8bd9f164 1219 {
c16880ef
MS
1220 if (end && *end)
1221 {
1222 /* Numeric option arguments are at most INT_MAX. Make it
1223 possible to specify a larger value by accepting common
1224 suffixes. */
1225 if (!strcmp (end, "kB"))
1226 unit = 1000;
1227 else if (!strcasecmp (end, "KiB") || strcmp (end, "KB"))
1228 unit = 1024;
1229 else if (!strcmp (end, "MB"))
2392baa5 1230 unit = HOST_WIDE_INT_UC (1000) * 1000;
c16880ef 1231 else if (!strcasecmp (end, "MiB"))
2392baa5 1232 unit = HOST_WIDE_INT_UC (1024) * 1024;
c16880ef 1233 else if (!strcasecmp (end, "GB"))
2392baa5 1234 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
c16880ef 1235 else if (!strcasecmp (end, "GiB"))
2392baa5 1236 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
c16880ef 1237 else if (!strcasecmp (end, "TB"))
2392baa5 1238 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
c16880ef 1239 else if (!strcasecmp (end, "TiB"))
2392baa5 1240 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
c16880ef 1241 else if (!strcasecmp (end, "PB"))
2392baa5 1242 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
c16880ef 1243 else if (!strcasecmp (end, "PiB"))
2392baa5 1244 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
c16880ef 1245 else if (!strcasecmp (end, "EB"))
2392baa5
JJ
1246 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
1247 * 1000;
c16880ef 1248 else if (!strcasecmp (end, "EiB"))
2392baa5
JJ
1249 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
1250 * 1024;
c16880ef
MS
1251 else
1252 unit = 0;
1253 }
8bd9f164 1254
c16880ef 1255 if (unit)
2392baa5
JJ
1256 {
1257 wide_int w = wi::uhwi (limit, HOST_BITS_PER_WIDE_INT + 64);
1258 w *= unit;
1259 if (wi::ltu_p (w, alloc_object_size_limit))
1260 alloc_object_size_limit = wide_int_to_tree (ssizetype, w);
1261 }
c16880ef 1262 }
8bd9f164
MS
1263 }
1264 }
1265 return alloc_object_size_limit;
1266}
1267
c16880ef
MS
1268/* Return true when EXP's range can be determined and set RANGE[] to it
1269 after adjusting it if necessary to make EXP a valid size argument to
1270 an allocation function declared with attribute alloc_size (whose
1271 argument may be signed), or to a string manipulation function like
1272 memset. */
8bd9f164 1273
c16880ef
MS
1274bool
1275get_size_range (tree exp, tree range[2])
8bd9f164 1276{
c16880ef 1277 if (tree_fits_uhwi_p (exp))
8bd9f164 1278 {
c16880ef
MS
1279 /* EXP is a constant. */
1280 range[0] = range[1] = exp;
1281 return true;
1282 }
1283
1284 wide_int min, max;
1285 enum value_range_type range_type
c89ffd99 1286 = ((TREE_CODE (exp) == SSA_NAME && INTEGRAL_TYPE_P (TREE_TYPE (exp)))
c16880ef
MS
1287 ? get_range_info (exp, &min, &max) : VR_VARYING);
1288
1289 if (range_type == VR_VARYING)
1290 {
1291 /* No range information available. */
1292 range[0] = NULL_TREE;
1293 range[1] = NULL_TREE;
1294 return false;
1295 }
1296
1297 tree exptype = TREE_TYPE (exp);
1298 unsigned expprec = TYPE_PRECISION (exptype);
1299 wide_int wzero = wi::zero (expprec);
1300 wide_int wmaxval = wide_int (TYPE_MAX_VALUE (exptype));
1301
1302 bool signed_p = !TYPE_UNSIGNED (exptype);
1303
1304 if (range_type == VR_ANTI_RANGE)
1305 {
1306 if (signed_p)
8bd9f164 1307 {
c16880ef 1308 if (wi::les_p (max, wzero))
8bd9f164 1309 {
c16880ef
MS
1310 /* EXP is not in a strictly negative range. That means
1311 it must be in some (not necessarily strictly) positive
1312 range which includes zero. Since in signed to unsigned
1313 conversions negative values end up converted to large
1314 positive values, and otherwise they are not valid sizes,
1315 the resulting range is in both cases [0, TYPE_MAX]. */
1316 min = wzero;
1317 max = wmaxval;
8bd9f164 1318 }
c16880ef
MS
1319 else if (wi::les_p (min - 1, wzero))
1320 {
1321 /* EXP is not in a negative-positive range. That means EXP
1322 is either negative, or greater than max. Since negative
1323 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
1324 min = max + 1;
1325 max = wmaxval;
1326 }
1327 else
1328 {
1329 max = min - 1;
1330 min = wzero;
1331 }
1332 }
1333 else if (wi::eq_p (wzero, min - 1))
1334 {
1335 /* EXP is unsigned and not in the range [1, MAX]. That means
1336 it's either zero or greater than MAX. Even though 0 would
1337 normally be detected by -Walloc-zero set the range to
1338 [MAX, TYPE_MAX] so that when MAX is greater than the limit
1339 the whole range is diagnosed. */
1340 min = max + 1;
1341 max = wmaxval;
1342 }
1343 else
1344 {
1345 max = min - 1;
1346 min = wzero;
8bd9f164
MS
1347 }
1348 }
1349
c16880ef
MS
1350 range[0] = wide_int_to_tree (exptype, min);
1351 range[1] = wide_int_to_tree (exptype, max);
1352
1353 return true;
8bd9f164
MS
1354}
1355
1356/* Diagnose a call EXP to function FN decorated with attribute alloc_size
1357 whose argument numbers given by IDX with values given by ARGS exceed
1358 the maximum object size or cause an unsigned oveflow (wrapping) when
1359 multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
1360 may be null for functions like malloc, and non-null for those like
1361 calloc that are decorated with a two-argument attribute alloc_size. */
1362
1363void
1364maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1365{
1366 /* The range each of the (up to) two arguments is known to be in. */
1367 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1368
1369 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1370 tree maxobjsize = alloc_max_size ();
1371
1372 location_t loc = EXPR_LOCATION (exp);
1373
1374 bool warned = false;
1375
1376 /* Validate each argument individually. */
1377 for (unsigned i = 0; i != 2 && args[i]; ++i)
1378 {
1379 if (TREE_CODE (args[i]) == INTEGER_CST)
1380 {
1381 argrange[i][0] = args[i];
1382 argrange[i][1] = args[i];
1383
1384 if (tree_int_cst_lt (args[i], integer_zero_node))
1385 {
1386 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef
MS
1387 "%Kargument %i value %qE is negative",
1388 exp, idx[i] + 1, args[i]);
8bd9f164
MS
1389 }
1390 else if (integer_zerop (args[i]))
1391 {
1392 /* Avoid issuing -Walloc-zero for allocation functions other
1393 than __builtin_alloca that are declared with attribute
1394 returns_nonnull because there's no portability risk. This
1395 avoids warning for such calls to libiberty's xmalloc and
1396 friends.
1397 Also avoid issuing the warning for calls to function named
1398 "alloca". */
1399 if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
1400 && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
1401 || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
1402 && !lookup_attribute ("returns_nonnull",
1403 TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
1404 warned = warning_at (loc, OPT_Walloc_zero,
c16880ef
MS
1405 "%Kargument %i value is zero",
1406 exp, idx[i] + 1);
8bd9f164
MS
1407 }
1408 else if (tree_int_cst_lt (maxobjsize, args[i]))
1409 {
1410 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1411 mode and with -fno-exceptions as a way to indicate array
1412 size overflow. There's no good way to detect C++98 here
1413 so avoid diagnosing these calls for all C++ modes. */
1414 if (i == 0
1415 && !args[1]
1416 && lang_GNU_CXX ()
1417 && DECL_IS_OPERATOR_NEW (fn)
1418 && integer_all_onesp (args[i]))
1419 continue;
1420
1421 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1422 "%Kargument %i value %qE exceeds "
8bd9f164 1423 "maximum object size %E",
c16880ef 1424 exp, idx[i] + 1, args[i], maxobjsize);
8bd9f164
MS
1425 }
1426 }
c16880ef
MS
1427 else if (TREE_CODE (args[i]) == SSA_NAME
1428 && get_size_range (args[i], argrange[i]))
8bd9f164 1429 {
8bd9f164
MS
1430 /* Verify that the argument's range is not negative (including
1431 upper bound of zero). */
1432 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1433 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1434 {
1435 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef
MS
1436 "%Kargument %i range [%E, %E] is negative",
1437 exp, idx[i] + 1,
1438 argrange[i][0], argrange[i][1]);
8bd9f164
MS
1439 }
1440 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1441 {
1442 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1443 "%Kargument %i range [%E, %E] exceeds "
8bd9f164 1444 "maximum object size %E",
c16880ef
MS
1445 exp, idx[i] + 1,
1446 argrange[i][0], argrange[i][1],
8bd9f164
MS
1447 maxobjsize);
1448 }
1449 }
1450 }
1451
1452 if (!argrange[0])
1453 return;
1454
1455 /* For a two-argument alloc_size, validate the product of the two
1456 arguments if both of their values or ranges are known. */
1457 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1458 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1459 && !integer_onep (argrange[0][0])
1460 && !integer_onep (argrange[1][0]))
1461 {
1462 /* Check for overflow in the product of a function decorated with
1463 attribute alloc_size (X, Y). */
1464 unsigned szprec = TYPE_PRECISION (size_type_node);
1465 wide_int x = wi::to_wide (argrange[0][0], szprec);
1466 wide_int y = wi::to_wide (argrange[1][0], szprec);
1467
1468 bool vflow;
1469 wide_int prod = wi::umul (x, y, &vflow);
1470
1471 if (vflow)
1472 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1473 "%Kproduct %<%E * %E%> of arguments %i and %i "
8bd9f164 1474 "exceeds %<SIZE_MAX%>",
c16880ef 1475 exp, argrange[0][0], argrange[1][0],
8bd9f164
MS
1476 idx[0] + 1, idx[1] + 1);
1477 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1478 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
c16880ef 1479 "%Kproduct %<%E * %E%> of arguments %i and %i "
8bd9f164 1480 "exceeds maximum object size %E",
c16880ef 1481 exp, argrange[0][0], argrange[1][0],
8bd9f164
MS
1482 idx[0] + 1, idx[1] + 1,
1483 maxobjsize);
1484
1485 if (warned)
1486 {
1487 /* Print the full range of each of the two arguments to make
1488 it clear when it is, in fact, in a range and not constant. */
1489 if (argrange[0][0] != argrange [0][1])
1490 inform (loc, "argument %i in the range [%E, %E]",
1491 idx[0] + 1, argrange[0][0], argrange[0][1]);
1492 if (argrange[1][0] != argrange [1][1])
1493 inform (loc, "argument %i in the range [%E, %E]",
1494 idx[1] + 1, argrange[1][0], argrange[1][1]);
1495 }
1496 }
1497
1498 if (warned)
1499 {
1500 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1501
1502 if (DECL_IS_BUILTIN (fn))
1503 inform (loc,
1504 "in a call to built-in allocation function %qD", fn);
1505 else
1506 inform (fnloc,
1507 "in a call to allocation function %qD declared here", fn);
1508 }
1509}
1510
9a385c2d
DM
1511/* Issue an error if CALL_EXPR was flagged as requiring
1512 tall-call optimization. */
1513
1514static void
1515maybe_complain_about_tail_call (tree call_expr, const char *reason)
1516{
1517 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1518 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1519 return;
1520
1521 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1522}
1523
d7cdf113 1524/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
b8698a0f 1525 CALL_EXPR EXP.
d7cdf113
JL
1526
1527 NUM_ACTUALS is the total number of parameters.
1528
1529 N_NAMED_ARGS is the total number of named arguments.
1530
078a18a4
SL
1531 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1532 value, or null.
1533
d7cdf113
JL
1534 FNDECL is the tree code for the target of this call (if known)
1535
1536 ARGS_SO_FAR holds state needed by the target to know where to place
1537 the next argument.
1538
1539 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1540 for arguments which are passed in registers.
1541
1542 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
1543 and may be modified by this routine.
1544
f2d33f13 1545 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
026c3cfd 1546 flags which may be modified by this routine.
dd292d0a 1547
6de9cd9a
DN
1548 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1549 that requires allocation of stack space.
1550
dd292d0a
MM
1551 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1552 the thunked-to function. */
d7cdf113
JL
1553
1554static void
d329e058
AJ
1555initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1556 struct arg_data *args,
1557 struct args_size *args_size,
1558 int n_named_args ATTRIBUTE_UNUSED,
078a18a4 1559 tree exp, tree struct_value_addr_value,
45769134 1560 tree fndecl, tree fntype,
d5cc9181 1561 cumulative_args_t args_so_far,
d329e058
AJ
1562 int reg_parm_stack_space,
1563 rtx *old_stack_level, int *old_pending_adj,
dd292d0a 1564 int *must_preallocate, int *ecf_flags,
6de9cd9a 1565 bool *may_tailcall, bool call_from_thunk_p)
d7cdf113 1566{
d5cc9181 1567 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
db3927fb 1568 location_t loc = EXPR_LOCATION (exp);
d7cdf113
JL
1569
1570 /* Count arg position in order args appear. */
1571 int argpos;
1572
1573 int i;
f725a3ec 1574
d7cdf113
JL
1575 args_size->constant = 0;
1576 args_size->var = 0;
1577
d5e254e1
IE
1578 bitmap_obstack_initialize (NULL);
1579
d7cdf113 1580 /* In this loop, we consider args in the order they are written.
3d9684ae 1581 We fill up ARGS from the back. */
d7cdf113 1582
3d9684ae 1583 i = num_actuals - 1;
078a18a4 1584 {
d5e254e1 1585 int j = i, ptr_arg = -1;
078a18a4
SL
1586 call_expr_arg_iterator iter;
1587 tree arg;
d5e254e1 1588 bitmap slots = NULL;
078a18a4
SL
1589
1590 if (struct_value_addr_value)
1591 {
1592 args[j].tree_value = struct_value_addr_value;
3d9684ae 1593 j--;
d5e254e1
IE
1594
1595 /* If we pass structure address then we need to
1596 create bounds for it. Since created bounds is
1597 a call statement, we expand it right here to avoid
1598 fixing all other places where it may be expanded. */
1599 if (CALL_WITH_BOUNDS_P (exp))
1600 {
1601 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1602 args[j].tree_value
1603 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1604 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1605 EXPAND_NORMAL, 0, false);
1606 args[j].pointer_arg = j + 1;
1607 j--;
1608 }
078a18a4 1609 }
afc610db 1610 argpos = 0;
078a18a4
SL
1611 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1612 {
1613 tree argtype = TREE_TYPE (arg);
d5e254e1
IE
1614
1615 /* Remember last param with pointer and associate it
1616 with following pointer bounds. */
1617 if (CALL_WITH_BOUNDS_P (exp)
1618 && chkp_type_has_pointer (argtype))
1619 {
1620 if (slots)
1621 BITMAP_FREE (slots);
1622 ptr_arg = j;
1623 if (!BOUNDED_TYPE_P (argtype))
1624 {
1625 slots = BITMAP_ALLOC (NULL);
1626 chkp_find_bound_slots (argtype, slots);
1627 }
1628 }
afc610db
IE
1629 else if (CALL_WITH_BOUNDS_P (exp)
1630 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1631 argpos < n_named_args))
1632 {
1633 if (slots)
1634 BITMAP_FREE (slots);
1635 ptr_arg = j;
1636 }
d5e254e1
IE
1637 else if (POINTER_BOUNDS_TYPE_P (argtype))
1638 {
1639 /* We expect bounds in instrumented calls only.
1640 Otherwise it is a sign we lost flag due to some optimization
1641 and may emit call args incorrectly. */
1642 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1643
1644 /* For structures look for the next available pointer. */
1645 if (ptr_arg != -1 && slots)
1646 {
1647 unsigned bnd_no = bitmap_first_set_bit (slots);
1648 args[j].pointer_offset =
1649 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1650
1651 bitmap_clear_bit (slots, bnd_no);
1652
1653 /* Check we have no more pointers in the structure. */
1654 if (bitmap_empty_p (slots))
1655 BITMAP_FREE (slots);
1656 }
1657 args[j].pointer_arg = ptr_arg;
1658
1659 /* Check we covered all pointers in the previous
1660 non bounds arg. */
1661 if (!slots)
1662 ptr_arg = -1;
1663 }
1664 else
1665 ptr_arg = -1;
1666
078a18a4
SL
1667 if (targetm.calls.split_complex_arg
1668 && argtype
1669 && TREE_CODE (argtype) == COMPLEX_TYPE
1670 && targetm.calls.split_complex_arg (argtype))
1671 {
1672 tree subtype = TREE_TYPE (argtype);
078a18a4 1673 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
3d9684ae 1674 j--;
078a18a4
SL
1675 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1676 }
1677 else
1678 args[j].tree_value = arg;
3d9684ae 1679 j--;
afc610db 1680 argpos++;
078a18a4 1681 }
d5e254e1
IE
1682
1683 if (slots)
1684 BITMAP_FREE (slots);
078a18a4
SL
1685 }
1686
d5e254e1
IE
1687 bitmap_obstack_release (NULL);
1688
8bd9f164
MS
1689 /* Extract attribute alloc_size and if set, store the indices of
1690 the corresponding arguments in ALLOC_IDX, and then the actual
1691 argument(s) at those indices in ALLOC_ARGS. */
1692 int alloc_idx[2] = { -1, -1 };
1693 if (tree alloc_size
1694 = (fndecl ? lookup_attribute ("alloc_size",
1695 TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
1696 : NULL_TREE))
1697 {
1698 tree args = TREE_VALUE (alloc_size);
1699 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1700 if (TREE_CHAIN (args))
1701 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1702 }
1703
1704 /* Array for up to the two attribute alloc_size arguments. */
1705 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1706
d7cdf113 1707 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
3d9684ae 1708 for (argpos = 0; argpos < num_actuals; i--, argpos++)
d7cdf113 1709 {
078a18a4 1710 tree type = TREE_TYPE (args[i].tree_value);
d7cdf113 1711 int unsignedp;
ef4bddc2 1712 machine_mode mode;
d7cdf113 1713
d7cdf113 1714 /* Replace erroneous argument with constant zero. */
d0f062fb 1715 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
d7cdf113
JL
1716 args[i].tree_value = integer_zero_node, type = integer_type_node;
1717
ebf0bf7f
JJ
1718 /* If TYPE is a transparent union or record, pass things the way
1719 we would pass the first field of the union or record. We have
1720 already verified that the modes are the same. */
1721 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1722 && TYPE_TRANSPARENT_AGGR (type))
1723 type = TREE_TYPE (first_field (type));
d7cdf113
JL
1724
1725 /* Decide where to pass this arg.
1726
1727 args[i].reg is nonzero if all or part is passed in registers.
1728
1729 args[i].partial is nonzero if part but not all is passed in registers,
78a52f11 1730 and the exact value says how many bytes are passed in registers.
d7cdf113
JL
1731
1732 args[i].pass_on_stack is nonzero if the argument must at least be
1733 computed on the stack. It may then be loaded back into registers
1734 if args[i].reg is nonzero.
1735
1736 These decisions are driven by the FUNCTION_... macros and must agree
1737 with those made by function.c. */
1738
1739 /* See if this argument should be passed by invisible reference. */
d5cc9181 1740 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
0976078c 1741 type, argpos < n_named_args))
d7cdf113 1742 {
9969aaf6 1743 bool callee_copies;
d6e1acf6 1744 tree base = NULL_TREE;
9969aaf6
RH
1745
1746 callee_copies
d5cc9181 1747 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
6cdd5672 1748 type, argpos < n_named_args);
9969aaf6
RH
1749
1750 /* If we're compiling a thunk, pass through invisible references
1751 instead of making a copy. */
dd292d0a 1752 if (call_from_thunk_p
9969aaf6
RH
1753 || (callee_copies
1754 && !TREE_ADDRESSABLE (type)
1755 && (base = get_base_address (args[i].tree_value))
9c3d55b4 1756 && TREE_CODE (base) != SSA_NAME
9969aaf6 1757 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
d7cdf113 1758 {
006e317a
JH
1759 /* We may have turned the parameter value into an SSA name.
1760 Go back to the original parameter so we can take the
1761 address. */
1762 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1763 {
1764 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1765 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1766 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1767 }
fe8dd12e
JH
1768 /* Argument setup code may have copied the value to register. We
1769 revert that optimization now because the tail call code must
1770 use the original location. */
1771 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1772 && !MEM_P (DECL_RTL (args[i].tree_value))
1773 && DECL_INCOMING_RTL (args[i].tree_value)
1774 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1775 set_decl_rtl (args[i].tree_value,
1776 DECL_INCOMING_RTL (args[i].tree_value));
1777
c4b9a87e
ER
1778 mark_addressable (args[i].tree_value);
1779
9969aaf6
RH
1780 /* We can't use sibcalls if a callee-copied argument is
1781 stored in the current function's frame. */
1782 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
9a385c2d
DM
1783 {
1784 *may_tailcall = false;
1785 maybe_complain_about_tail_call (exp,
1786 "a callee-copied argument is"
1787 " stored in the current "
1788 " function's frame");
1789 }
9fd47435 1790
db3927fb
AH
1791 args[i].tree_value = build_fold_addr_expr_loc (loc,
1792 args[i].tree_value);
9969aaf6
RH
1793 type = TREE_TYPE (args[i].tree_value);
1794
becfd6e5
KZ
1795 if (*ecf_flags & ECF_CONST)
1796 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
f21add07 1797 }
d7cdf113
JL
1798 else
1799 {
1800 /* We make a copy of the object and pass the address to the
1801 function being called. */
1802 rtx copy;
1803
d0f062fb 1804 if (!COMPLETE_TYPE_P (type)
b38f3813
EB
1805 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1806 || (flag_stack_check == GENERIC_STACK_CHECK
1807 && compare_tree_int (TYPE_SIZE_UNIT (type),
1808 STACK_CHECK_MAX_VAR_SIZE) > 0))
d7cdf113
JL
1809 {
1810 /* This is a variable-sized object. Make space on the stack
1811 for it. */
078a18a4 1812 rtx size_rtx = expr_size (args[i].tree_value);
d7cdf113
JL
1813
1814 if (*old_stack_level == 0)
1815 {
9eac0f2a 1816 emit_stack_save (SAVE_BLOCK, old_stack_level);
d7cdf113
JL
1817 *old_pending_adj = pending_stack_adjust;
1818 pending_stack_adjust = 0;
1819 }
1820
d3c12306
EB
1821 /* We can pass TRUE as the 4th argument because we just
1822 saved the stack pointer and will restore it right after
1823 the call. */
3a42502d
RH
1824 copy = allocate_dynamic_stack_space (size_rtx,
1825 TYPE_ALIGN (type),
1826 TYPE_ALIGN (type),
1827 true);
1828 copy = gen_rtx_MEM (BLKmode, copy);
3bdf5ad1 1829 set_mem_attributes (copy, type, 1);
d7cdf113
JL
1830 }
1831 else
9474e8ab 1832 copy = assign_temp (type, 1, 0);
d7cdf113 1833
ee45a32d 1834 store_expr (args[i].tree_value, copy, 0, false, false);
d7cdf113 1835
becfd6e5
KZ
1836 /* Just change the const function to pure and then let
1837 the next test clear the pure based on
1838 callee_copies. */
1839 if (*ecf_flags & ECF_CONST)
1840 {
1841 *ecf_flags &= ~ECF_CONST;
1842 *ecf_flags |= ECF_PURE;
1843 }
1844
1845 if (!callee_copies && *ecf_flags & ECF_PURE)
1846 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
9969aaf6
RH
1847
1848 args[i].tree_value
db3927fb 1849 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
9969aaf6 1850 type = TREE_TYPE (args[i].tree_value);
6de9cd9a 1851 *may_tailcall = false;
9a385c2d
DM
1852 maybe_complain_about_tail_call (exp,
1853 "argument must be passed"
1854 " by copying");
d7cdf113
JL
1855 }
1856 }
1857
8df83eae 1858 unsignedp = TYPE_UNSIGNED (type);
cde0f3fd
PB
1859 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1860 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
d7cdf113
JL
1861
1862 args[i].unsignedp = unsignedp;
1863 args[i].mode = mode;
7d167afd 1864
3c07301f
NF
1865 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1866 argpos < n_named_args);
1867
d5e254e1
IE
1868 if (args[i].reg && CONST_INT_P (args[i].reg))
1869 {
1870 args[i].special_slot = args[i].reg;
1871 args[i].reg = NULL;
1872 }
1873
7d167afd
JJ
1874 /* If this is a sibling call and the machine has register windows, the
1875 register window has to be unwinded before calling the routine, so
1876 arguments have to go into the incoming registers. */
3c07301f
NF
1877 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1878 args[i].tail_call_reg
1879 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1880 argpos < n_named_args);
1881 else
1882 args[i].tail_call_reg = args[i].reg;
7d167afd 1883
d7cdf113
JL
1884 if (args[i].reg)
1885 args[i].partial
78a52f11
RH
1886 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1887 argpos < n_named_args);
d7cdf113 1888
fe984136 1889 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
d7cdf113
JL
1890
1891 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1892 it means that we are to pass this arg in the register(s) designated
1893 by the PARALLEL, but also to pass it in the stack. */
1894 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1895 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1896 args[i].pass_on_stack = 1;
1897
1898 /* If this is an addressable type, we must preallocate the stack
1899 since we must evaluate the object into its final location.
1900
1901 If this is to be passed in both registers and the stack, it is simpler
1902 to preallocate. */
1903 if (TREE_ADDRESSABLE (type)
1904 || (args[i].pass_on_stack && args[i].reg != 0))
1905 *must_preallocate = 1;
1906
d5e254e1
IE
1907 /* No stack allocation and padding for bounds. */
1908 if (POINTER_BOUNDS_P (args[i].tree_value))
1909 ;
d7cdf113 1910 /* Compute the stack-size of this argument. */
d5e254e1
IE
1911 else if (args[i].reg == 0 || args[i].partial != 0
1912 || reg_parm_stack_space > 0
1913 || args[i].pass_on_stack)
d7cdf113
JL
1914 locate_and_pad_parm (mode, type,
1915#ifdef STACK_PARMS_IN_REG_PARM_AREA
1916 1,
1917#else
1918 args[i].reg != 0,
1919#endif
2e4ceca5 1920 reg_parm_stack_space,
e7949876
AM
1921 args[i].pass_on_stack ? 0 : args[i].partial,
1922 fndecl, args_size, &args[i].locate);
648bb159
RS
1923#ifdef BLOCK_REG_PADDING
1924 else
1925 /* The argument is passed entirely in registers. See at which
1926 end it should be padded. */
1927 args[i].locate.where_pad =
1928 BLOCK_REG_PADDING (mode, type,
1929 int_size_in_bytes (type) <= UNITS_PER_WORD);
1930#endif
f725a3ec 1931
d7cdf113
JL
1932 /* Update ARGS_SIZE, the total stack space for args so far. */
1933
e7949876
AM
1934 args_size->constant += args[i].locate.size.constant;
1935 if (args[i].locate.size.var)
1936 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
d7cdf113
JL
1937
1938 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1939 have been used, etc. */
1940
3c07301f
NF
1941 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1942 type, argpos < n_named_args);
8bd9f164
MS
1943
1944 /* Store argument values for functions decorated with attribute
1945 alloc_size. */
1946 if (argpos == alloc_idx[0])
1947 alloc_args[0] = args[i].tree_value;
1948 else if (argpos == alloc_idx[1])
1949 alloc_args[1] = args[i].tree_value;
1950 }
1951
1952 if (alloc_args[0])
1953 {
1954 /* Check the arguments of functions decorated with attribute
1955 alloc_size. */
1956 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
d7cdf113
JL
1957 }
1958}
1959
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.

   FNDECL is the called function's declaration (may be NULL for indirect
   calls); FNTYPE is its type.  One of them is consulted by
   OUTGOING_REG_PARM_STACK_SPACE on targets that define it.

   PREFERRED_STACK_BOUNDARY is the alignment, in bits, that the argument
   block should be rounded up to.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     tree fndecl ATTRIBUTE_UNUSED,
			     tree fntype ATTRIBUTE_UNUSED,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      /* Variable-sized argument block: fold the constant part into the
	 variable tree expression and do all further arithmetic on trees.  */
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	    args_size->var
	      = size_binop (MINUS_EXPR, args_size->var,
			    ssize_int (reg_parm_stack_space));
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      /* Round CONSTANT + STACK_POINTER_DELTA up to the boundary, then take
	 the delta back out, so that the stack is aligned once the
	 arguments have been pushed on top of the current stack pointer.  */
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
2034
/* Precompute parameters as needed for a function call.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  Only arguments that are themselves CALL_EXPRs
   are precomputed, and only when ACCUMULATE_OUTGOING_ARGS is set.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      /* Only nested calls can clobber outgoing argument slots.  */
      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
	continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
	{
	  /* The value was expanded in the type's own mode; widen it to
	     the promoted mode chosen earlier for this argument.  */
	  int unsignedp = args[i].unsignedp;
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);

	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
	    }
	}
    }
}
2101
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
			   struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  /* A parm pushed after a partially-in-regs parm forces
	     preallocation.  */
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;
	  /* We preallocate in case there are bounds passed
	     in the bounds table to have precomputed address
	     for bounds association.  */
	  else if (POINTER_BOUNDS_P (args[i].tree_value)
		   && !args[i].reg)
	    must_preallocate = 1;

	  /* Tally up BLKmode arguments whose evaluation would otherwise
	     go through a temporary copy.  */
	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      /* Preallocating is faster when copies dominate the argument list.  */
      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
599f37b6 2163
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.
   It may be a plain base register or (PLUS base (CONST_INT offset)).  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      /* Split ARGBLOCK into its base register and constant offset.  */
      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;
	  unsigned int align, boundary;
	  unsigned int units_on_stack = 0;
	  machine_mode partial_mode = VOIDmode;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack
	      && args[i].reg != 0
	      && args[i].partial == 0)
	    continue;

	  /* Pointer Bounds are never passed on the stack.  */
	  if (POINTER_BOUNDS_P (args[i].tree_value))
	    continue;

	  /* ARGS[I].STACK: address of the argument data proper.  */
	  if (CONST_INT_P (offset))
	    addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (Pmode, addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      units_on_stack = args[i].locate.size.constant;
	      partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
					    MODE_INT, 1);
	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack, units_on_stack);
	    }
	  else
	    {
	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  /* Derive the known alignment of the slot from its offset and the
	     slot boundary.  When padding is downward the data does not start
	     at the boundary, so fall back to the lowest set bit of the
	     combined offset/boundary value.  */
	  align = BITS_PER_UNIT;
	  boundary = args[i].locate.boundary;
	  if (args[i].locate.where_pad != downward)
	    align = boundary;
	  else if (CONST_INT_P (offset))
	    {
	      align = INTVAL (offset) * BITS_PER_UNIT | boundary;
	      align = least_bit_hwi (align);
	    }
	  set_mem_align (args[i].stack, align);

	  /* ARGS[I].STACK_SLOT: address of the full slot, including any
	     padding below the data.  */
	  if (CONST_INT_P (slot_offset))
	    addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (Pmode, addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.
	      */
	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack_slot, units_on_stack);
	    }
	  else
	    {
	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack_slot,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
f725a3ec 2268
a45bdd02
JL
2269/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2270 in a call instruction.
2271
2272 FNDECL is the tree node for the target function. For an indirect call
2273 FNDECL will be NULL_TREE.
2274
09e2bf48 2275 ADDR is the operand 0 of CALL_EXPR for this call. */
a45bdd02
JL
2276
2277static rtx
d329e058 2278rtx_for_function_call (tree fndecl, tree addr)
a45bdd02
JL
2279{
2280 rtx funexp;
2281
2282 /* Get the function to call, in the form of RTL. */
2283 if (fndecl)
2284 {
ad960f56 2285 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
bbee5843 2286 TREE_USED (fndecl) = 1;
a45bdd02
JL
2287
2288 /* Get a SYMBOL_REF rtx for the function address. */
2289 funexp = XEXP (DECL_RTL (fndecl), 0);
2290 }
2291 else
2292 /* Generate an rtx (probably a pseudo-register) for the address. */
2293 {
2294 push_temp_slots ();
84217346 2295 funexp = expand_normal (addr);
f725a3ec 2296 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
a45bdd02
JL
2297 }
2298 return funexp;
2299}
2300
/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx_insn *scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  vec<rtx> cache;
} internal_arg_pointer_exp_state;

/* Forward declaration: the scanner below and this analysis routine are
   mutually recursive.  */
static rtx internal_arg_pointer_based_exp (const_rtx, bool);
/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   that pointer it has.  Results are accumulated in
   internal_arg_pointer_exp_state so repeated queries stay cheap.  */

static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;

  /* Resume scanning right after the last insn processed previously.  */
  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
	{
	  rtx val = NULL_RTX;
	  unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
	  /* Punt on pseudos set multiple times.  */
	  if (idx < internal_arg_pointer_exp_state.cache.length ()
	      && (internal_arg_pointer_exp_state.cache[idx]
		  != NULL_RTX))
	    val = pc_rtx;
	  else
	    val = internal_arg_pointer_based_exp (SET_SRC (set), false);
	  if (val != NULL_RTX)
	    {
	      if (idx >= internal_arg_pointer_exp_state.cache.length ())
		internal_arg_pointer_exp_state.cache
		  .safe_grow_cleared (idx + 1);
	      internal_arg_pointer_exp_state.cache[idx] = val;
	    }
	}
      /* Remember the last insn seen so the next call resumes here.  */
      if (NEXT_INSN (insn) == NULL_RTX)
	scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}
2361
/* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
   it with fixed offset, or PC if this is with variable or unknown offset.
   TOPLEVEL is true if the function is invoked at the topmost level.  */

static rtx
internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
{
  /* Constants can never be based on the internal arg pointer.  */
  if (CONSTANT_P (rtl))
    return NULL_RTX;

  /* The internal arg pointer itself: offset zero.  */
  if (rtl == crtl->args.internal_arg_pointer)
    return const0_rtx;

  if (REG_P (rtl) && HARD_REGISTER_P (rtl))
    return NULL_RTX;

  /* (PLUS x (CONST_INT n)): analyze X and fold N into its offset.  */
  if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
    {
      rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
      if (val == NULL_RTX || val == pc_rtx)
	return val;
      return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
    }

  /* When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  */
  if (toplevel)
    internal_arg_pointer_based_exp_scan ();

  /* Pseudos are looked up in the cache populated by the scan above.  */
  if (REG_P (rtl))
    {
      unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
      if (idx < internal_arg_pointer_exp_state.cache.length ())
	return internal_arg_pointer_exp_state.cache[idx];

      return NULL_RTX;
    }

  /* For any other expression, conservatively report an unknown offset if
     any (non-memory-address) sub-register is arg-pointer based.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
	return pc_rtx;
      if (MEM_P (x))
	iter.skip_subrtxes ();
    }

  return NULL_RTX;
}
2414
/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;
  rtx val;

  /* Nothing stored into the argument area yet: no overlap possible.  */
  if (bitmap_empty_p (stored_args_map))
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    return false;
  else if (val == pc_rtx)
    /* Variable or unknown offset: assume the worst.  */
    return true;
  else
    i = INTVAL (val);

  /* Translate the arg-pointer-relative offset into an index into
     stored_args_map, which is relative to the start of the named args.  */
  if (STACK_GROWS_DOWNWARD)
    i -= crtl->args.pretend_args_size;
  else
    i += crtl->args.pretend_args_size;


  if (ARGS_GROW_DOWNWARD)
    i = -i - size;

  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      /* Any byte in [i, i+size) already marked as stored means overlap.  */
      for (k = 0; k < size; k++)
	if (i + k < SBITMAP_SIZE (stored_args_map)
	    && bitmap_bit_p (stored_args_map, i + k))
	  return true;
    }

  return false;
}
2457
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
			  rtx *call_fusage, int flags, int is_sibcall,
			  int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      /* For sibcalls, use the register chosen for the incoming-arg
	 convention; see the tail_call_reg setup in argument init.  */
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
	{
	  int partial = args[i].partial;
	  int nregs;
	  int size = 0;
	  rtx_insn *before_arg = get_last_insn ();
	  /* Set non-negative if we must move a word at a time, even if
	     just one word (e.g, partial == 4 && mode == DFmode).  Set
	     to -1 if we just use a normal move insn.  This value can be
	     zero if the argument is a zero size structure.  */
	  nregs = -1;
	  if (GET_CODE (reg) == PARALLEL)
	    ;
	  else if (partial)
	    {
	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	    }
	  else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
	    {
	      size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	      nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	    }
	  else
	    size = GET_MODE_SIZE (args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_move (reg, args[i].parallel_value);

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    {
	      emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
	      /* Handle case where we have a value that needs shifting
		 up to the msb.  eg. a QImode value and we're padding
		 upward on a BYTES_BIG_ENDIAN machine.  */
	      if (size < UNITS_PER_WORD
		  && (args[i].locate.where_pad
		      == (BYTES_BIG_ENDIAN ? upward : downward)))
		{
		  rtx x;
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

		  /* Assigning REG here rather than a temp makes CALL_FUSAGE
		     report the whole reg as used.  Strictly speaking, the
		     call only uses SIZE bytes at the msb end, but it doesn't
		     seem worth generating rtl to say that.  */
		  reg = gen_rtx_REG (word_mode, REGNO (reg));
		  x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
		  if (x != reg)
		    emit_move_insn (reg, x);
		}
#endif
	    }

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    {
	      /* Load the register(s) word by word from memory.  */
	      rtx mem = validize_mem (copy_rtx (args[i].value));

	      /* Check for overlap with already clobbered argument area,
		 providing that this has non-zero size.  */
	      if (is_sibcall
		  && size != 0
		  && (mem_overlaps_already_clobbered_arg_p
		      (XEXP (args[i].value, 0), size)))
		*sibcall_failure = 1;

	      if (size % UNITS_PER_WORD == 0
		  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	      else
		{
		  /* The final, partial word cannot be loaded with a plain
		     word move (it would read past the object); extract just
		     the valid bits instead.  */
		  if (nregs > 1)
		    move_block_to_reg (REGNO (reg), mem, nregs - 1,
				       args[i].mode);
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
		  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
		  unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
		  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
					     word_mode, word_mode, false,
					     NULL);
		  if (BYTES_BIG_ENDIAN)
		    x = expand_shift (LSHIFT_EXPR, word_mode, x,
				      BITS_PER_WORD - bitsize, dest, 1);
		  if (x != dest)
		    emit_move_insn (dest, x);
		}

	      /* Handle a BLKmode that needs shifting.  */
	      if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		  && args[i].locate.where_pad == downward
#else
		  && BYTES_BIG_ENDIAN
#endif
		  )
		{
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
		  enum tree_code dir = (BYTES_BIG_ENDIAN
					? RSHIFT_EXPR : LSHIFT_EXPR);
		  rtx x;

		  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
		  if (x != dest)
		    emit_move_insn (dest, x);
		}
	    }

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg_mode (call_fusage, reg,
			  TYPE_MODE (TREE_TYPE (args[i].tree_value)));
	  else if (nregs > 0)
	    use_regs (call_fusage, REGNO (reg), nregs);
	}
    }
}
2622
739fb049
MM
2623/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2624 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2625 bytes, then we would need to push some additional bytes to pad the
ce48579b
RH
2626 arguments. So, we compute an adjust to the stack pointer for an
2627 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2628 bytes. Then, when the arguments are pushed the stack will be perfectly
2629 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
2630 be popped after the call. Returns the adjustment. */
739fb049 2631
ce48579b 2632static int
d329e058
AJ
2633combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
2634 struct args_size *args_size,
95899b34 2635 unsigned int preferred_unit_stack_boundary)
739fb049
MM
2636{
2637 /* The number of bytes to pop so that the stack will be
2638 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2639 HOST_WIDE_INT adjustment;
2640 /* The alignment of the stack after the arguments are pushed, if we
2641 just pushed the arguments without adjust the stack here. */
95899b34 2642 unsigned HOST_WIDE_INT unadjusted_alignment;
739fb049 2643
f725a3ec 2644 unadjusted_alignment
739fb049
MM
2645 = ((stack_pointer_delta + unadjusted_args_size)
2646 % preferred_unit_stack_boundary);
2647
2648 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2649 as possible -- leaving just enough left to cancel out the
2650 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2651 PENDING_STACK_ADJUST is non-negative, and congruent to
2652 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2653
2654 /* Begin by trying to pop all the bytes. */
f725a3ec
KH
2655 unadjusted_alignment
2656 = (unadjusted_alignment
739fb049
MM
2657 - (pending_stack_adjust % preferred_unit_stack_boundary));
2658 adjustment = pending_stack_adjust;
2659 /* Push enough additional bytes that the stack will be aligned
2660 after the arguments are pushed. */
0aae1572
NS
2661 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2662 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
f725a3ec 2663
739fb049
MM
2664 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
2665 bytes after the call. The right number is the entire
2666 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2667 by the arguments in the first place. */
f725a3ec 2668 args_size->constant
739fb049
MM
2669 = pending_stack_adjust - adjustment + unadjusted_args_size;
2670
ce48579b 2671 return adjustment;
739fb049
MM
2672}
2673
c67846f2
JJ
2674/* Scan X expression if it does not dereference any argument slots
2675 we already clobbered by tail call arguments (as noted in stored_args_map
2676 bitmap).
da7d8304 2677 Return nonzero if X expression dereferences such argument slots,
c67846f2
JJ
2678 zero otherwise. */
2679
2680static int
d329e058 2681check_sibcall_argument_overlap_1 (rtx x)
c67846f2
JJ
2682{
2683 RTX_CODE code;
2684 int i, j;
c67846f2
JJ
2685 const char *fmt;
2686
2687 if (x == NULL_RTX)
2688 return 0;
2689
2690 code = GET_CODE (x);
2691
6c3cb698
KY
2692 /* We need not check the operands of the CALL expression itself. */
2693 if (code == CALL)
2694 return 0;
2695
c67846f2 2696 if (code == MEM)
07eef816
KH
2697 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2698 GET_MODE_SIZE (GET_MODE (x)));
c67846f2 2699
f725a3ec 2700 /* Scan all subexpressions. */
c67846f2
JJ
2701 fmt = GET_RTX_FORMAT (code);
2702 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2703 {
2704 if (*fmt == 'e')
f725a3ec
KH
2705 {
2706 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2707 return 1;
2708 }
c67846f2 2709 else if (*fmt == 'E')
f725a3ec
KH
2710 {
2711 for (j = 0; j < XVECLEN (x, i); j++)
2712 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2713 return 1;
2714 }
c67846f2
JJ
2715 }
2716 return 0;
c67846f2
JJ
2717}
2718
2719/* Scan sequence after INSN if it does not dereference any argument slots
2720 we already clobbered by tail call arguments (as noted in stored_args_map
0cdca92b
DJ
2721 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
2722 stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
2723 should be 0). Return nonzero if sequence after INSN dereferences such argument
2724 slots, zero otherwise. */
c67846f2
JJ
2725
2726static int
48810515
DM
2727check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2728 int mark_stored_args_map)
f725a3ec 2729{
c67846f2
JJ
2730 int low, high;
2731
2732 if (insn == NULL_RTX)
2733 insn = get_insns ();
2734 else
2735 insn = NEXT_INSN (insn);
2736
2737 for (; insn; insn = NEXT_INSN (insn))
f725a3ec
KH
2738 if (INSN_P (insn)
2739 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
c67846f2
JJ
2740 break;
2741
0cdca92b
DJ
2742 if (mark_stored_args_map)
2743 {
6dad9361
TS
2744 if (ARGS_GROW_DOWNWARD)
2745 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2746 else
2747 low = arg->locate.slot_offset.constant;
d60eab50 2748
e7949876 2749 for (high = low + arg->locate.size.constant; low < high; low++)
d7c028c0 2750 bitmap_set_bit (stored_args_map, low);
0cdca92b 2751 }
c67846f2
JJ
2752 return insn != NULL_RTX;
2753}
2754
bef5d8b6
RS
2755/* Given that a function returns a value of mode MODE at the most
2756 significant end of hard register VALUE, shift VALUE left or right
2757 as specified by LEFT_P. Return true if some action was needed. */
c988af2b 2758
bef5d8b6 2759bool
ef4bddc2 2760shift_return_value (machine_mode mode, bool left_p, rtx value)
c988af2b 2761{
bef5d8b6
RS
2762 HOST_WIDE_INT shift;
2763
2764 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2765 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2766 if (shift == 0)
2767 return false;
2768
2769 /* Use ashr rather than lshr for right shifts. This is for the benefit
2770 of the MIPS port, which requires SImode values to be sign-extended
2771 when stored in 64-bit registers. */
2772 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2773 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2774 gcc_unreachable ();
2775 return true;
c988af2b
RS
2776}
2777
3fb30019
RS
2778/* If X is a likely-spilled register value, copy it to a pseudo
2779 register and return that register. Return X otherwise. */
2780
2781static rtx
2782avoid_likely_spilled_reg (rtx x)
2783{
82d6e6fc 2784 rtx new_rtx;
3fb30019
RS
2785
2786 if (REG_P (x)
2787 && HARD_REGISTER_P (x)
07b8f0a8 2788 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3fb30019
RS
2789 {
2790 /* Make sure that we generate a REG rather than a CONCAT.
2791 Moves into CONCATs can need nontrivial instructions,
2792 and the whole point of this function is to avoid
2793 using the hard register directly in such a situation. */
2794 generating_concat_p = 0;
82d6e6fc 2795 new_rtx = gen_reg_rtx (GET_MODE (x));
3fb30019 2796 generating_concat_p = 1;
82d6e6fc
KG
2797 emit_move_insn (new_rtx, x);
2798 return new_rtx;
3fb30019
RS
2799 }
2800 return x;
2801}
2802
b40d90e6
DM
2803/* Helper function for expand_call.
2804 Return false is EXP is not implementable as a sibling call. */
2805
2806static bool
2807can_implement_as_sibling_call_p (tree exp,
2808 rtx structure_value_addr,
2809 tree funtype,
dfbdde16 2810 int reg_parm_stack_space ATTRIBUTE_UNUSED,
b40d90e6
DM
2811 tree fndecl,
2812 int flags,
2813 tree addr,
2814 const args_size &args_size)
2815{
2816 if (!targetm.have_sibcall_epilogue ())
9a385c2d
DM
2817 {
2818 maybe_complain_about_tail_call
2819 (exp,
2820 "machine description does not have"
2821 " a sibcall_epilogue instruction pattern");
2822 return false;
2823 }
b40d90e6
DM
2824
2825 /* Doing sibling call optimization needs some work, since
2826 structure_value_addr can be allocated on the stack.
2827 It does not seem worth the effort since few optimizable
2828 sibling calls will return a structure. */
2829 if (structure_value_addr != NULL_RTX)
9a385c2d
DM
2830 {
2831 maybe_complain_about_tail_call (exp, "callee returns a structure");
2832 return false;
2833 }
b40d90e6
DM
2834
2835#ifdef REG_PARM_STACK_SPACE
2836 /* If outgoing reg parm stack space changes, we can not do sibcall. */
2837 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2838 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
2839 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
9a385c2d
DM
2840 {
2841 maybe_complain_about_tail_call (exp,
2842 "inconsistent size of stack space"
2843 " allocated for arguments which are"
2844 " passed in registers");
2845 return false;
2846 }
b40d90e6
DM
2847#endif
2848
2849 /* Check whether the target is able to optimize the call
2850 into a sibcall. */
2851 if (!targetm.function_ok_for_sibcall (fndecl, exp))
9a385c2d
DM
2852 {
2853 maybe_complain_about_tail_call (exp,
2854 "target is not able to optimize the"
2855 " call into a sibling call");
2856 return false;
2857 }
b40d90e6
DM
2858
2859 /* Functions that do not return exactly once may not be sibcall
2860 optimized. */
9a385c2d
DM
2861 if (flags & ECF_RETURNS_TWICE)
2862 {
2863 maybe_complain_about_tail_call (exp, "callee returns twice");
2864 return false;
2865 }
2866 if (flags & ECF_NORETURN)
2867 {
2868 maybe_complain_about_tail_call (exp, "callee does not return");
2869 return false;
2870 }
b40d90e6
DM
2871
2872 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
9a385c2d
DM
2873 {
2874 maybe_complain_about_tail_call (exp, "volatile function type");
2875 return false;
2876 }
b40d90e6
DM
2877
2878 /* If the called function is nested in the current one, it might access
2879 some of the caller's arguments, but could clobber them beforehand if
2880 the argument areas are shared. */
2881 if (fndecl && decl_function_context (fndecl) == current_function_decl)
9a385c2d
DM
2882 {
2883 maybe_complain_about_tail_call (exp, "nested function");
2884 return false;
2885 }
b40d90e6
DM
2886
2887 /* If this function requires more stack slots than the current
2888 function, we cannot change it into a sibling call.
2889 crtl->args.pretend_args_size is not part of the
2890 stack allocated by our caller. */
2891 if (args_size.constant > (crtl->args.size - crtl->args.pretend_args_size))
9a385c2d
DM
2892 {
2893 maybe_complain_about_tail_call (exp,
2894 "callee required more stack slots"
2895 " than the caller");
2896 return false;
2897 }
b40d90e6
DM
2898
2899 /* If the callee pops its own arguments, then it must pop exactly
2900 the same number of arguments as the current function. */
2901 if (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2902 != targetm.calls.return_pops_args (current_function_decl,
2903 TREE_TYPE (current_function_decl),
2904 crtl->args.size))
9a385c2d
DM
2905 {
2906 maybe_complain_about_tail_call (exp,
2907 "inconsistent number of"
2908 " popped arguments");
2909 return false;
2910 }
b40d90e6
DM
2911
2912 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
9a385c2d
DM
2913 {
2914 maybe_complain_about_tail_call (exp, "frontend does not support"
2915 " sibling call");
2916 return false;
2917 }
b40d90e6
DM
2918
2919 /* All checks passed. */
2920 return true;
2921}
2922
5039610b 2923/* Generate all the code for a CALL_EXPR exp
51bbfa0c
RS
2924 and return an rtx for its value.
2925 Store the value in TARGET (specified as an rtx) if convenient.
2926 If the value is stored in TARGET then TARGET is returned.
2927 If IGNORE is nonzero, then we ignore the value of the function call. */
2928
2929rtx
d329e058 2930expand_call (tree exp, rtx target, int ignore)
51bbfa0c 2931{
0a1c58a2
JL
2932 /* Nonzero if we are currently expanding a call. */
2933 static int currently_expanding_call = 0;
2934
51bbfa0c
RS
2935 /* RTX for the function to be called. */
2936 rtx funexp;
0a1c58a2 2937 /* Sequence of insns to perform a normal "call". */
48810515 2938 rtx_insn *normal_call_insns = NULL;
6de9cd9a 2939 /* Sequence of insns to perform a tail "call". */
48810515 2940 rtx_insn *tail_call_insns = NULL;
51bbfa0c
RS
2941 /* Data type of the function. */
2942 tree funtype;
ded9bf77 2943 tree type_arg_types;
28ed065e 2944 tree rettype;
51bbfa0c
RS
2945 /* Declaration of the function being called,
2946 or 0 if the function is computed (not known by name). */
2947 tree fndecl = 0;
57782ad8
MM
2948 /* The type of the function being called. */
2949 tree fntype;
6de9cd9a 2950 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
9a385c2d 2951 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
0a1c58a2 2952 int pass;
51bbfa0c
RS
2953
2954 /* Register in which non-BLKmode value will be returned,
2955 or 0 if no value or if value is BLKmode. */
2956 rtx valreg;
d5e254e1
IE
2957 /* Register(s) in which bounds are returned. */
2958 rtx valbnd = NULL;
51bbfa0c
RS
2959 /* Address where we should return a BLKmode value;
2960 0 if value not BLKmode. */
2961 rtx structure_value_addr = 0;
2962 /* Nonzero if that address is being passed by treating it as
2963 an extra, implicit first parameter. Otherwise,
2964 it is passed by being copied directly into struct_value_rtx. */
2965 int structure_value_addr_parm = 0;
078a18a4
SL
2966 /* Holds the value of implicit argument for the struct value. */
2967 tree structure_value_addr_value = NULL_TREE;
51bbfa0c
RS
2968 /* Size of aggregate value wanted, or zero if none wanted
2969 or if we are using the non-reentrant PCC calling convention
2970 or expecting the value in registers. */
e5e809f4 2971 HOST_WIDE_INT struct_value_size = 0;
51bbfa0c
RS
2972 /* Nonzero if called function returns an aggregate in memory PCC style,
2973 by returning the address of where to find it. */
2974 int pcc_struct_value = 0;
61f71b34 2975 rtx struct_value = 0;
51bbfa0c
RS
2976
2977 /* Number of actual parameters in this call, including struct value addr. */
2978 int num_actuals;
2979 /* Number of named args. Args after this are anonymous ones
2980 and they must all go on the stack. */
2981 int n_named_args;
078a18a4
SL
2982 /* Number of complex actual arguments that need to be split. */
2983 int num_complex_actuals = 0;
51bbfa0c
RS
2984
2985 /* Vector of information about each argument.
2986 Arguments are numbered in the order they will be pushed,
2987 not the order they are written. */
2988 struct arg_data *args;
2989
2990 /* Total size in bytes of all the stack-parms scanned so far. */
2991 struct args_size args_size;
099e9712 2992 struct args_size adjusted_args_size;
51bbfa0c 2993 /* Size of arguments before any adjustments (such as rounding). */
599f37b6 2994 int unadjusted_args_size;
51bbfa0c 2995 /* Data on reg parms scanned so far. */
d5cc9181
JR
2996 CUMULATIVE_ARGS args_so_far_v;
2997 cumulative_args_t args_so_far;
51bbfa0c
RS
2998 /* Nonzero if a reg parm has been scanned. */
2999 int reg_parm_seen;
efd65a8b 3000 /* Nonzero if this is an indirect function call. */
51bbfa0c 3001
f725a3ec 3002 /* Nonzero if we must avoid push-insns in the args for this call.
51bbfa0c
RS
3003 If stack space is allocated for register parameters, but not by the
3004 caller, then it is preallocated in the fixed part of the stack frame.
3005 So the entire argument block must then be preallocated (i.e., we
3006 ignore PUSH_ROUNDING in that case). */
3007
f73ad30e 3008 int must_preallocate = !PUSH_ARGS;
51bbfa0c 3009
f72aed24 3010 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
3011 int reg_parm_stack_space = 0;
3012
51bbfa0c
RS
3013 /* Address of space preallocated for stack parms
3014 (on machines that lack push insns), or 0 if space not preallocated. */
3015 rtx argblock = 0;
3016
e384e6b5 3017 /* Mask of ECF_ and ERF_ flags. */
f2d33f13 3018 int flags = 0;
e384e6b5 3019 int return_flags = 0;
f73ad30e 3020#ifdef REG_PARM_STACK_SPACE
51bbfa0c 3021 /* Define the boundary of the register parm stack space that needs to be
b820d2b8
AM
3022 saved, if any. */
3023 int low_to_save, high_to_save;
51bbfa0c
RS
3024 rtx save_area = 0; /* Place that it is saved */
3025#endif
3026
51bbfa0c
RS
3027 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3028 char *initial_stack_usage_map = stack_usage_map;
d9725c41 3029 char *stack_usage_map_buf = NULL;
51bbfa0c 3030
38afb23f
OH
3031 int old_stack_allocated;
3032
3033 /* State variables to track stack modifications. */
51bbfa0c 3034 rtx old_stack_level = 0;
38afb23f 3035 int old_stack_arg_under_construction = 0;
79be3418 3036 int old_pending_adj = 0;
51bbfa0c 3037 int old_inhibit_defer_pop = inhibit_defer_pop;
38afb23f
OH
3038
3039 /* Some stack pointer alterations we make are performed via
3040 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3041 which we then also need to save/restore along the way. */
a259f218 3042 int old_stack_pointer_delta = 0;
38afb23f 3043
0a1c58a2 3044 rtx call_fusage;
5039610b 3045 tree addr = CALL_EXPR_FN (exp);
b3694847 3046 int i;
739fb049 3047 /* The alignment of the stack, in bits. */
95899b34 3048 unsigned HOST_WIDE_INT preferred_stack_boundary;
739fb049 3049 /* The alignment of the stack, in bytes. */
95899b34 3050 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
6de9cd9a
DN
3051 /* The static chain value to use for this call. */
3052 rtx static_chain_value;
f2d33f13
JH
3053 /* See if this is "nothrow" function call. */
3054 if (TREE_NOTHROW (exp))
3055 flags |= ECF_NOTHROW;
3056
6de9cd9a
DN
3057 /* See if we can find a DECL-node for the actual function, and get the
3058 function attributes (flags) from the function decl or type node. */
39b0dce7
JM
3059 fndecl = get_callee_fndecl (exp);
3060 if (fndecl)
51bbfa0c 3061 {
57782ad8 3062 fntype = TREE_TYPE (fndecl);
39b0dce7 3063 flags |= flags_from_decl_or_type (fndecl);
e384e6b5 3064 return_flags |= decl_return_flags (fndecl);
51bbfa0c 3065 }
39b0dce7 3066 else
72954a4f 3067 {
28ed065e 3068 fntype = TREE_TYPE (TREE_TYPE (addr));
57782ad8 3069 flags |= flags_from_decl_or_type (fntype);
4c640e26
EB
3070 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3071 flags |= ECF_BY_DESCRIPTOR;
72954a4f 3072 }
28ed065e 3073 rettype = TREE_TYPE (exp);
7393c642 3074
57782ad8 3075 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
61f71b34 3076
8c6a8269
RS
3077 /* Warn if this value is an aggregate type,
3078 regardless of which calling convention we are using for it. */
28ed065e 3079 if (AGGREGATE_TYPE_P (rettype))
ccf08a6e 3080 warning (OPT_Waggregate_return, "function call has aggregate value");
8c6a8269 3081
becfd6e5
KZ
3082 /* If the result of a non looping pure or const function call is
3083 ignored (or void), and none of its arguments are volatile, we can
3084 avoid expanding the call and just evaluate the arguments for
3085 side-effects. */
8c6a8269 3086 if ((flags & (ECF_CONST | ECF_PURE))
becfd6e5 3087 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
8c6a8269 3088 && (ignore || target == const0_rtx
28ed065e 3089 || TYPE_MODE (rettype) == VOIDmode))
8c6a8269
RS
3090 {
3091 bool volatilep = false;
3092 tree arg;
078a18a4 3093 call_expr_arg_iterator iter;
8c6a8269 3094
078a18a4
SL
3095 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3096 if (TREE_THIS_VOLATILE (arg))
8c6a8269
RS
3097 {
3098 volatilep = true;
3099 break;
3100 }
3101
3102 if (! volatilep)
3103 {
078a18a4
SL
3104 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3105 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8c6a8269
RS
3106 return const0_rtx;
3107 }
3108 }
3109
6f90e075 3110#ifdef REG_PARM_STACK_SPACE
5d059ed9 3111 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
6f90e075 3112#endif
6f90e075 3113
5d059ed9 3114 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
81464b2c 3115 && reg_parm_stack_space > 0 && PUSH_ARGS)
e5e809f4 3116 must_preallocate = 1;
e5e809f4 3117
51bbfa0c
RS
3118 /* Set up a place to return a structure. */
3119
3120 /* Cater to broken compilers. */
d47d0a8d 3121 if (aggregate_value_p (exp, fntype))
51bbfa0c
RS
3122 {
3123 /* This call returns a big structure. */
84b8030f 3124 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
51bbfa0c
RS
3125
3126#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a
RS
3127 {
3128 pcc_struct_value = 1;
9e7b1d0a
RS
3129 }
3130#else /* not PCC_STATIC_STRUCT_RETURN */
3131 {
28ed065e 3132 struct_value_size = int_size_in_bytes (rettype);
51bbfa0c 3133
391756ad
EB
3134 /* Even if it is semantically safe to use the target as the return
3135 slot, it may be not sufficiently aligned for the return type. */
3136 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3137 && target
3138 && MEM_P (target)
3139 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3140 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype),
3141 MEM_ALIGN (target))))
9e7b1d0a
RS
3142 structure_value_addr = XEXP (target, 0);
3143 else
3144 {
9e7b1d0a
RS
3145 /* For variable-sized objects, we must be called with a target
3146 specified. If we were to allocate space on the stack here,
3147 we would have no way of knowing when to free it. */
9474e8ab 3148 rtx d = assign_temp (rettype, 1, 1);
4361b41d 3149 structure_value_addr = XEXP (d, 0);
9e7b1d0a
RS
3150 target = 0;
3151 }
3152 }
3153#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
3154 }
3155
099e9712 3156 /* Figure out the amount to which the stack should be aligned. */
099e9712 3157 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
b255a036
JH
3158 if (fndecl)
3159 {
3dafb85c 3160 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
17b29c0a
L
3161 /* Without automatic stack alignment, we can't increase preferred
3162 stack boundary. With automatic stack alignment, it is
3163 unnecessary since unless we can guarantee that all callers will
3164 align the outgoing stack properly, callee has to align its
3165 stack anyway. */
3166 if (i
3167 && i->preferred_incoming_stack_boundary
3168 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
b255a036
JH
3169 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3170 }
099e9712
JH
3171
3172 /* Operand 0 is a pointer-to-function; get the type of the function. */
09e2bf48 3173 funtype = TREE_TYPE (addr);
366de0ce 3174 gcc_assert (POINTER_TYPE_P (funtype));
099e9712
JH
3175 funtype = TREE_TYPE (funtype);
3176
078a18a4
SL
3177 /* Count whether there are actual complex arguments that need to be split
3178 into their real and imaginary parts. Munge the type_arg_types
3179 appropriately here as well. */
42ba5130 3180 if (targetm.calls.split_complex_arg)
ded9bf77 3181 {
078a18a4
SL
3182 call_expr_arg_iterator iter;
3183 tree arg;
3184 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3185 {
3186 tree type = TREE_TYPE (arg);
3187 if (type && TREE_CODE (type) == COMPLEX_TYPE
3188 && targetm.calls.split_complex_arg (type))
3189 num_complex_actuals++;
3190 }
ded9bf77 3191 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
ded9bf77
AH
3192 }
3193 else
3194 type_arg_types = TYPE_ARG_TYPES (funtype);
3195
099e9712 3196 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 3197 cfun->calls_alloca = 1;
099e9712
JH
3198
3199 /* If struct_value_rtx is 0, it means pass the address
078a18a4
SL
3200 as if it were an extra parameter. Put the argument expression
3201 in structure_value_addr_value. */
61f71b34 3202 if (structure_value_addr && struct_value == 0)
099e9712
JH
3203 {
3204 /* If structure_value_addr is a REG other than
3205 virtual_outgoing_args_rtx, we can use always use it. If it
3206 is not a REG, we must always copy it into a register.
3207 If it is virtual_outgoing_args_rtx, we must copy it to another
3208 register in some cases. */
f8cfc6aa 3209 rtx temp = (!REG_P (structure_value_addr)
099e9712
JH
3210 || (ACCUMULATE_OUTGOING_ARGS
3211 && stack_arg_under_construction
3212 && structure_value_addr == virtual_outgoing_args_rtx)
7ae4ad28 3213 ? copy_addr_to_reg (convert_memory_address
57782ad8 3214 (Pmode, structure_value_addr))
099e9712
JH
3215 : structure_value_addr);
3216
078a18a4
SL
3217 structure_value_addr_value =
3218 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
d5e254e1 3219 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
099e9712
JH
3220 }
3221
3222 /* Count the arguments and set NUM_ACTUALS. */
078a18a4
SL
3223 num_actuals =
3224 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
099e9712
JH
3225
3226 /* Compute number of named args.
3a4d587b
AM
3227 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3228
3229 if (type_arg_types != 0)
3230 n_named_args
3231 = (list_length (type_arg_types)
3232 /* Count the struct value address, if it is passed as a parm. */
3233 + structure_value_addr_parm);
3234 else
3235 /* If we know nothing, treat all args as named. */
3236 n_named_args = num_actuals;
3237
3238 /* Start updating where the next arg would go.
3239
3240 On some machines (such as the PA) indirect calls have a different
3241 calling convention than normal calls. The fourth argument in
3242 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3243 or not. */
d5cc9181
JR
3244 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3245 args_so_far = pack_cumulative_args (&args_so_far_v);
3a4d587b
AM
3246
3247 /* Now possibly adjust the number of named args.
099e9712 3248 Normally, don't include the last named arg if anonymous args follow.
3a179764
KH
3249 We do include the last named arg if
3250 targetm.calls.strict_argument_naming() returns nonzero.
099e9712
JH
3251 (If no anonymous args follow, the result of list_length is actually
3252 one too large. This is harmless.)
3253
4ac8340c 3254 If targetm.calls.pretend_outgoing_varargs_named() returns
3a179764
KH
3255 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3256 this machine will be able to place unnamed args that were passed
3257 in registers into the stack. So treat all args as named. This
3258 allows the insns emitting for a specific argument list to be
3259 independent of the function declaration.
4ac8340c
KH
3260
3261 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3262 we do not have any reliable way to pass unnamed args in
3263 registers, so we must force them into memory. */
099e9712 3264
3a4d587b 3265 if (type_arg_types != 0
d5cc9181 3266 && targetm.calls.strict_argument_naming (args_so_far))
3a4d587b
AM
3267 ;
3268 else if (type_arg_types != 0
d5cc9181 3269 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3a4d587b
AM
3270 /* Don't include the last named arg. */
3271 --n_named_args;
099e9712 3272 else
3a4d587b 3273 /* Treat all args as named. */
099e9712
JH
3274 n_named_args = num_actuals;
3275
099e9712 3276 /* Make a vector to hold all the information about each arg. */
765fc0f7 3277 args = XCNEWVEC (struct arg_data, num_actuals);
099e9712 3278
d80d2d2a
KH
3279 /* Build up entries in the ARGS array, compute the size of the
3280 arguments into ARGS_SIZE, etc. */
099e9712 3281 initialize_argument_information (num_actuals, args, &args_size,
078a18a4 3282 n_named_args, exp,
45769134 3283 structure_value_addr_value, fndecl, fntype,
d5cc9181 3284 args_so_far, reg_parm_stack_space,
099e9712 3285 &old_stack_level, &old_pending_adj,
dd292d0a 3286 &must_preallocate, &flags,
6de9cd9a 3287 &try_tail_call, CALL_FROM_THUNK_P (exp));
099e9712
JH
3288
3289 if (args_size.var)
84b8030f 3290 must_preallocate = 1;
099e9712
JH
3291
3292 /* Now make final decision about preallocating stack space. */
3293 must_preallocate = finalize_must_preallocate (must_preallocate,
3294 num_actuals, args,
3295 &args_size);
3296
3297 /* If the structure value address will reference the stack pointer, we
3298 must stabilize it. We don't need to do this if we know that we are
3299 not going to adjust the stack pointer in processing this call. */
3300
3301 if (structure_value_addr
3302 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3303 || reg_mentioned_p (virtual_outgoing_args_rtx,
3304 structure_value_addr))
3305 && (args_size.var
3306 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
3307 structure_value_addr = copy_to_reg (structure_value_addr);
0a1c58a2 3308
7ae4ad28 3309 /* Tail calls can make things harder to debug, and we've traditionally
194c7c45 3310 pushed these optimizations into -O2. Don't try if we're already
fb158467 3311 expanding a call, as that means we're an argument. Don't try if
3fbd86b1 3312 there's cleanups, as we know there's code to follow the call. */
0a1c58a2 3313
099e9712
JH
3314 if (currently_expanding_call++ != 0
3315 || !flag_optimize_sibling_calls
6de9cd9a 3316 || args_size.var
6fb5fa3c 3317 || dbg_cnt (tail_call) == false)
6de9cd9a 3318 try_tail_call = 0;
099e9712 3319
9a385c2d
DM
3320 /* If the user has marked the function as requiring tail-call
3321 optimization, attempt it. */
3322 if (must_tail_call)
3323 try_tail_call = 1;
3324
099e9712 3325 /* Rest of purposes for tail call optimizations to fail. */
b40d90e6 3326 if (try_tail_call)
9a385c2d
DM
3327 try_tail_call = can_implement_as_sibling_call_p (exp,
3328 structure_value_addr,
3329 funtype,
3330 reg_parm_stack_space,
3331 fndecl,
b40d90e6 3332 flags, addr, args_size);
497eb8c3 3333
c69cd1f5
JJ
3334 /* Check if caller and callee disagree in promotion of function
3335 return value. */
3336 if (try_tail_call)
3337 {
ef4bddc2
RS
3338 machine_mode caller_mode, caller_promoted_mode;
3339 machine_mode callee_mode, callee_promoted_mode;
c69cd1f5
JJ
3340 int caller_unsignedp, callee_unsignedp;
3341 tree caller_res = DECL_RESULT (current_function_decl);
3342
3343 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
cde0f3fd 3344 caller_mode = DECL_MODE (caller_res);
c69cd1f5 3345 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
cde0f3fd
PB
3346 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3347 caller_promoted_mode
3348 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3349 &caller_unsignedp,
3350 TREE_TYPE (current_function_decl), 1);
3351 callee_promoted_mode
666e3ceb 3352 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
cde0f3fd 3353 &callee_unsignedp,
666e3ceb 3354 funtype, 1);
c69cd1f5
JJ
3355 if (caller_mode != VOIDmode
3356 && (caller_promoted_mode != callee_promoted_mode
3357 || ((caller_mode != caller_promoted_mode
3358 || callee_mode != callee_promoted_mode)
3359 && (caller_unsignedp != callee_unsignedp
bd4288c0 3360 || partial_subreg_p (caller_mode, callee_mode)))))
9a385c2d
DM
3361 {
3362 try_tail_call = 0;
3363 maybe_complain_about_tail_call (exp,
3364 "caller and callee disagree in"
3365 " promotion of function"
3366 " return value");
3367 }
c69cd1f5
JJ
3368 }
3369
01973e26
L
3370 /* Ensure current function's preferred stack boundary is at least
3371 what we need. Stack alignment may also increase preferred stack
3372 boundary. */
b5f772ce 3373 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
cb91fab0 3374 crtl->preferred_stack_boundary = preferred_stack_boundary;
01973e26
L
3375 else
3376 preferred_stack_boundary = crtl->preferred_stack_boundary;
c2f8b491 3377
099e9712 3378 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
497eb8c3 3379
0a1c58a2
JL
3380 /* We want to make two insn chains; one for a sibling call, the other
3381 for a normal call. We will select one of the two chains after
3382 initial RTL generation is complete. */
b820d2b8 3383 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
0a1c58a2
JL
3384 {
3385 int sibcall_failure = 0;
f5143c46 3386 /* We want to emit any pending stack adjustments before the tail
0a1c58a2 3387 recursion "call". That way we know any adjustment after the tail
7ae4ad28 3388 recursion call can be ignored if we indeed use the tail
0a1c58a2 3389 call expansion. */
7f2f0a01 3390 saved_pending_stack_adjust save;
48810515
DM
3391 rtx_insn *insns, *before_call, *after_args;
3392 rtx next_arg_reg;
39842893 3393
0a1c58a2
JL
3394 if (pass == 0)
3395 {
0a1c58a2
JL
3396 /* State variables we need to save and restore between
3397 iterations. */
7f2f0a01 3398 save_pending_stack_adjust (&save);
0a1c58a2 3399 }
f2d33f13
JH
3400 if (pass)
3401 flags &= ~ECF_SIBCALL;
3402 else
3403 flags |= ECF_SIBCALL;
51bbfa0c 3404
0a1c58a2 3405 /* Other state variables that we must reinitialize each time
f2d33f13 3406 through the loop (that are not initialized by the loop itself). */
0a1c58a2
JL
3407 argblock = 0;
3408 call_fusage = 0;
fa76d9e0 3409
f725a3ec 3410 /* Start a new sequence for the normal call case.
51bbfa0c 3411
0a1c58a2
JL
3412 From this point on, if the sibling call fails, we want to set
3413 sibcall_failure instead of continuing the loop. */
3414 start_sequence ();
eecb6f50 3415
0a1c58a2
JL
3416 /* Don't let pending stack adjusts add up to too much.
3417 Also, do all pending adjustments now if there is any chance
3418 this might be a call to alloca or if we are expanding a sibling
9dd9bf80 3419 call sequence.
63579539
DJ
3420 Also do the adjustments before a throwing call, otherwise
3421 exception handling can fail; PR 19225. */
0a1c58a2 3422 if (pending_stack_adjust >= 32
b5cd4ed4 3423 || (pending_stack_adjust > 0
9dd9bf80 3424 && (flags & ECF_MAY_BE_ALLOCA))
63579539
DJ
3425 || (pending_stack_adjust > 0
3426 && flag_exceptions && !(flags & ECF_NOTHROW))
0a1c58a2
JL
3427 || pass == 0)
3428 do_pending_stack_adjust ();
51bbfa0c 3429
0a1c58a2 3430 /* Precompute any arguments as needed. */
f8a097cd 3431 if (pass)
84b8030f 3432 precompute_arguments (num_actuals, args);
51bbfa0c 3433
0a1c58a2
JL
3434 /* Now we are about to start emitting insns that can be deleted
3435 if a libcall is deleted. */
84b8030f 3436 if (pass && (flags & ECF_MALLOC))
0a1c58a2 3437 start_sequence ();
51bbfa0c 3438
87a5dc2d
JW
3439 if (pass == 0
3440 && crtl->stack_protect_guard
3441 && targetm.stack_protect_runtime_enabled_p ())
b755446c
RH
3442 stack_protect_epilogue ();
3443
099e9712 3444 adjusted_args_size = args_size;
ce48579b
RH
3445 /* Compute the actual size of the argument block required. The variable
3446 and constant sizes must be combined, the size may have to be rounded,
3447 and there may be a minimum required size. When generating a sibcall
3448 pattern, do not round up, since we'll be re-using whatever space our
3449 caller provided. */
3450 unadjusted_args_size
f725a3ec
KH
3451 = compute_argument_block_size (reg_parm_stack_space,
3452 &adjusted_args_size,
5d059ed9 3453 fndecl, fntype,
ce48579b
RH
3454 (pass == 0 ? 0
3455 : preferred_stack_boundary));
3456
f725a3ec 3457 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
ce48579b 3458
f8a097cd 3459 /* The argument block when performing a sibling call is the
c22cacf3 3460 incoming argument block. */
f8a097cd 3461 if (pass == 0)
c67846f2 3462 {
2e3f842f 3463 argblock = crtl->args.internal_arg_pointer;
76e048a8
KT
3464 if (STACK_GROWS_DOWNWARD)
3465 argblock
3466 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3467 else
3468 argblock
3469 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3470
c67846f2 3471 stored_args_map = sbitmap_alloc (args_size.constant);
f61e445a 3472 bitmap_clear (stored_args_map);
c67846f2 3473 }
ce48579b 3474
0a1c58a2
JL
3475 /* If we have no actual push instructions, or shouldn't use them,
3476 make space for all args right now. */
099e9712 3477 else if (adjusted_args_size.var != 0)
51bbfa0c 3478 {
0a1c58a2
JL
3479 if (old_stack_level == 0)
3480 {
9eac0f2a 3481 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f 3482 old_stack_pointer_delta = stack_pointer_delta;
0a1c58a2
JL
3483 old_pending_adj = pending_stack_adjust;
3484 pending_stack_adjust = 0;
0a1c58a2
JL
3485 /* stack_arg_under_construction says whether a stack arg is
3486 being constructed at the old stack level. Pushing the stack
3487 gets a clean outgoing argument block. */
3488 old_stack_arg_under_construction = stack_arg_under_construction;
3489 stack_arg_under_construction = 0;
0a1c58a2 3490 }
099e9712 3491 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
a11e0df4 3492 if (flag_stack_usage_info)
d3c12306 3493 current_function_has_unbounded_dynamic_stack_size = 1;
51bbfa0c 3494 }
0a1c58a2
JL
3495 else
3496 {
3497 /* Note that we must go through the motions of allocating an argument
3498 block even if the size is zero because we may be storing args
3499 in the area reserved for register arguments, which may be part of
3500 the stack frame. */
26a258fe 3501
099e9712 3502 int needed = adjusted_args_size.constant;
51bbfa0c 3503
0a1c58a2
JL
3504 /* Store the maximum argument space used. It will be pushed by
3505 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3506 checking). */
51bbfa0c 3507
38173d38
JH
3508 if (needed > crtl->outgoing_args_size)
3509 crtl->outgoing_args_size = needed;
51bbfa0c 3510
0a1c58a2
JL
3511 if (must_preallocate)
3512 {
f73ad30e
JH
3513 if (ACCUMULATE_OUTGOING_ARGS)
3514 {
f8a097cd
JH
3515 /* Since the stack pointer will never be pushed, it is
3516 possible for the evaluation of a parm to clobber
3517 something we have already written to the stack.
3518 Since most function calls on RISC machines do not use
3519 the stack, this is uncommon, but must work correctly.
26a258fe 3520
f73ad30e 3521 Therefore, we save any area of the stack that was already
f8a097cd
JH
3522 written and that we are using. Here we set up to do this
3523 by making a new stack usage map from the old one. The
f725a3ec 3524 actual save will be done by store_one_arg.
26a258fe 3525
f73ad30e
JH
3526 Another approach might be to try to reorder the argument
3527 evaluations to avoid this conflicting stack usage. */
26a258fe 3528
f8a097cd
JH
3529 /* Since we will be writing into the entire argument area,
3530 the map must be allocated for its entire size, not just
3531 the part that is the responsibility of the caller. */
5d059ed9 3532 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 3533 needed += reg_parm_stack_space;
51bbfa0c 3534
6dad9361
TS
3535 if (ARGS_GROW_DOWNWARD)
3536 highest_outgoing_arg_in_use
3537 = MAX (initial_highest_arg_in_use, needed + 1);
3538 else
3539 highest_outgoing_arg_in_use
3540 = MAX (initial_highest_arg_in_use, needed);
3541
04695783 3542 free (stack_usage_map_buf);
5ed6ace5 3543 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 3544 stack_usage_map = stack_usage_map_buf;
51bbfa0c 3545
f73ad30e 3546 if (initial_highest_arg_in_use)
2e09e75a
JM
3547 memcpy (stack_usage_map, initial_stack_usage_map,
3548 initial_highest_arg_in_use);
2f4aa534 3549
f73ad30e 3550 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 3551 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
3552 (highest_outgoing_arg_in_use
3553 - initial_highest_arg_in_use));
3554 needed = 0;
2f4aa534 3555
f8a097cd
JH
3556 /* The address of the outgoing argument list must not be
3557 copied to a register here, because argblock would be left
3558 pointing to the wrong place after the call to
f725a3ec 3559 allocate_dynamic_stack_space below. */
2f4aa534 3560
f73ad30e 3561 argblock = virtual_outgoing_args_rtx;
f725a3ec 3562 }
f73ad30e 3563 else
26a258fe 3564 {
f73ad30e 3565 if (inhibit_defer_pop == 0)
0a1c58a2 3566 {
f73ad30e 3567 /* Try to reuse some or all of the pending_stack_adjust
ce48579b
RH
3568 to get this space. */
3569 needed
f725a3ec 3570 = (combine_pending_stack_adjustment_and_call
ce48579b 3571 (unadjusted_args_size,
099e9712 3572 &adjusted_args_size,
ce48579b
RH
3573 preferred_unit_stack_boundary));
3574
3575 /* combine_pending_stack_adjustment_and_call computes
3576 an adjustment before the arguments are allocated.
3577 Account for them and see whether or not the stack
3578 needs to go up or down. */
3579 needed = unadjusted_args_size - needed;
3580
3581 if (needed < 0)
f73ad30e 3582 {
ce48579b
RH
3583 /* We're releasing stack space. */
3584 /* ??? We can avoid any adjustment at all if we're
3585 already aligned. FIXME. */
3586 pending_stack_adjust = -needed;
3587 do_pending_stack_adjust ();
f73ad30e
JH
3588 needed = 0;
3589 }
f725a3ec 3590 else
ce48579b
RH
3591 /* We need to allocate space. We'll do that in
3592 push_block below. */
3593 pending_stack_adjust = 0;
0a1c58a2 3594 }
ce48579b
RH
3595
3596 /* Special case this because overhead of `push_block' in
3597 this case is non-trivial. */
f73ad30e
JH
3598 if (needed == 0)
3599 argblock = virtual_outgoing_args_rtx;
0a1c58a2 3600 else
d892f288
DD
3601 {
3602 argblock = push_block (GEN_INT (needed), 0, 0);
6dad9361
TS
3603 if (ARGS_GROW_DOWNWARD)
3604 argblock = plus_constant (Pmode, argblock, needed);
d892f288 3605 }
f73ad30e 3606
f8a097cd
JH
3607 /* We only really need to call `copy_to_reg' in the case
3608 where push insns are going to be used to pass ARGBLOCK
3609 to a function call in ARGS. In that case, the stack
3610 pointer changes value from the allocation point to the
3611 call point, and hence the value of
3612 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3613 as well always do it. */
f73ad30e 3614 argblock = copy_to_reg (argblock);
38afb23f
OH
3615 }
3616 }
3617 }
0a1c58a2 3618
38afb23f
OH
3619 if (ACCUMULATE_OUTGOING_ARGS)
3620 {
3621 /* The save/restore code in store_one_arg handles all
3622 cases except one: a constructor call (including a C
3623 function returning a BLKmode struct) to initialize
3624 an argument. */
3625 if (stack_arg_under_construction)
3626 {
ac294f0b
KT
3627 rtx push_size
3628 = GEN_INT (adjusted_args_size.constant
5d059ed9 3629 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
81464b2c 3630 : TREE_TYPE (fndecl))) ? 0
ac294f0b 3631 : reg_parm_stack_space));
38afb23f
OH
3632 if (old_stack_level == 0)
3633 {
9eac0f2a 3634 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f
OH
3635 old_stack_pointer_delta = stack_pointer_delta;
3636 old_pending_adj = pending_stack_adjust;
3637 pending_stack_adjust = 0;
3638 /* stack_arg_under_construction says whether a stack
3639 arg is being constructed at the old stack level.
3640 Pushing the stack gets a clean outgoing argument
3641 block. */
3642 old_stack_arg_under_construction
3643 = stack_arg_under_construction;
3644 stack_arg_under_construction = 0;
3645 /* Make a new map for the new argument list. */
04695783 3646 free (stack_usage_map_buf);
b9eae1a9 3647 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 3648 stack_usage_map = stack_usage_map_buf;
38afb23f 3649 highest_outgoing_arg_in_use = 0;
f73ad30e 3650 }
d3c12306
EB
3651 /* We can pass TRUE as the 4th argument because we just
3652 saved the stack pointer and will restore it right after
3653 the call. */
3a42502d
RH
3654 allocate_dynamic_stack_space (push_size, 0,
3655 BIGGEST_ALIGNMENT, true);
0a1c58a2 3656 }
bfbf933a 3657
38afb23f
OH
3658 /* If argument evaluation might modify the stack pointer,
3659 copy the address of the argument list to a register. */
3660 for (i = 0; i < num_actuals; i++)
3661 if (args[i].pass_on_stack)
3662 {
3663 argblock = copy_addr_to_reg (argblock);
3664 break;
3665 }
3666 }
d329e058 3667
0a1c58a2 3668 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 3669
5ba53785
UB
3670 /* Stack is properly aligned, pops can't safely be deferred during
3671 the evaluation of the arguments. */
3672 NO_DEFER_POP;
3673
ac4ee457
UB
3674 /* Precompute all register parameters. It isn't safe to compute
3675 anything once we have started filling any specific hard regs.
3676 TLS symbols sometimes need a call to resolve. Precompute
3677 register parameters before any stack pointer manipulation
3678 to avoid unaligned stack in the called function. */
3679 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3680
5ba53785
UB
3681 OK_DEFER_POP;
3682
3d9684ae
JG
3683 /* Perform stack alignment before the first push (the last arg). */
3684 if (argblock == 0
f830ddc2 3685 && adjusted_args_size.constant > reg_parm_stack_space
099e9712 3686 && adjusted_args_size.constant != unadjusted_args_size)
4e217aed 3687 {
0a1c58a2
JL
3688 /* When the stack adjustment is pending, we get better code
3689 by combining the adjustments. */
f725a3ec 3690 if (pending_stack_adjust
0a1c58a2 3691 && ! inhibit_defer_pop)
ce48579b
RH
3692 {
3693 pending_stack_adjust
f725a3ec 3694 = (combine_pending_stack_adjustment_and_call
ce48579b 3695 (unadjusted_args_size,
099e9712 3696 &adjusted_args_size,
ce48579b
RH
3697 preferred_unit_stack_boundary));
3698 do_pending_stack_adjust ();
3699 }
0a1c58a2 3700 else if (argblock == 0)
099e9712 3701 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
0a1c58a2 3702 - unadjusted_args_size));
0a1c58a2 3703 }
ebcd0b57
JH
3704 /* Now that the stack is properly aligned, pops can't safely
3705 be deferred during the evaluation of the arguments. */
3706 NO_DEFER_POP;
51bbfa0c 3707
d3c12306
EB
3708 /* Record the maximum pushed stack space size. We need to delay
3709 doing it this far to take into account the optimization done
3710 by combine_pending_stack_adjustment_and_call. */
a11e0df4 3711 if (flag_stack_usage_info
d3c12306
EB
3712 && !ACCUMULATE_OUTGOING_ARGS
3713 && pass
3714 && adjusted_args_size.var == 0)
3715 {
3716 int pushed = adjusted_args_size.constant + pending_stack_adjust;
3717 if (pushed > current_function_pushed_stack_size)
3718 current_function_pushed_stack_size = pushed;
3719 }
3720
09e2bf48 3721 funexp = rtx_for_function_call (fndecl, addr);
51bbfa0c 3722
5039610b
SL
3723 if (CALL_EXPR_STATIC_CHAIN (exp))
3724 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
6de9cd9a
DN
3725 else
3726 static_chain_value = 0;
3727
f73ad30e 3728#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
3729 /* Save the fixed argument area if it's part of the caller's frame and
3730 is clobbered by argument setup for this call. */
f8a097cd 3731 if (ACCUMULATE_OUTGOING_ARGS && pass)
f73ad30e
JH
3732 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3733 &low_to_save, &high_to_save);
b94301c2 3734#endif
51bbfa0c 3735
0a1c58a2
JL
3736 /* Now store (and compute if necessary) all non-register parms.
3737 These come before register parms, since they can require block-moves,
3738 which could clobber the registers used for register parms.
3739 Parms which have partial registers are not stored here,
3740 but we do preallocate space here if they want that. */
51bbfa0c 3741
0a1c58a2 3742 for (i = 0; i < num_actuals; i++)
0196c95e 3743 {
d5e254e1
IE
3744 /* Delay bounds until all other args are stored. */
3745 if (POINTER_BOUNDS_P (args[i].tree_value))
3746 continue;
3747 else if (args[i].reg == 0 || args[i].pass_on_stack)
0196c95e 3748 {
48810515 3749 rtx_insn *before_arg = get_last_insn ();
0196c95e 3750
ddc923b5
MP
3751 /* We don't allow passing huge (> 2^30 B) arguments
3752 by value. It would cause an overflow later on. */
3753 if (adjusted_args_size.constant
3754 >= (1 << (HOST_BITS_PER_INT - 2)))
3755 {
3756 sorry ("passing too large argument on stack");
3757 continue;
3758 }
3759
0196c95e
JJ
3760 if (store_one_arg (&args[i], argblock, flags,
3761 adjusted_args_size.var != 0,
3762 reg_parm_stack_space)
3763 || (pass == 0
3764 && check_sibcall_argument_overlap (before_arg,
3765 &args[i], 1)))
3766 sibcall_failure = 1;
3767 }
3768
2b1c5433 3769 if (args[i].stack)
7d810276
JJ
3770 call_fusage
3771 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3772 gen_rtx_USE (VOIDmode, args[i].stack),
3773 call_fusage);
0196c95e 3774 }
0a1c58a2
JL
3775
3776 /* If we have a parm that is passed in registers but not in memory
3777 and whose alignment does not permit a direct copy into registers,
3778 make a group of pseudos that correspond to each register that we
3779 will later fill. */
3780 if (STRICT_ALIGNMENT)
3781 store_unaligned_arguments_into_pseudos (args, num_actuals);
3782
3783 /* Now store any partially-in-registers parm.
3784 This is the last place a block-move can happen. */
3785 if (reg_parm_seen)
3786 for (i = 0; i < num_actuals; i++)
3787 if (args[i].partial != 0 && ! args[i].pass_on_stack)
c67846f2 3788 {
48810515 3789 rtx_insn *before_arg = get_last_insn ();
c67846f2 3790
99206968
KT
3791 /* On targets with weird calling conventions (e.g. PA) it's
3792 hard to ensure that all cases of argument overlap between
3793 stack and registers work. Play it safe and bail out. */
3794 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3795 {
3796 sibcall_failure = 1;
3797 break;
3798 }
3799
4c6b3b2a
JJ
3800 if (store_one_arg (&args[i], argblock, flags,
3801 adjusted_args_size.var != 0,
3802 reg_parm_stack_space)
3803 || (pass == 0
3804 && check_sibcall_argument_overlap (before_arg,
0cdca92b 3805 &args[i], 1)))
c67846f2
JJ
3806 sibcall_failure = 1;
3807 }
51bbfa0c 3808
2f21e1ba
BS
3809 bool any_regs = false;
3810 for (i = 0; i < num_actuals; i++)
3811 if (args[i].reg != NULL_RTX)
3812 {
3813 any_regs = true;
3814 targetm.calls.call_args (args[i].reg, funtype);
3815 }
3816 if (!any_regs)
3817 targetm.calls.call_args (pc_rtx, funtype);
3818
3819 /* Figure out the register where the value, if any, will come back. */
3820 valreg = 0;
3821 valbnd = 0;
3822 if (TYPE_MODE (rettype) != VOIDmode
3823 && ! structure_value_addr)
3824 {
3825 if (pcc_struct_value)
3826 {
3827 valreg = hard_function_value (build_pointer_type (rettype),
3828 fndecl, NULL, (pass == 0));
3829 if (CALL_WITH_BOUNDS_P (exp))
3830 valbnd = targetm.calls.
3831 chkp_function_value_bounds (build_pointer_type (rettype),
3832 fndecl, (pass == 0));
3833 }
3834 else
3835 {
3836 valreg = hard_function_value (rettype, fndecl, fntype,
3837 (pass == 0));
3838 if (CALL_WITH_BOUNDS_P (exp))
3839 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
3840 fndecl,
3841 (pass == 0));
3842 }
3843
3844 /* If VALREG is a PARALLEL whose first member has a zero
3845 offset, use that. This is for targets such as m68k that
3846 return the same value in multiple places. */
3847 if (GET_CODE (valreg) == PARALLEL)
3848 {
3849 rtx elem = XVECEXP (valreg, 0, 0);
3850 rtx where = XEXP (elem, 0);
3851 rtx offset = XEXP (elem, 1);
3852 if (offset == const0_rtx
3853 && GET_MODE (where) == GET_MODE (valreg))
3854 valreg = where;
3855 }
3856 }
3857
d5e254e1
IE
3858 /* Store all bounds not passed in registers. */
3859 for (i = 0; i < num_actuals; i++)
3860 {
3861 if (POINTER_BOUNDS_P (args[i].tree_value)
3862 && !args[i].reg)
3863 store_bounds (&args[i],
3864 args[i].pointer_arg == -1
3865 ? NULL
3866 : &args[args[i].pointer_arg]);
3867 }
3868
0a1c58a2
JL
3869 /* If register arguments require space on the stack and stack space
3870 was not preallocated, allocate stack space here for arguments
3871 passed in registers. */
5d059ed9 3872 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
81464b2c 3873 && !ACCUMULATE_OUTGOING_ARGS
f725a3ec 3874 && must_preallocate == 0 && reg_parm_stack_space > 0)
0a1c58a2 3875 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12 3876
0a1c58a2
JL
3877 /* Pass the function the address in which to return a
3878 structure value. */
3879 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3880 {
7ae4ad28 3881 structure_value_addr
5ae6cd0d 3882 = convert_memory_address (Pmode, structure_value_addr);
61f71b34 3883 emit_move_insn (struct_value,
0a1c58a2
JL
3884 force_reg (Pmode,
3885 force_operand (structure_value_addr,
3886 NULL_RTX)));
3887
f8cfc6aa 3888 if (REG_P (struct_value))
61f71b34 3889 use_reg (&call_fusage, struct_value);
0a1c58a2 3890 }
c2939b57 3891
05e6ee93 3892 after_args = get_last_insn ();
78bcf3dc
EB
3893 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
3894 static_chain_value, &call_fusage,
3895 reg_parm_seen, flags);
6b8805cf 3896
0cdca92b
DJ
3897 load_register_parameters (args, num_actuals, &call_fusage, flags,
3898 pass == 0, &sibcall_failure);
f725a3ec 3899
0a1c58a2
JL
3900 /* Save a pointer to the last insn before the call, so that we can
3901 later safely search backwards to find the CALL_INSN. */
3902 before_call = get_last_insn ();
51bbfa0c 3903
7d167afd
JJ
3904 /* Set up next argument register. For sibling calls on machines
3905 with register windows this should be the incoming register. */
7d167afd 3906 if (pass == 0)
d5cc9181 3907 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
3c07301f
NF
3908 VOIDmode,
3909 void_type_node,
3910 true);
7d167afd 3911 else
d5cc9181 3912 next_arg_reg = targetm.calls.function_arg (args_so_far,
3c07301f
NF
3913 VOIDmode, void_type_node,
3914 true);
7d167afd 3915
e384e6b5
BS
3916 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3917 {
3918 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3d9684ae 3919 arg_nr = num_actuals - arg_nr - 1;
b3681f13
TV
3920 if (arg_nr >= 0
3921 && arg_nr < num_actuals
3922 && args[arg_nr].reg
e384e6b5
BS
3923 && valreg
3924 && REG_P (valreg)
3925 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3926 call_fusage
3927 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
f7df4a84 3928 gen_rtx_SET (valreg, args[arg_nr].reg),
e384e6b5
BS
3929 call_fusage);
3930 }
0a1c58a2
JL
3931 /* All arguments and registers used for the call must be set up by
3932 now! */
3933
ce48579b 3934 /* Stack must be properly aligned now. */
366de0ce
NS
3935 gcc_assert (!pass
3936 || !(stack_pointer_delta % preferred_unit_stack_boundary));
ebcd0b57 3937
0a1c58a2 3938 /* Generate the actual call instruction. */
6de9cd9a 3939 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
099e9712 3940 adjusted_args_size.constant, struct_value_size,
7d167afd 3941 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
d5cc9181 3942 flags, args_so_far);
0a1c58a2 3943
1e288103 3944 if (flag_ipa_ra)
4f660b15 3945 {
48810515
DM
3946 rtx_call_insn *last;
3947 rtx datum = NULL_RTX;
4f660b15
RO
3948 if (fndecl != NULL_TREE)
3949 {
3950 datum = XEXP (DECL_RTL (fndecl), 0);
3951 gcc_assert (datum != NULL_RTX
3952 && GET_CODE (datum) == SYMBOL_REF);
3953 }
3954 last = last_call_insn ();
3955 add_reg_note (last, REG_CALL_DECL, datum);
3956 }
3957
05e6ee93
MM
3958 /* If the call setup or the call itself overlaps with anything
3959 of the argument setup we probably clobbered our call address.
3960 In that case we can't do sibcalls. */
3961 if (pass == 0
3962 && check_sibcall_argument_overlap (after_args, 0, 0))
3963 sibcall_failure = 1;
3964
bef5d8b6
RS
3965 /* If a non-BLKmode value is returned at the most significant end
3966 of a register, shift the register right by the appropriate amount
3967 and update VALREG accordingly. BLKmode values are handled by the
3968 group load/store machinery below. */
3969 if (!structure_value_addr
3970 && !pcc_struct_value
66de4d7c 3971 && TYPE_MODE (rettype) != VOIDmode
28ed065e 3972 && TYPE_MODE (rettype) != BLKmode
66de4d7c 3973 && REG_P (valreg)
28ed065e 3974 && targetm.calls.return_in_msb (rettype))
bef5d8b6 3975 {
28ed065e 3976 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
bef5d8b6 3977 sibcall_failure = 1;
28ed065e 3978 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
bef5d8b6
RS
3979 }
3980
84b8030f 3981 if (pass && (flags & ECF_MALLOC))
0a1c58a2
JL
3982 {
3983 rtx temp = gen_reg_rtx (GET_MODE (valreg));
48810515 3984 rtx_insn *last, *insns;
0a1c58a2 3985
f725a3ec 3986 /* The return value from a malloc-like function is a pointer. */
28ed065e 3987 if (TREE_CODE (rettype) == POINTER_TYPE)
d154bfa2 3988 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
0a1c58a2
JL
3989
3990 emit_move_insn (temp, valreg);
3991
3992 /* The return value from a malloc-like function can not alias
3993 anything else. */
3994 last = get_last_insn ();
65c5f2a6 3995 add_reg_note (last, REG_NOALIAS, temp);
0a1c58a2
JL
3996
3997 /* Write out the sequence. */
3998 insns = get_insns ();
3999 end_sequence ();
2f937369 4000 emit_insn (insns);
0a1c58a2
JL
4001 valreg = temp;
4002 }
51bbfa0c 4003
6fb5fa3c
DB
4004 /* For calls to `setjmp', etc., inform
4005 function.c:setjmp_warnings that it should complain if
4006 nonvolatile values are live. For functions that cannot
4007 return, inform flow that control does not fall through. */
51bbfa0c 4008
6e14af16 4009 if ((flags & ECF_NORETURN) || pass == 0)
c2939b57 4010 {
570a98eb 4011 /* The barrier must be emitted
0a1c58a2
JL
4012 immediately after the CALL_INSN. Some ports emit more
4013 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 4014
48810515 4015 rtx_insn *last = get_last_insn ();
4b4bf941 4016 while (!CALL_P (last))
0a1c58a2
JL
4017 {
4018 last = PREV_INSN (last);
4019 /* There was no CALL_INSN? */
366de0ce 4020 gcc_assert (last != before_call);
0a1c58a2 4021 }
51bbfa0c 4022
570a98eb 4023 emit_barrier_after (last);
8af61113 4024
f451eeef
JS
4025 /* Stack adjustments after a noreturn call are dead code.
4026 However when NO_DEFER_POP is in effect, we must preserve
4027 stack_pointer_delta. */
4028 if (inhibit_defer_pop == 0)
4029 {
4030 stack_pointer_delta = old_stack_allocated;
4031 pending_stack_adjust = 0;
4032 }
0a1c58a2 4033 }
51bbfa0c 4034
0a1c58a2 4035 /* If value type not void, return an rtx for the value. */
51bbfa0c 4036
28ed065e 4037 if (TYPE_MODE (rettype) == VOIDmode
0a1c58a2 4038 || ignore)
b5cd4ed4 4039 target = const0_rtx;
0a1c58a2
JL
4040 else if (structure_value_addr)
4041 {
3c0cb5de 4042 if (target == 0 || !MEM_P (target))
0a1c58a2 4043 {
3bdf5ad1 4044 target
28ed065e
MM
4045 = gen_rtx_MEM (TYPE_MODE (rettype),
4046 memory_address (TYPE_MODE (rettype),
3bdf5ad1 4047 structure_value_addr));
28ed065e 4048 set_mem_attributes (target, rettype, 1);
0a1c58a2
JL
4049 }
4050 }
4051 else if (pcc_struct_value)
cacbd532 4052 {
0a1c58a2
JL
4053 /* This is the special C++ case where we need to
4054 know what the true target was. We take care to
4055 never use this value more than once in one expression. */
28ed065e 4056 target = gen_rtx_MEM (TYPE_MODE (rettype),
0a1c58a2 4057 copy_to_reg (valreg));
28ed065e 4058 set_mem_attributes (target, rettype, 1);
cacbd532 4059 }
0a1c58a2
JL
4060 /* Handle calls that return values in multiple non-contiguous locations.
4061 The Irix 6 ABI has examples of this. */
4062 else if (GET_CODE (valreg) == PARALLEL)
4063 {
6de9cd9a 4064 if (target == 0)
5ef0b50d 4065 target = emit_group_move_into_temps (valreg);
1d1b7dc4
RS
4066 else if (rtx_equal_p (target, valreg))
4067 ;
4068 else if (GET_CODE (target) == PARALLEL)
4069 /* Handle the result of a emit_group_move_into_temps
4070 call in the previous pass. */
4071 emit_group_move (target, valreg);
4072 else
28ed065e
MM
4073 emit_group_store (target, valreg, rettype,
4074 int_size_in_bytes (rettype));
0a1c58a2
JL
4075 }
4076 else if (target
28ed065e 4077 && GET_MODE (target) == TYPE_MODE (rettype)
0a1c58a2
JL
4078 && GET_MODE (target) == GET_MODE (valreg))
4079 {
51caaefe
EB
4080 bool may_overlap = false;
4081
f2d18690
KK
4082 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4083 reg to a plain register. */
3fb30019
RS
4084 if (!REG_P (target) || HARD_REGISTER_P (target))
4085 valreg = avoid_likely_spilled_reg (valreg);
f2d18690 4086
51caaefe
EB
4087 /* If TARGET is a MEM in the argument area, and we have
4088 saved part of the argument area, then we can't store
4089 directly into TARGET as it may get overwritten when we
4090 restore the argument save area below. Don't work too
4091 hard though and simply force TARGET to a register if it
4092 is a MEM; the optimizer is quite likely to sort it out. */
4093 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4094 for (i = 0; i < num_actuals; i++)
4095 if (args[i].save_area)
4096 {
4097 may_overlap = true;
4098 break;
4099 }
0219237c 4100
51caaefe
EB
4101 if (may_overlap)
4102 target = copy_to_reg (valreg);
4103 else
4104 {
4105 /* TARGET and VALREG cannot be equal at this point
4106 because the latter would not have
4107 REG_FUNCTION_VALUE_P true, while the former would if
4108 it were referring to the same register.
4109
4110 If they refer to the same register, this move will be
4111 a no-op, except when function inlining is being
4112 done. */
4113 emit_move_insn (target, valreg);
4114
4115 /* If we are setting a MEM, this code must be executed.
4116 Since it is emitted after the call insn, sibcall
4117 optimization cannot be performed in that case. */
4118 if (MEM_P (target))
4119 sibcall_failure = 1;
4120 }
0a1c58a2 4121 }
0a1c58a2 4122 else
3fb30019 4123 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
51bbfa0c 4124
cde0f3fd
PB
4125 /* If we promoted this return value, make the proper SUBREG.
4126 TARGET might be const0_rtx here, so be careful. */
4127 if (REG_P (target)
28ed065e
MM
4128 && TYPE_MODE (rettype) != BLKmode
4129 && GET_MODE (target) != TYPE_MODE (rettype))
61f71b34 4130 {
28ed065e 4131 tree type = rettype;
cde0f3fd
PB
4132 int unsignedp = TYPE_UNSIGNED (type);
4133 int offset = 0;
ef4bddc2 4134 machine_mode pmode;
cde0f3fd
PB
4135
4136 /* Ensure we promote as expected, and get the new unsignedness. */
4137 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4138 funtype, 1);
4139 gcc_assert (GET_MODE (target) == pmode);
4140
4141 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
4142 && (GET_MODE_SIZE (GET_MODE (target))
4143 > GET_MODE_SIZE (TYPE_MODE (type))))
366de0ce 4144 {
cde0f3fd
PB
4145 offset = GET_MODE_SIZE (GET_MODE (target))
4146 - GET_MODE_SIZE (TYPE_MODE (type));
4147 if (! BYTES_BIG_ENDIAN)
4148 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
4149 else if (! WORDS_BIG_ENDIAN)
4150 offset %= UNITS_PER_WORD;
366de0ce 4151 }
cde0f3fd
PB
4152
4153 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4154 SUBREG_PROMOTED_VAR_P (target) = 1;
362d42dc 4155 SUBREG_PROMOTED_SET (target, unsignedp);
61f71b34 4156 }
84b55618 4157
0a1c58a2
JL
4158 /* If size of args is variable or this was a constructor call for a stack
4159 argument, restore saved stack-pointer value. */
51bbfa0c 4160
9dd9bf80 4161 if (old_stack_level)
0a1c58a2 4162 {
48810515 4163 rtx_insn *prev = get_last_insn ();
9a08d230 4164
9eac0f2a 4165 emit_stack_restore (SAVE_BLOCK, old_stack_level);
38afb23f 4166 stack_pointer_delta = old_stack_pointer_delta;
9a08d230 4167
faf7a23d 4168 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
9a08d230 4169
0a1c58a2 4170 pending_stack_adjust = old_pending_adj;
d25cee4d 4171 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
0a1c58a2
JL
4172 stack_arg_under_construction = old_stack_arg_under_construction;
4173 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4174 stack_usage_map = initial_stack_usage_map;
0a1c58a2
JL
4175 sibcall_failure = 1;
4176 }
f8a097cd 4177 else if (ACCUMULATE_OUTGOING_ARGS && pass)
0a1c58a2 4178 {
51bbfa0c 4179#ifdef REG_PARM_STACK_SPACE
0a1c58a2 4180 if (save_area)
b820d2b8
AM
4181 restore_fixed_argument_area (save_area, argblock,
4182 high_to_save, low_to_save);
b94301c2 4183#endif
51bbfa0c 4184
0a1c58a2
JL
4185 /* If we saved any argument areas, restore them. */
4186 for (i = 0; i < num_actuals; i++)
4187 if (args[i].save_area)
4188 {
ef4bddc2 4189 machine_mode save_mode = GET_MODE (args[i].save_area);
0a1c58a2
JL
4190 rtx stack_area
4191 = gen_rtx_MEM (save_mode,
4192 memory_address (save_mode,
4193 XEXP (args[i].stack_slot, 0)));
4194
4195 if (save_mode != BLKmode)
4196 emit_move_insn (stack_area, args[i].save_area);
4197 else
44bb111a 4198 emit_block_move (stack_area, args[i].save_area,
e7949876 4199 GEN_INT (args[i].locate.size.constant),
44bb111a 4200 BLOCK_OP_CALL_PARM);
0a1c58a2 4201 }
51bbfa0c 4202
0a1c58a2
JL
4203 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4204 stack_usage_map = initial_stack_usage_map;
4205 }
51bbfa0c 4206
d33606c3
EB
4207 /* If this was alloca, record the new stack level. */
4208 if (flags & ECF_MAY_BE_ALLOCA)
4209 record_new_stack_level ();
51bbfa0c 4210
0a1c58a2
JL
4211 /* Free up storage we no longer need. */
4212 for (i = 0; i < num_actuals; ++i)
04695783 4213 free (args[i].aligned_regs);
0a1c58a2 4214
2f21e1ba
BS
4215 targetm.calls.end_call_args ();
4216
0a1c58a2
JL
4217 insns = get_insns ();
4218 end_sequence ();
4219
4220 if (pass == 0)
4221 {
4222 tail_call_insns = insns;
4223
0a1c58a2
JL
4224 /* Restore the pending stack adjustment now that we have
4225 finished generating the sibling call sequence. */
1503a7ec 4226
7f2f0a01 4227 restore_pending_stack_adjust (&save);
099e9712
JH
4228
4229 /* Prepare arg structure for next iteration. */
f725a3ec 4230 for (i = 0; i < num_actuals; i++)
099e9712
JH
4231 {
4232 args[i].value = 0;
4233 args[i].aligned_regs = 0;
4234 args[i].stack = 0;
4235 }
c67846f2
JJ
4236
4237 sbitmap_free (stored_args_map);
48810515 4238 internal_arg_pointer_exp_state.scan_start = NULL;
9771b263 4239 internal_arg_pointer_exp_state.cache.release ();
0a1c58a2
JL
4240 }
4241 else
38afb23f
OH
4242 {
4243 normal_call_insns = insns;
4244
4245 /* Verify that we've deallocated all the stack we used. */
6e14af16 4246 gcc_assert ((flags & ECF_NORETURN)
366de0ce
NS
4247 || (old_stack_allocated
4248 == stack_pointer_delta - pending_stack_adjust));
38afb23f 4249 }
fadb729c
JJ
4250
4251 /* If something prevents making this a sibling call,
4252 zero out the sequence. */
4253 if (sibcall_failure)
48810515 4254 tail_call_insns = NULL;
6de9cd9a
DN
4255 else
4256 break;
0a1c58a2
JL
4257 }
4258
1ea7e6ad 4259 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
6de9cd9a
DN
4260 arguments too, as argument area is now clobbered by the call. */
4261 if (tail_call_insns)
0a1c58a2 4262 {
6de9cd9a 4263 emit_insn (tail_call_insns);
e3b5732b 4264 crtl->tail_call_emit = true;
0a1c58a2
JL
4265 }
4266 else
9a385c2d
DM
4267 {
4268 emit_insn (normal_call_insns);
4269 if (try_tail_call)
4270 /* Ideally we'd emit a message for all of the ways that it could
4271 have failed. */
4272 maybe_complain_about_tail_call (exp, "tail call production failed");
4273 }
51bbfa0c 4274
0a1c58a2 4275 currently_expanding_call--;
8e6a59fe 4276
04695783 4277 free (stack_usage_map_buf);
765fc0f7 4278 free (args);
d9725c41 4279
d5e254e1
IE
4280 /* Join result with returned bounds so caller may use them if needed. */
4281 target = chkp_join_splitted_slot (target, valbnd);
4282
51bbfa0c
RS
4283 return target;
4284}
ded9bf77 4285
6de9cd9a
DN
4286/* A sibling call sequence invalidates any REG_EQUIV notes made for
4287 this function's incoming arguments.
4288
4289 At the start of RTL generation we know the only REG_EQUIV notes
29d51cdb
SB
4290 in the rtl chain are those for incoming arguments, so we can look
4291 for REG_EQUIV notes between the start of the function and the
4292 NOTE_INSN_FUNCTION_BEG.
6de9cd9a
DN
4293
4294 This is (slight) overkill. We could keep track of the highest
4295 argument we clobber and be more selective in removing notes, but it
4296 does not seem to be worth the effort. */
29d51cdb 4297
6de9cd9a
DN
4298void
4299fixup_tail_calls (void)
4300{
48810515 4301 rtx_insn *insn;
29d51cdb
SB
4302
4303 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4304 {
a31830a7
SB
4305 rtx note;
4306
29d51cdb
SB
4307 /* There are never REG_EQUIV notes for the incoming arguments
4308 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4309 if (NOTE_P (insn)
a38e7aa5 4310 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
29d51cdb
SB
4311 break;
4312
a31830a7
SB
4313 note = find_reg_note (insn, REG_EQUIV, 0);
4314 if (note)
4315 remove_note (insn, note);
4316 note = find_reg_note (insn, REG_EQUIV, 0);
4317 gcc_assert (!note);
29d51cdb 4318 }
6de9cd9a
DN
4319}
4320
ded9bf77
AH
4321/* Traverse a list of TYPES and expand all complex types into their
4322 components. */
2f2b4a02 4323static tree
ded9bf77
AH
4324split_complex_types (tree types)
4325{
4326 tree p;
4327
42ba5130
RH
4328 /* Before allocating memory, check for the common case of no complex. */
4329 for (p = types; p; p = TREE_CHAIN (p))
4330 {
4331 tree type = TREE_VALUE (p);
4332 if (TREE_CODE (type) == COMPLEX_TYPE
4333 && targetm.calls.split_complex_arg (type))
c22cacf3 4334 goto found;
42ba5130
RH
4335 }
4336 return types;
4337
4338 found:
ded9bf77
AH
4339 types = copy_list (types);
4340
4341 for (p = types; p; p = TREE_CHAIN (p))
4342 {
4343 tree complex_type = TREE_VALUE (p);
4344
42ba5130
RH
4345 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4346 && targetm.calls.split_complex_arg (complex_type))
ded9bf77
AH
4347 {
4348 tree next, imag;
4349
4350 /* Rewrite complex type with component type. */
4351 TREE_VALUE (p) = TREE_TYPE (complex_type);
4352 next = TREE_CHAIN (p);
4353
4354 /* Add another component type for the imaginary part. */
4355 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4356 TREE_CHAIN (p) = imag;
4357 TREE_CHAIN (imag) = next;
4358
4359 /* Skip the newly created node. */
4360 p = TREE_CHAIN (p);
4361 }
4362 }
4363
4364 return types;
4365}
51bbfa0c 4366\f
/* Output a library call to function FUN (a SYMBOL_REF rtx).

   RETVAL is nonzero when the caller wants the call's return value
   (it is zero for emit_library_call).  VALUE, if nonnull, is where the
   result should be stored; FN_TYPE classifies the libcall (const, pure,
   noreturn, ...); OUTMODE is the machine mode of the return value
   (VOIDmode when there is none); NARGS counts the rtx/machine_mode
   pairs read from the va_list P.  The remaining parameters are
   documented on the emit_library_call wrapper below.  */

static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
			   enum libcall_type fn_type,
			   machine_mode outmode, int nargs, va_list p)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  /* Todo, choose the correct decl type of orgfun. Sadly this information
     isn't present here, so we default to native calling abi here.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  /* Per-argument record; a stripped-down analogue of struct arg_data
     used by expand_call.  */
  struct arg
  {
    rtx value;
    machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx_insn *before_call;
  bool have_push_fusage;
  tree tfom;			/* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     save, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;            /* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  /* Translate the libcall classification into ECF_* flags.  */
  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags &= ~ECF_NOTHROW;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  rtx pointer_reg
	    = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
	  pcc_struct_value = 1;
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
	  struct_value_size = GET_MODE_SIZE (outmode);
	  if (value != 0 && MEM_P (value))
	    mem_value = value;
	  else
	    mem_value = assign_temp (tfom, 1, 1);
#endif
	  /* This call returns a big structure.  */
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
	}
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
	  && !(CONSTANT_P (addr)
	       && targetm.legitimate_constant_p (Pmode, addr)))
	addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = targetm.calls.function_arg (args_so_far,
						      Pmode, NULL_TREE, true);
      gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
						   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   reg_parm_stack_space, 0,
			   NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);

      count++;
    }

  /* Decode the remaining NARGS rtx/mode pairs from P and decide how each
     will be passed (register, stack slot, or by reference).  */
  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      machine_mode mode = (machine_mode) va_arg (p, int);
      int unsigned_p = 0;

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
		  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
	  && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
	val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
	{
	  rtx slot;
	  int must_copy
	    = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);

	  /* If this was a CONST function, it is now PURE since it now
	     reads memory.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }

	  if (MEM_P (val) && !must_copy)
	    {
	      tree val_expr = MEM_EXPR (val);
	      if (val_expr)
		mark_addressable (val_expr);
	      slot = val;
	    }
	  else
	    {
	      slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
				  1, 1);
	      emit_move_insn (slot, val);
	    }

	  /* Record in CALL_FUSAGE that the callee reads (and, when it may
	     modify the copy, clobbers) the referenced slot.  */
	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					   gen_rtx_USE (VOIDmode, slot),
					   call_fusage);
	  if (must_copy)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode,
							      slot),
					     call_fusage);

	  /* The argument actually passed is the slot's address.  */
	  mode = Pmode;
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	}

      mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
      argvec[count].mode = mode;
      argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
						      NULL_TREE, true);

      argvec[count].partial
	= targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);

      if (argvec[count].reg == 0
	  || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	{
	  locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			       1,
#else
			       argvec[count].reg != 0,
#endif
			       reg_parm_stack_space, argvec[count].partial,
			       NULL_TREE, &args_size, &argvec[count].locate);
	  args_size.constant += argvec[count].locate.size.constant;
	  gcc_assert (!argvec[count].locate.size.var);
	}
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	argvec[count].locate.where_pad =
	  BLOCK_REG_PADDING (mode, NULL_TREE,
			     GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
#endif

      targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
    }

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  /* Round the total argument size so the stack stays aligned to
     STACK_BYTES, accounting for any pending stack-pointer delta.  */
  args_size.constant = (((args_size.constant
			  + stack_pointer_delta
			  + STACK_BYTES - 1)
			 / STACK_BYTES
			 * STACK_BYTES)
			- stack_pointer_delta);

  args_size.constant = MAX (args_size.constant,
			    reg_parm_stack_space);

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
    args_size.constant -= reg_parm_stack_space;

  if (args_size.constant > crtl->outgoing_args_size)
    crtl->outgoing_args_size = args_size.constant;

  if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
    {
      int pushed = args_size.constant + pending_stack_adjust;
      if (pushed > current_function_pushed_stack_size)
	current_function_pushed_stack_size = pushed;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
	 the evaluation of a parm to clobber something we have already
	 written to the stack.  Since most function calls on RISC machines
	 do not use the stack, this is uncommon, but must work correctly.

	 Therefore, we save any area of the stack that was already written
	 and that we are using.  Here we set up to do this by making a new
	 stack usage map from the old one.

	 Another approach might be to try to reorder the argument
	 evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

      /* Since we will be writing into the entire argument area, the
	 map must be allocated for its entire size, not just the part that
	 is the responsibility of the caller.  */
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	needed += reg_parm_stack_space;

      if (ARGS_GROW_DOWNWARD)
	highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					   needed + 1);
      else
	highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);

      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
	memcpy (stack_usage_map, initial_stack_usage_map,
		initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
	       highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
	 and real regs afterwards.  Loop optimization, for example, can create
	 new libcalls after we've instantiated the virtual regs, and if we
	 use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
	argblock = plus_constant (Pmode, stack_pointer_rtx,
				  STACK_POINTER_OFFSET);
      else
	argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* We push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));

  argnum = nargs - 1;

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
	 may clobber it.  If the fixed area has been used for previous
	 parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					    &low_to_save, &high_to_save);
    }
#endif

  /* When expanding a normal call, args are stored in push order,
     which is the reverse of what we have here.  */
  bool any_regs = false;
  for (int i = nargs; i-- > 0; )
    if (argvec[i].reg != NULL_RTX)
      {
	targetm.calls.call_args (argvec[i].reg, NULL_TREE);
	any_regs = true;
      }
  if (!any_regs)
    targetm.calls.call_args (pc_rtx, NULL_TREE);

  /* Push the args that need to be pushed.  */

  have_push_fusage = false;

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum--)
    {
      machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      unsigned int parm_align = argvec[argnum].locate.boundary;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
	{
	  rtx use;

	  if (ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If this is being stored into a pre-allocated, fixed-size,
		 stack area, save any previous data at that location.  */

	      if (ARGS_GROW_DOWNWARD)
		{
		  /* stack_slot is negative, but we want to index stack_usage_map
		     with positive values.  */
		  upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
		  lower_bound = upper_bound - argvec[argnum].locate.size.constant;
		}
	      else
		{
		  lower_bound = argvec[argnum].locate.slot_offset.constant;
		  upper_bound = lower_bound + argvec[argnum].locate.size.constant;
		}

	      i = lower_bound;
	      /* Don't worry about things in the fixed argument area;
		 it has already been saved.  */
	      if (i < reg_parm_stack_space)
		i = reg_parm_stack_space;
	      while (i < upper_bound && stack_usage_map[i] == 0)
		i++;

	      if (i < upper_bound)
		{
		  /* We need to make a save area.  */
		  unsigned int size
		    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
		  machine_mode save_mode
		    = mode_for_size (size, MODE_INT, 1);
		  rtx adr
		    = plus_constant (Pmode, argblock,
				     argvec[argnum].locate.offset.constant);
		  rtx stack_area
		    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

		  if (save_mode == BLKmode)
		    {
		      argvec[argnum].save_area
			= assign_stack_temp (BLKmode,
					     argvec[argnum].locate.size.constant
					     );

		      emit_block_move (validize_mem
				         (copy_rtx (argvec[argnum].save_area)),
				       stack_area,
				       GEN_INT (argvec[argnum].locate.size.constant),
				       BLOCK_OP_CALL_PARM);
		    }
		  else
		    {
		      argvec[argnum].save_area = gen_reg_rtx (save_mode);

		      emit_move_insn (argvec[argnum].save_area, stack_area);
		    }
		}
	    }

	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
			  partial, reg, 0, argblock,
			  GEN_INT (argvec[argnum].locate.offset.constant),
			  reg_parm_stack_space,
			  ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);

	  /* Now mark the segment we just used.  */
	  if (ACCUMULATE_OUTGOING_ARGS)
	    for (i = lower_bound; i < upper_bound; i++)
	      stack_usage_map[i] = 1;

	  NO_DEFER_POP;

	  /* Indicate argument access so that alias.c knows that these
	     values are live.  */
	  if (argblock)
	    use = plus_constant (Pmode, argblock,
				 argvec[argnum].locate.offset.constant);
	  else if (have_push_fusage)
	    continue;
	  else
	    {
	      /* When arguments are pushed, trying to tell alias.c where
		 exactly this argument is won't work, because the
		 auto-increment causes confusion.  So we merely indicate
		 that we access something with a known mode somewhere on
		 the stack.  */
	      use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_rtx_SCRATCH (Pmode));
	      have_push_fusage = true;
	    }
	  use = gen_rtx_MEM (argvec[argnum].mode, use);
	  use = gen_rtx_USE (VOIDmode, use);
	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
	}
    }

  argnum = nargs - 1;

  fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum--)
    {
      machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
#ifdef BLOCK_REG_PADDING
      int size = 0;
#endif

      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
	{
	  emit_move_insn (reg, val);
#ifdef BLOCK_REG_PADDING
	  size = GET_MODE_SIZE (argvec[argnum].mode);

	  /* Copied from load_register_parameters.  */

	  /* Handle case where we have a value that needs shifting
	     up to the msb.  eg. a QImode value and we're padding
	     upward on a BYTES_BIG_ENDIAN machine.  */
	  if (size < UNITS_PER_WORD
	      && (argvec[argnum].locate.where_pad
		  == (BYTES_BIG_ENDIAN ? upward : downward)))
	    {
	      rtx x;
	      int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      /* Assigning REG here rather than a temp makes CALL_FUSAGE
		 report the whole reg as used.  Strictly speaking, the
		 call only uses SIZE bytes at the msb end, but it doesn't
		 seem worth generating rtl to say that.  */
	      reg = gen_rtx_REG (word_mode, REGNO (reg));
	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
	      if (x != reg)
		emit_move_insn (reg, x);
	    }
#endif
	}

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	use_group_regs (&call_fusage, reg);
      else if (reg != 0)
	{
	  int partial = argvec[count].partial;
	  if (partial)
	    {
	      int nregs;
	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	      use_regs (&call_fusage, REGNO (reg), nregs);
	    }
	  else
	    use_reg (&call_fusage, reg);
	}
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
		      force_reg (Pmode,
				 force_operand (XEXP (mem_value, 0),
						NULL_RTX)));
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
	    ? hard_libcall_value (outmode, orgfun) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
		& (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));

  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (tfom, NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       targetm.calls.function_arg (args_so_far,
					   VOIDmode, void_type_node, true),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);

  if (flag_ipa_ra)
    {
      rtx datum = orgfun;
      gcc_assert (GET_CODE (datum) == SYMBOL_REF);
      rtx_call_insn *last = last_call_insn ();
      add_reg_note (last, REG_CALL_DECL, datum);
    }

  /* Right-shift returned value if necessary.  */
  if (!pcc_struct_value
      && TYPE_MODE (tfom) != BLKmode
      && targetm.calls.return_in_msb (tfom))
    {
      shift_return_value (TYPE_MODE (tfom), false, valreg);
      valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
    }

  targetm.calls.end_call_args ();

  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */
  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
	 immediately after the CALL_INSN.  Some ports emit more than
	 just a CALL_INSN above, so we must search for it here.  */
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      emit_barrier_after (last);
    }

  /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
     and LCT_RETURNS_TWICE, cannot perform non-local gotos.  */
  if (flags & ECF_NOTHROW)
    {
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      make_reg_eh_region_note_nothrow_nononlocal (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
	}
      else
	{
	  /* Convert to the proper mode if a promotion has been active.  */
	  if (GET_MODE (valreg) != outmode)
	    {
	      int unsignedp = TYPE_UNSIGNED (tfom);

	      gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
						 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
			  == GET_MODE (valreg));
	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
	    }

	  if (value != 0)
	    emit_move_insn (value, valreg);
	  else
	    value = valreg;
	}
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	restore_fixed_argument_area (save_area, argblock,
				     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx adr = plus_constant (Pmode, argblock,
				     argvec[count].locate.offset.constant);
	    rtx stack_area = gen_rtx_MEM (save_mode,
					  memory_address (save_mode, adr));

	    if (save_mode == BLKmode)
	      emit_block_move (stack_area,
			       validize_mem
			         (copy_rtx (argvec[count].save_area)),
			       GEN_INT (argvec[count].locate.size.constant),
			       BLOCK_OP_CALL_PARM);
	    else
	      emit_move_insn (stack_area, argvec[count].save_area);
	  }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  free (stack_usage_map_buf);

  return value;

}
5130\f
5131/* Output a library call to function FUN (a SYMBOL_REF rtx)
5132 (emitting the queue unless NO_QUEUE is nonzero),
5133 for a value of mode OUTMODE,
5134 with NARGS different arguments, passed as alternating rtx values
5135 and machine_modes to convert them to.
de76b467 5136
84b8030f
KZ
5137 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5138 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
5139 other types of library calls. */
de76b467
JH
5140
5141void
e34d07f2 5142emit_library_call (rtx orgfun, enum libcall_type fn_type,
ef4bddc2 5143 machine_mode outmode, int nargs, ...)
de76b467 5144{
e34d07f2 5145 va_list p;
d329e058 5146
e34d07f2 5147 va_start (p, nargs);
2a8f6b90 5148 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
e34d07f2 5149 va_end (p);
de76b467
JH
5150}
5151\f
5152/* Like emit_library_call except that an extra argument, VALUE,
5153 comes second and says where to store the result.
5154 (If VALUE is zero, this function chooses a convenient way
5155 to return the value.
5156
5157 This function returns an rtx for where the value is to be found.
5158 If VALUE is nonzero, VALUE is returned. */
5159
5160rtx
e34d07f2
KG
5161emit_library_call_value (rtx orgfun, rtx value,
5162 enum libcall_type fn_type,
ef4bddc2 5163 machine_mode outmode, int nargs, ...)
de76b467 5164{
6268b922 5165 rtx result;
e34d07f2 5166 va_list p;
d329e058 5167
e34d07f2 5168 va_start (p, nargs);
6268b922
KG
5169 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
5170 nargs, p);
e34d07f2 5171 va_end (p);
de76b467 5172
6268b922 5173 return result;
322e3e34
RK
5174}
5175\f
d5e254e1
IE
5176
5177/* Store pointer bounds argument ARG into Bounds Table entry
5178 associated with PARM. */
5179static void
5180store_bounds (struct arg_data *arg, struct arg_data *parm)
5181{
5182 rtx slot = NULL, ptr = NULL, addr = NULL;
5183
5184 /* We may pass bounds not associated with any pointer. */
5185 if (!parm)
5186 {
5187 gcc_assert (arg->special_slot);
5188 slot = arg->special_slot;
5189 ptr = const0_rtx;
5190 }
5191 /* Find pointer associated with bounds and where it is
5192 passed. */
5193 else
5194 {
5195 if (!parm->reg)
5196 {
5197 gcc_assert (!arg->special_slot);
5198
5199 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
5200 }
5201 else if (REG_P (parm->reg))
5202 {
5203 gcc_assert (arg->special_slot);
5204 slot = arg->special_slot;
5205
5206 if (MEM_P (parm->value))
5207 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
5208 else if (REG_P (parm->value))
5209 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
5210 else
5211 {
5212 gcc_assert (!arg->pointer_offset);
5213 ptr = parm->value;
5214 }
5215 }
5216 else
5217 {
5218 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
5219
5220 gcc_assert (arg->special_slot);
5221 slot = arg->special_slot;
5222
5223 if (parm->parallel_value)
5224 ptr = chkp_get_value_with_offs (parm->parallel_value,
5225 GEN_INT (arg->pointer_offset));
5226 else
5227 gcc_unreachable ();
5228 }
5229 }
5230
5231 /* Expand bounds. */
5232 if (!arg->value)
5233 arg->value = expand_normal (arg->tree_value);
5234
5235 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
5236}
5237
51bbfa0c
RS
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is the ECF_* mask for the call; ECF_SIBCALL and
   ECF_MAY_BE_ALLOCA are the bits examined here.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
   so must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the fixed area in which register
   arguments may have been homed; slots below it need no save/restore.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  /* [LOWER_BOUND, UPPER_BOUND) indexes the stack_usage_map region this
     argument's stack slot occupies, when one is computed below.  */
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
	  if (ARGS_GROW_DOWNWARD)
	    {
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	      else
		upper_bound = 0;

	      lower_bound = upper_bound - arg->locate.size.constant;
	    }
	  else
	    {
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	      else
		lower_bound = 0;

	      upper_bound = lower_bound + arg->locate.size.constant;
	    }

	  i = lower_bound;
	  /* Don't worry about things in the fixed argument area;
	     it has already been saved.  */
	  if (i < reg_parm_stack_space)
	    i = reg_parm_stack_space;
	  /* Scan for the first slot in range already marked in use.  */
	  while (i < upper_bound && stack_usage_map[i] == 0)
	    i++;

	  if (i < upper_bound)
	    {
	      /* We need to make a save area.  */
	      unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
	      machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  /* No integer mode fits: save into a temporary stack
		     slot with a block copy.  */
		  arg->save_area
		    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
				   stack_area,
				   GEN_INT (arg->locate.size.constant),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  /* Small enough for a single register move.  */
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting object (or for any other reason) the mode
	 doesn't agree, convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
					       arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  /* When padding downward, the pad bytes limit the usable
	     alignment of the argument itself.  */
	  int pad = used - size;
	  if (pad)
	    {
	      unsigned int pad_align = least_bit_hwi (pad) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, pad_align);
	    }
	}

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      if (!emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
		      parm_align, partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
	sibcall_failure = 1;

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype),
				  EXPAND_NORMAL);
	}

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align = least_bit_hwi (excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  /* Only addresses based on the incoming argument pointer can
	     overlap the outgoing argument area of a sibcall.  */
	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		     crtl->args.internal_arg_pointer
		  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
	    {
	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* arg.locate doesn't contain the pretend_args_size offset,
		 it's part of argblock.  Ensure we don't count it in I.  */
	      if (STACK_GROWS_DOWNWARD)
		i -= crtl->args.pretend_args_size;
	      else
		i += crtl->args.pretend_args_size;

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && arg->locate.size.var == 0
			  && CONST_INT_P (size_rtx));

	      if (arg->locate.offset.constant > i)
		{
		  /* Destination starts above the source: overlap iff the
		     source extends past the destination's start.  */
		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->locate.offset.constant < i)
		{
		  /* Use arg->locate.size.constant instead of size_rtx
		     because we only care about the part of the argument
		     on the stack.  */
		  if (i < (arg->locate.offset.constant
			   + arg->locate.size.constant))
		    sibcall_failure = 1;
		}
	      else
		{
		  /* Even though they appear to be at the same location,
		     if part of the outgoing argument is in registers,
		     they aren't really at the same location.  Check for
		     this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (arg->locate.size.constant != INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad), false);

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      /* A PARALLEL destination needs the value pre-loaded into
	 temporaries so the group store can happen later.  */
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}
a4b1b92a 5612
fe984136 5613/* Nonzero if we do not know how to pass TYPE solely in registers. */
a4b1b92a 5614
fe984136 5615bool
ef4bddc2 5616must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
586de218 5617 const_tree type)
fe984136
RH
5618{
5619 if (!type)
5620 return false;
5621
5622 /* If the type has variable size... */
5623 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5624 return true;
a4b1b92a 5625
fe984136
RH
5626 /* If the type is marked as addressable (it is required
5627 to be constructed into the stack)... */
5628 if (TREE_ADDRESSABLE (type))
5629 return true;
5630
5631 return false;
5632}
a4b1b92a 5633
7ae4ad28 5634/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
fe984136
RH
5635 takes trailing padding of a structure into account. */
5636/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
a4b1b92a
RH
5637
5638bool
ef4bddc2 5639must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
a4b1b92a
RH
5640{
5641 if (!type)
40cdfd5a 5642 return false;
a4b1b92a
RH
5643
5644 /* If the type has variable size... */
5645 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5646 return true;
5647
5648 /* If the type is marked as addressable (it is required
5649 to be constructed into the stack)... */
5650 if (TREE_ADDRESSABLE (type))
5651 return true;
5652
5653 /* If the padding and mode of the type is such that a copy into
5654 a register would put it into the wrong part of the register. */
5655 if (mode == BLKmode
5656 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5657 && (FUNCTION_ARG_PADDING (mode, type)
5658 == (BYTES_BIG_ENDIAN ? upward : downward)))
5659 return true;
5660
5661 return false;
5662}
6bf29a7e
MS
5663
5664/* Tell the garbage collector about GTY markers in this source file. */
5665#include "gt-calls.h"