/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
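
/* For example, on a target where PREFERRED_STACK_BOUNDARY is 128 bits
   and BITS_PER_UNIT is 8, STACK_BYTES evaluates to 16, so outgoing
   argument blocks get rounded up to 16-byte multiples.  (Illustrative
   values; both macros are target-defined.)  */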

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds the
     number of a special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound
     to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer within this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
	       && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
      if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	funexp = force_reg (Pmode, funexp);
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	use_reg (call_fusage, chain);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call, funmem, pat;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg, GEN_INT (struct_value_size));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (n_popped > 0
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				GEN_INT (struct_value_size));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realign must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
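
/* Illustrative example: on a target whose callees pop their own
   arguments, a call with 8 bytes of stack arguments takes the
   n_popped > 0 path above, emits a call_pop/call_value_pop pattern,
   reduces stack_pointer_delta by 8, and records the result in a
   REG_ARGS_SIZE note so later passes can track the stack adjustment.
   (Hypothetical numbers; the popping convention is target-defined.)  */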

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_'
	      && name[2] == 'b'
	      && !strncmp (name + 3, "uiltin_", 7))
	    tname += 10;
	  else if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE | ECF_LEAF;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_NORETURN;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork"))
	       || (tname[0] == 'g' && tname[1] == 'e'
		   && !strcmp (tname, "getcontext")))
	flags |= ECF_RETURNS_TWICE | ECF_LEAF;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_NORETURN;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	flags |= ECF_MAY_BE_ALLOCA;
	break;
      default:
	break;
      }

  return flags;
}
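
/* For instance, a file-scope `extern int setjmp (jmp_buf);' declaration
   is matched here purely by name and gains ECF_RETURNS_TWICE | ECF_LEAF,
   whereas a setjmp declared inside a function body fails the
   DECL_CONTEXT test above and receives no special flags.  */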

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
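
/* As an illustration: an internal "fn spec" string beginning with '1'
   (used for functions known to return their first argument) yields
   ERF_RETURNS_ARG | 0 here, while the 'm' spec of malloc-like
   functions yields ERF_NOALIAS.  */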

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
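
/* Example: a declaration carrying both `const' and `noreturn'
   attributes is TREE_READONLY and TREE_THIS_VOLATILE, so it ends up
   with ECF_CONST | ECF_NORETURN and, by the final test above,
   ECF_LOOPING_CONST_OR_PURE as well.  */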

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}
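
/* Concretely: a C++ class type with a non-trivial copy constructor is
   TREE_ADDRESSABLE and is therefore always passed by invisible
   reference, as is any variable-sized type; everything else is left to
   the target's pass_by_reference hook.  */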

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
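
/* A typical case handled above (illustrative): a TLS symbol used as an
   argument is CONSTANT_P but may not be a legitimate constant, because
   resolving its address can itself require a call; forcing it into a
   pseudo first keeps that hidden call from clobbering argument
   registers that have already been loaded.  */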

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}
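
/* For instance (hypothetical numbers), with a 32-byte register-parameter
   area of which only bytes 4..11 are marked in stack_usage_map, the loop
   above saves just that 8-byte window, using a single integer-mode
   pseudo when the alignment permits and a BLKmode stack temporary
   otherwise.  */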

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}
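
/* Worked example (illustrative): for a 2-byte struct passed in a 32-bit
   register, bytes (2) < UNITS_PER_WORD (4), so on a big-endian or
   downward-padding target endian_correction becomes 32 - 16 = 16 and
   the 16 meaningful bits are stored at that bit offset rather than at
   offset 0.  */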
1104
d7cdf113 1105/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
b8698a0f 1106 CALL_EXPR EXP.
d7cdf113
JL
1107
1108 NUM_ACTUALS is the total number of parameters.
1109
1110 N_NAMED_ARGS is the total number of named arguments.
1111
078a18a4
SL
1112 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1113 value, or null.
1114
d7cdf113
JL
1115 FNDECL is the tree code for the target of this call (if known)
1116
1117 ARGS_SO_FAR holds state needed by the target to know where to place
1118 the next argument.
1119
1120 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1121 for arguments which are passed in registers.
1122
1123 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
1124 and may be modified by this routine.
1125
f2d33f13 1126 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
026c3cfd 1127 flags which may be modified by this routine.
dd292d0a 1128
6de9cd9a
DN
1129 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1130 that requires allocation of stack space.
1131
dd292d0a
MM
1132 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1133 the thunked-to function. */
d7cdf113
JL
1134
1135static void
d329e058
AJ
1136initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1137 struct arg_data *args,
1138 struct args_size *args_size,
1139 int n_named_args ATTRIBUTE_UNUSED,
078a18a4 1140 tree exp, tree struct_value_addr_value,
45769134 1141 tree fndecl, tree fntype,
d5cc9181 1142 cumulative_args_t args_so_far,
d329e058
AJ
1143 int reg_parm_stack_space,
1144 rtx *old_stack_level, int *old_pending_adj,
dd292d0a 1145 int *must_preallocate, int *ecf_flags,
6de9cd9a 1146 bool *may_tailcall, bool call_from_thunk_p)
d7cdf113 1147{
d5cc9181 1148 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
db3927fb 1149 location_t loc = EXPR_LOCATION (exp);
d7cdf113
JL
1150
1151 /* Count arg position in order args appear. */
1152 int argpos;
1153
1154 int i;
f725a3ec 1155
d7cdf113
JL
1156 args_size->constant = 0;
1157 args_size->var = 0;
1158
d5e254e1
IE
1159 bitmap_obstack_initialize (NULL);
1160
d7cdf113 1161 /* In this loop, we consider args in the order they are written.
3d9684ae 1162 We fill up ARGS from the back. */
d7cdf113 1163
3d9684ae 1164 i = num_actuals - 1;
078a18a4 1165 {
d5e254e1 1166 int j = i, ptr_arg = -1;
078a18a4
SL
1167 call_expr_arg_iterator iter;
1168 tree arg;
d5e254e1 1169 bitmap slots = NULL;
078a18a4
SL
1170
1171 if (struct_value_addr_value)
1172 {
1173 args[j].tree_value = struct_value_addr_value;
3d9684ae 1174 j--;
d5e254e1
IE
1175
1176 /* If we pass structure address then we need to
1177 create bounds for it. Since created bounds is
1178 a call statement, we expand it right here to avoid
1179 fixing all other places where it may be expanded. */
1180 if (CALL_WITH_BOUNDS_P (exp))
1181 {
1182 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1183 args[j].tree_value
1184 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1185 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1186 EXPAND_NORMAL, 0, false);
1187 args[j].pointer_arg = j + 1;
1188 j--;
1189 }
078a18a4
SL
1190 }
1191 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1192 {
1193 tree argtype = TREE_TYPE (arg);
d5e254e1
IE
1194
1195 /* Remember last param with pointer and associate it
1196 with following pointer bounds. */
1197 if (CALL_WITH_BOUNDS_P (exp)
1198 && chkp_type_has_pointer (argtype))
1199 {
1200 if (slots)
1201 BITMAP_FREE (slots);
1202 ptr_arg = j;
1203 if (!BOUNDED_TYPE_P (argtype))
1204 {
1205 slots = BITMAP_ALLOC (NULL);
1206 chkp_find_bound_slots (argtype, slots);
1207 }
1208 }
1209 else if (POINTER_BOUNDS_TYPE_P (argtype))
1210 {
1211 /* We expect bounds in instrumented calls only.
1212 Otherwise it is a sign we lost flag due to some optimization
1213 and may emit call args incorrectly. */
1214 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1215
1216 /* For structures look for the next available pointer. */
1217 if (ptr_arg != -1 && slots)
1218 {
1219 unsigned bnd_no = bitmap_first_set_bit (slots);
1220 args[j].pointer_offset =
1221 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1222
1223 bitmap_clear_bit (slots, bnd_no);
1224
1225 /* Check we have no more pointers in the structure. */
1226 if (bitmap_empty_p (slots))
1227 BITMAP_FREE (slots);
1228 }
1229 args[j].pointer_arg = ptr_arg;
1230
1231 /* Check we covered all pointers in the previous
1232 non bounds arg. */
1233 if (!slots)
1234 ptr_arg = -1;
1235 }
1236 else
1237 ptr_arg = -1;
1238
078a18a4
SL
1239 if (targetm.calls.split_complex_arg
1240 && argtype
1241 && TREE_CODE (argtype) == COMPLEX_TYPE
1242 && targetm.calls.split_complex_arg (argtype))
1243 {
1244 tree subtype = TREE_TYPE (argtype);
078a18a4 1245 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
3d9684ae 1246 j--;
078a18a4
SL
1247 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1248 }
1249 else
1250 args[j].tree_value = arg;
3d9684ae 1251 j--;
078a18a4 1252 }
d5e254e1
IE
1253
1254 if (slots)
1255 BITMAP_FREE (slots);
078a18a4
SL
1256 }
1257
d5e254e1
IE
1258 bitmap_obstack_release (NULL);
1259
d7cdf113 1260 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
3d9684ae 1261 for (argpos = 0; argpos < num_actuals; i--, argpos++)
d7cdf113 1262 {
078a18a4 1263 tree type = TREE_TYPE (args[i].tree_value);
d7cdf113 1264 int unsignedp;
ef4bddc2 1265 machine_mode mode;
d7cdf113 1266
d7cdf113 1267 /* Replace erroneous argument with constant zero. */
d0f062fb 1268 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
d7cdf113
JL
1269 args[i].tree_value = integer_zero_node, type = integer_type_node;
1270
ebf0bf7f
JJ
1271 /* If TYPE is a transparent union or record, pass things the way
1272 we would pass the first field of the union or record. We have
1273 already verified that the modes are the same. */
1274 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1275 && TYPE_TRANSPARENT_AGGR (type))
1276 type = TREE_TYPE (first_field (type));
d7cdf113
JL
1277
1278 /* Decide where to pass this arg.
1279
1280 args[i].reg is nonzero if all or part is passed in registers.
1281
1282 args[i].partial is nonzero if part but not all is passed in registers,
78a52f11 1283 and the exact value says how many bytes are passed in registers.
d7cdf113
JL
1284
1285 args[i].pass_on_stack is nonzero if the argument must at least be
1286 computed on the stack. It may then be loaded back into registers
1287 if args[i].reg is nonzero.
1288
1289 These decisions are driven by the FUNCTION_... macros and must agree
1290 with those made by function.c. */
1291
1292 /* See if this argument should be passed by invisible reference. */
d5cc9181 1293 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
0976078c 1294 type, argpos < n_named_args))
d7cdf113 1295 {
9969aaf6 1296 bool callee_copies;
d6e1acf6 1297 tree base = NULL_TREE;
9969aaf6
RH
1298
1299 callee_copies
d5cc9181 1300 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
6cdd5672 1301 type, argpos < n_named_args);
9969aaf6
RH
1302
1303 /* If we're compiling a thunk, pass through invisible references
1304 instead of making a copy. */
dd292d0a 1305 if (call_from_thunk_p
9969aaf6
RH
1306 || (callee_copies
1307 && !TREE_ADDRESSABLE (type)
1308 && (base = get_base_address (args[i].tree_value))
9c3d55b4 1309 && TREE_CODE (base) != SSA_NAME
9969aaf6 1310 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
d7cdf113 1311 {
006e317a
JH
1312 /* We may have turned the parameter value into an SSA name.
1313 Go back to the original parameter so we can take the
1314 address. */
1315 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1316 {
1317 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1318 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1319 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1320 }
fe8dd12e
JH
1321 /* Argument setup code may have copied the value to register. We
1322 revert that optimization now because the tail call code must
1323 use the original location. */
1324 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1325 && !MEM_P (DECL_RTL (args[i].tree_value))
1326 && DECL_INCOMING_RTL (args[i].tree_value)
1327 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1328 set_decl_rtl (args[i].tree_value,
1329 DECL_INCOMING_RTL (args[i].tree_value));
1330
c4b9a87e
ER
1331 mark_addressable (args[i].tree_value);
1332
9969aaf6
RH
1333 /* We can't use sibcalls if a callee-copied argument is
1334 stored in the current function's frame. */
1335 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
9fd47435
RS
1336 *may_tailcall = false;
1337
db3927fb
AH
1338 args[i].tree_value = build_fold_addr_expr_loc (loc,
1339 args[i].tree_value);
9969aaf6
RH
1340 type = TREE_TYPE (args[i].tree_value);
1341
becfd6e5
KZ
1342 if (*ecf_flags & ECF_CONST)
1343 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
f21add07 1344 }
d7cdf113
JL
1345 else
1346 {
1347 /* We make a copy of the object and pass the address to the
1348 function being called. */
1349 rtx copy;
1350
d0f062fb 1351 if (!COMPLETE_TYPE_P (type)
b38f3813
EB
1352 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1353 || (flag_stack_check == GENERIC_STACK_CHECK
1354 && compare_tree_int (TYPE_SIZE_UNIT (type),
1355 STACK_CHECK_MAX_VAR_SIZE) > 0))
d7cdf113
JL
1356 {
1357 /* This is a variable-sized object. Make space on the stack
1358 for it. */
078a18a4 1359 rtx size_rtx = expr_size (args[i].tree_value);
d7cdf113
JL
1360
1361 if (*old_stack_level == 0)
1362 {
9eac0f2a 1363 emit_stack_save (SAVE_BLOCK, old_stack_level);
d7cdf113
JL
1364 *old_pending_adj = pending_stack_adjust;
1365 pending_stack_adjust = 0;
1366 }
1367
d3c12306
EB
1368 /* We can pass TRUE as the 4th argument because we just
1369 saved the stack pointer and will restore it right after
1370 the call. */
3a42502d
RH
1371 copy = allocate_dynamic_stack_space (size_rtx,
1372 TYPE_ALIGN (type),
1373 TYPE_ALIGN (type),
1374 true);
1375 copy = gen_rtx_MEM (BLKmode, copy);
3bdf5ad1 1376 set_mem_attributes (copy, type, 1);
d7cdf113
JL
1377 }
1378 else
9474e8ab 1379 copy = assign_temp (type, 1, 0);
d7cdf113 1380
ee45a32d 1381 store_expr (args[i].tree_value, copy, 0, false, false);
d7cdf113 1382
becfd6e5
KZ
1383 /* Just change the const function to pure and then let
1384 the next test clear the pure based on
1385 callee_copies. */
1386 if (*ecf_flags & ECF_CONST)
1387 {
1388 *ecf_flags &= ~ECF_CONST;
1389 *ecf_flags |= ECF_PURE;
1390 }
1391
1392 if (!callee_copies && *ecf_flags & ECF_PURE)
1393 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
9969aaf6
RH
1394
1395 args[i].tree_value
db3927fb 1396 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
9969aaf6 1397 type = TREE_TYPE (args[i].tree_value);
6de9cd9a 1398 *may_tailcall = false;
d7cdf113
JL
1399 }
1400 }
1401
8df83eae 1402 unsignedp = TYPE_UNSIGNED (type);
cde0f3fd
PB
1403 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1404 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
d7cdf113
JL
1405
1406 args[i].unsignedp = unsignedp;
1407 args[i].mode = mode;
7d167afd 1408
3c07301f
NF
1409 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1410 argpos < n_named_args);
1411
d5e254e1
IE
1412 if (args[i].reg && CONST_INT_P (args[i].reg))
1413 {
1414 args[i].special_slot = args[i].reg;
1415 args[i].reg = NULL;
1416 }
1417
7d167afd
JJ
1418 /* If this is a sibling call and the machine has register windows, the
1419 register window has to be unwinded before calling the routine, so
1420 arguments have to go into the incoming registers. */
3c07301f
NF
1421 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1422 args[i].tail_call_reg
1423 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1424 argpos < n_named_args);
1425 else
1426 args[i].tail_call_reg = args[i].reg;
7d167afd 1427
d7cdf113
JL
1428 if (args[i].reg)
1429 args[i].partial
78a52f11
RH
1430 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1431 argpos < n_named_args);
d7cdf113 1432
fe984136 1433 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
d7cdf113
JL
1434
1435 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1436 it means that we are to pass this arg in the register(s) designated
1437 by the PARALLEL, but also to pass it in the stack. */
1438 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1439 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1440 args[i].pass_on_stack = 1;
1441
1442 /* If this is an addressable type, we must preallocate the stack
1443 since we must evaluate the object into its final location.
1444
1445 If this is to be passed in both registers and the stack, it is simpler
1446 to preallocate. */
1447 if (TREE_ADDRESSABLE (type)
1448 || (args[i].pass_on_stack && args[i].reg != 0))
1449 *must_preallocate = 1;
1450
d5e254e1
IE
1451 /* No stack allocation and padding for bounds. */
1452 if (POINTER_BOUNDS_P (args[i].tree_value))
1453 ;
d7cdf113 1454 /* Compute the stack-size of this argument. */
d5e254e1
IE
1455 else if (args[i].reg == 0 || args[i].partial != 0
1456 || reg_parm_stack_space > 0
1457 || args[i].pass_on_stack)
d7cdf113
JL
1458 locate_and_pad_parm (mode, type,
1459#ifdef STACK_PARMS_IN_REG_PARM_AREA
1460 1,
1461#else
1462 args[i].reg != 0,
1463#endif
2e4ceca5 1464 reg_parm_stack_space,
e7949876
AM
1465 args[i].pass_on_stack ? 0 : args[i].partial,
1466 fndecl, args_size, &args[i].locate);
648bb159
RS
1467#ifdef BLOCK_REG_PADDING
1468 else
1469 /* The argument is passed entirely in registers. See at which
1470 end it should be padded. */
1471 args[i].locate.where_pad =
1472 BLOCK_REG_PADDING (mode, type,
1473 int_size_in_bytes (type) <= UNITS_PER_WORD);
1474#endif
f725a3ec 1475
d7cdf113
JL
1476 /* Update ARGS_SIZE, the total stack space for args so far. */
1477
e7949876
AM
1478 args_size->constant += args[i].locate.size.constant;
1479 if (args[i].locate.size.var)
1480 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
d7cdf113
JL
1481
1482 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1483 have been used, etc. */
1484
3c07301f
NF
1485 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1486 type, argpos < n_named_args);
d7cdf113
JL
1487 }
1488}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
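/* Worked example of the rounding above, for illustration: on a
   hypothetical target with a 16-byte preferred boundary,
   stack_pointer_delta == 0 and reg_parm_stack_space == 0, a constant
   args_size of 20 becomes ((20 + 0 + 15) / 16) * 16 - 0 == 32, while
   the returned unadjusted_args_size stays 20; the caller later hands
   that unrounded figure to combine_pending_stack_adjustment_and_call
   when it tries to fold pending pops into the argument push.  */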

/* Precompute parameters as needed for a function call.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        {
          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
            }
        }
    }
}
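/* Sketch of the promotion case above, assuming a target that promotes
   QImode arguments to SImode: args[i].value ends up as the SImode
   pseudo produced by convert_modes, while args[i].initial_value is
   rewritten as a promoted SUBREG -- e.g. (subreg:QI (reg:SI N) 0) on a
   little-endian target -- so later passes can still see the value in
   its declared mode.  */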

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;
          /* We preallocate in case there are bounds passed
             in the bounds table to have precomputed address
             for bounds association.  */
          else if (POINTER_BOUNDS_P (args[i].tree_value)
                   && !args[i].reg)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
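/* Small worked instance of the heuristic above: one 16-byte BLKmode
   argument that is itself a CALL_EXPR, in a 24-byte argument block,
   gives copy_to_evaluate_size * 2 == 32 >= 24, so the block is
   preallocated and the inner call can store straight into its final
   slot instead of going through a temporary.  */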

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          /* Pointer Bounds are never passed on the stack.  */
          if (POINTER_BOUNDS_P (args[i].tree_value))
            continue;

          if (CONST_INT_P (offset))
            addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (Pmode, addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
                                            MODE_INT, 1);
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, units_on_stack);
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (CONST_INT_P (offset))
            {
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
            }
          set_mem_align (args[i].stack, align);
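          /* For illustration: a constant offset of 4 bytes with a slot
             boundary of 64 bits gives (4 * 8) | 64 == 96, and
             96 & -96 == 32, i.e. the MEM is known to be 32-bit aligned
             -- the largest power of two dividing both the offset in
             bits and the boundary.  */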

          if (CONST_INT_P (slot_offset))
            addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (Pmode, addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, units_on_stack);
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        TREE_USED (fndecl) = 1;

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}
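/* Illustrative note: for a direct call the DECL_RTL of FNDECL is a MEM
   wrapping a SYMBOL_REF, so FUNEXP is e.g. (symbol_ref:DI ("memcpy"))
   on a typical 64-bit target, while an indirect call through a
   function pointer usually leaves the address in a fresh pseudo
   register produced by expand_normal.  */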

/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx_insn *scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  vec<rtx> cache;
} internal_arg_pointer_exp_state;

static rtx internal_arg_pointer_based_exp (const_rtx, bool);

/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with the first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   pointer it has.  */

static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;

  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
        {
          rtx val = NULL_RTX;
          unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
          /* Punt on pseudos set multiple times.  */
          if (idx < internal_arg_pointer_exp_state.cache.length ()
              && (internal_arg_pointer_exp_state.cache[idx]
                  != NULL_RTX))
            val = pc_rtx;
          else
            val = internal_arg_pointer_based_exp (SET_SRC (set), false);
          if (val != NULL_RTX)
            {
              if (idx >= internal_arg_pointer_exp_state.cache.length ())
                internal_arg_pointer_exp_state.cache
                  .safe_grow_cleared (idx + 1);
              internal_arg_pointer_exp_state.cache[idx] = val;
            }
        }
      if (NEXT_INSN (insn) == NULL_RTX)
        scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}

/* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
   it with fixed offset, or PC if this is with variable or unknown offset.
   TOPLEVEL is true if the function is invoked at the topmost level.  */

static rtx
internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
{
  if (CONSTANT_P (rtl))
    return NULL_RTX;

  if (rtl == crtl->args.internal_arg_pointer)
    return const0_rtx;

  if (REG_P (rtl) && HARD_REGISTER_P (rtl))
    return NULL_RTX;

  if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
    {
      rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
      if (val == NULL_RTX || val == pc_rtx)
        return val;
      return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
    }

  /* When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  */
  if (toplevel)
    internal_arg_pointer_based_exp_scan ();

  if (REG_P (rtl))
    {
      unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
      if (idx < internal_arg_pointer_exp_state.cache.length ())
        return internal_arg_pointer_exp_state.cache[idx];

      return NULL_RTX;
    }

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
        return pc_rtx;
      if (MEM_P (x))
        iter.skip_subrtxes ();
    }

  return NULL_RTX;
}
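/* Illustration of the return-value encoding: the internal arg pointer
   itself yields (const_int 0); (plus (reg X) (const_int 8)), where
   pseudo X was copied from the arg pointer, yields (const_int 8); and
   an address that depends on an arg-pointer-based pseudo in any more
   complicated way collapses to pc_rtx, i.e. "based on it, offset
   unknown".  */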

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;
  rtx val;

  if (bitmap_empty_p (stored_args_map))
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    return false;
  else if (val == pc_rtx)
    return true;
  else
    i = INTVAL (val);

  if (STACK_GROWS_DOWNWARD)
    i -= crtl->args.pretend_args_size;
  else
    i += crtl->args.pretend_args_size;

  if (ARGS_GROW_DOWNWARD)
    i = -i - size;

  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < SBITMAP_SIZE (stored_args_map)
            && bitmap_bit_p (stored_args_map, i + k))
          return true;
    }

  return false;
}
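/* Worked example, assuming upward-growing arguments and no pretend
   args: a 4-byte load whose address resolves to offset 8 from the
   incoming argument pointer checks bits 8..11 of stored_args_map; if
   check_sibcall_argument_overlap has already marked any of those bytes
   as overwritten by an outgoing argument, the sibcall is abandoned.  */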

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx_insn *before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb, e.g. a QImode value when we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (copy_rtx (args[i].value));

              /* Check for overlap with already clobbered argument area,
                 providing that this has non-zero size.  */
              if (is_sibcall
                  && (size == 0
                      || mem_overlaps_already_clobbered_arg_p
                         (XEXP (args[i].value, 0), size)))
                *sibcall_failure = 1;

              if (size % UNITS_PER_WORD == 0
                  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
              else
                {
                  if (nregs > 1)
                    move_block_to_reg (REGNO (reg), mem, nregs - 1,
                                       args[i].mode);
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
                  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
                  unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
                  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
                                             word_mode, word_mode, false);
                  if (BYTES_BIG_ENDIAN)
                    x = expand_shift (LSHIFT_EXPR, word_mode, x,
                                      BITS_PER_WORD - bitsize, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = (BYTES_BIG_ENDIAN
                                        ? RSHIFT_EXPR : LSHIFT_EXPR);
                  rtx x;

                  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg_mode (call_fusage, reg,
                          TYPE_MODE (TREE_TYPE (args[i].tree_value)));
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}
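/* Worked example of the BLKmode shift above, for illustration: on a
   hypothetical big-endian target with UNITS_PER_WORD == 8, a 3-byte
   argument padded downward arrives at the most significant end of the
   loaded word and is shifted right by (8 - 3) * 8 == 40 bits to land
   at the least significant end where the callee expects it; the
   upward-padding case in the nregs == -1 arm is the mirror image,
   shifting left toward the msb.  */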

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
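/* Worked example, for illustration: with a 16-byte boundary,
   stack_pointer_delta == 12, unadjusted_args_size == 8 and
   pending_stack_adjust == 20, the under-alignment is (12 + 8) % 16
   == 4, then 4 - (20 % 16) == 0, so all 20 pending bytes are popped
   (adjustment == 20) and args_size->constant becomes 20 - 20 + 8 == 8:
   once the 8 argument bytes are pushed, the stack sits at delta
   12 - 20 + 8 == 0, a clean multiple of 16.  */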

/* Scan the X expression to check whether it dereferences any argument
   slots we already clobbered with tail call arguments (as noted in the
   stored_args_map bitmap).
   Return nonzero if X dereferences such an argument slot,
   zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  /* We need not check the operands of the CALL expression itself.  */
  if (code == CALL)
    return 0;

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

/* Scan the sequence after INSN to check whether it dereferences any
   argument slot already clobbered by tail call arguments (as noted in
   the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add the stack
   slots for ARG to the stored_args_map bitmap afterwards (when ARG is
   a register MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the
   sequence after INSN dereferences such argument slots, zero
   otherwise.  */

static int
check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
                                int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
      if (ARGS_GROW_DOWNWARD)
        low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
      else
        low = arg->locate.slot_offset.constant;

      for (high = low + arg->locate.size.constant; low < high; low++)
        bitmap_set_bit (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
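/* For illustration: after an 8-byte argument is stored at slot offset
   16 (upward-growing args), bits 16..23 of stored_args_map are set, so
   any later argument whose expansion loads from those incoming slots
   makes the scan above return nonzero and the caller records a
   sibcall_failure.  */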

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}
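/* Illustration: an SImode value returned at the msb end of a DImode
   hard register needs a shift of 64 - 32 == 32 bits; with LEFT_P
   false, the arithmetic right shift both moves it to the lsb end and
   sign-extends it, which is what a MIPS-like ABI requires.  */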

/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}

/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx_insn *normal_call_insns = NULL;
  /* Sequence of insns to perform a tail "call".  */
  rtx_insn *tail_call_insns = NULL;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  tree rettype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Register(s) in which bounds are returned.  */
  rtx valbnd = NULL;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ and ERF_ flags.  */
  int flags = 0;
  int return_flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
      return_flags |= decl_return_flags (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
    }
  rettype = TREE_TYPE (exp);

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (rettype))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a non looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (!(flags & ECF_LOOPING_CONST_OR_PURE))
      && (ignore || target == const0_rtx
          || TYPE_MODE (rettype) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }
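  /* Illustration of the shortcut above, assuming a pure callee: for a
     call like "pure_fn (x, y);" whose result is unused, no call insn
     is emitted at all; x and y are expanded only for their side
     effects, which is safe because ECF_PURE/ECF_CONST promise the call
     has no observable effect beyond its return value.  ("pure_fn" is a
     hypothetical function used only for this example.)  */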

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
#endif

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
      && reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fntype))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (rettype);

	/* Even if it is semantically safe to use the target as the return
	   slot, it may be not sufficiently aligned for the return type.  */
	if (CALL_EXPR_RETURN_SLOT_OPT (exp)
	    && target
	    && MEM_P (target)
	    && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
		 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype),
					   MEM_ALIGN (target))))
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */
	    rtx d = assign_temp (rettype, 1, 1);
	    structure_value_addr = XEXP (d, 0);
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
      /* Without automatic stack alignment, we can't increase preferred
	 stack boundary.  With automatic stack alignment, it is
	 unnecessary since unless we can guarantee that all callers will
	 align the outgoing stack properly, callee has to align its
	 stack anyway.  */
      if (i
	  && i->preferred_incoming_stack_boundary
	  && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  */
  if (targetm.calls.split_complex_arg)
    {
      call_expr_arg_iterator iter;
      tree arg;
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	{
	  tree type = TREE_TYPE (arg);
	  if (type && TREE_CODE (type) == COMPLEX_TYPE
	      && targetm.calls.split_complex_arg (type))
	    num_complex_actuals++;
	}
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
		  || (ACCUMULATE_OUTGOING_ARGS
		      && stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
		  ? copy_addr_to_reg (convert_memory_address
				      (Pmode, structure_value_addr))
		  : structure_value_addr);

      structure_value_addr_value =
	make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
      structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  num_actuals =
    call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (args_so_far))
    ;
  else if (type_arg_types != 0
	   && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = XALLOCAVEC (struct arg_data, num_actuals);
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
				   n_named_args, exp,
				   structure_value_addr_value, fndecl, fntype,
				   args_so_far, reg_parm_stack_space,
				   &old_stack_level, &old_pending_adj,
				   &must_preallocate, &flags,
				   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    must_preallocate = 1;

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
						num_actuals, args,
						&args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx,
			      structure_value_addr))
      && (args_size.var
	  || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there's cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;

  /* Other reasons the tail call optimization must fail.  */
  if (!try_tail_call
      || !targetm.have_sibcall_epilogue ()
      /* Doing sibling call optimization needs some work, since
	 structure_value_addr can be allocated on the stack.
	 It does not seem worth the effort since few optimizable
	 sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
#ifdef REG_PARM_STACK_SPACE
      /* If outgoing reg parm stack space changes, we cannot do sibcall.  */
      || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
	  != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
      || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl))
#endif
      /* Check whether the target is able to optimize the call
	 into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
	 optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
	 some of the caller's arguments, but could clobber them beforehand if
	 the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
	 function, we cannot change it into a sibling call.
	 crtl->args.pretend_args_size is not part of the
	 stack allocated by our caller.  */
      || args_size.constant > (crtl->args.size
			       - crtl->args.pretend_args_size)
      /* If the callee pops its own arguments, then it must pop exactly
	 the same number of arguments as the current function.  */
      || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
	  != targetm.calls.return_pops_args (current_function_decl,
					     TREE_TYPE (current_function_decl),
					     crtl->args.size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;

  /* Check if caller and callee disagree in promotion of function
     return value.  */
  if (try_tail_call)
    {
      machine_mode caller_mode, caller_promoted_mode;
      machine_mode callee_mode, callee_promoted_mode;
      int caller_unsignedp, callee_unsignedp;
      tree caller_res = DECL_RESULT (current_function_decl);

      caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
      caller_mode = DECL_MODE (caller_res);
      callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
      callee_mode = TYPE_MODE (TREE_TYPE (funtype));
      caller_promoted_mode
	= promote_function_mode (TREE_TYPE (caller_res), caller_mode,
				 &caller_unsignedp,
				 TREE_TYPE (current_function_decl), 1);
      callee_promoted_mode
	= promote_function_mode (TREE_TYPE (funtype), callee_mode,
				 &callee_unsignedp,
				 funtype, 1);
      if (caller_mode != VOIDmode
	  && (caller_promoted_mode != callee_promoted_mode
	      || ((caller_mode != caller_promoted_mode
		   || callee_mode != callee_promoted_mode)
		  && (caller_unsignedp != callee_unsignedp
		      || GET_MODE_BITSIZE (caller_mode)
			 < GET_MODE_BITSIZE (callee_mode)))))
	try_tail_call = 0;
    }

2801 }
2802
01973e26
L
2803 /* Ensure current function's preferred stack boundary is at least
2804 what we need. Stack alignment may also increase preferred stack
2805 boundary. */
b5f772ce 2806 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
cb91fab0 2807 crtl->preferred_stack_boundary = preferred_stack_boundary;
01973e26
L
2808 else
2809 preferred_stack_boundary = crtl->preferred_stack_boundary;
c2f8b491 2810
099e9712 2811 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
497eb8c3 2812
0a1c58a2
JL
2813 /* We want to make two insn chains; one for a sibling call, the other
2814 for a normal call. We will select one of the two chains after
2815 initial RTL generation is complete. */
b820d2b8 2816 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
0a1c58a2
JL
2817 {
2818 int sibcall_failure = 0;
f5143c46 2819 /* We want to emit any pending stack adjustments before the tail
0a1c58a2 2820 recursion "call". That way we know any adjustment after the tail
7ae4ad28 2821 recursion call can be ignored if we indeed use the tail
0a1c58a2 2822 call expansion. */
7f2f0a01 2823 saved_pending_stack_adjust save;
48810515
DM
2824 rtx_insn *insns, *before_call, *after_args;
2825 rtx next_arg_reg;
39842893 2826
0a1c58a2
JL
2827 if (pass == 0)
2828 {
0a1c58a2
JL
2829 /* State variables we need to save and restore between
2830 iterations. */
7f2f0a01 2831 save_pending_stack_adjust (&save);
0a1c58a2 2832 }
f2d33f13
JH
2833 if (pass)
2834 flags &= ~ECF_SIBCALL;
2835 else
2836 flags |= ECF_SIBCALL;
51bbfa0c 2837
0a1c58a2 2838 /* Other state variables that we must reinitialize each time
f2d33f13 2839 through the loop (that are not initialized by the loop itself). */
0a1c58a2
JL
2840 argblock = 0;
2841 call_fusage = 0;
fa76d9e0 2842
f725a3ec 2843 /* Start a new sequence for the normal call case.
51bbfa0c 2844
0a1c58a2
JL
2845 From this point on, if the sibling call fails, we want to set
2846 sibcall_failure instead of continuing the loop. */
2847 start_sequence ();
eecb6f50 2848
0a1c58a2
JL
2849 /* Don't let pending stack adjusts add up to too much.
2850 Also, do all pending adjustments now if there is any chance
2851 this might be a call to alloca or if we are expanding a sibling
9dd9bf80 2852 call sequence.
63579539
DJ
2853 Also do the adjustments before a throwing call, otherwise
2854 exception handling can fail; PR 19225. */
0a1c58a2 2855 if (pending_stack_adjust >= 32
b5cd4ed4 2856 || (pending_stack_adjust > 0
9dd9bf80 2857 && (flags & ECF_MAY_BE_ALLOCA))
63579539
DJ
2858 || (pending_stack_adjust > 0
2859 && flag_exceptions && !(flags & ECF_NOTHROW))
0a1c58a2
JL
2860 || pass == 0)
2861 do_pending_stack_adjust ();
51bbfa0c 2862
0a1c58a2 2863 /* Precompute any arguments as needed. */
f8a097cd 2864 if (pass)
84b8030f 2865 precompute_arguments (num_actuals, args);
51bbfa0c 2866
0a1c58a2
JL
2867 /* Now we are about to start emitting insns that can be deleted
2868 if a libcall is deleted. */
84b8030f 2869 if (pass && (flags & ECF_MALLOC))
0a1c58a2 2870 start_sequence ();
51bbfa0c 2871
cb91fab0 2872 if (pass == 0 && crtl->stack_protect_guard)
b755446c
RH
2873 stack_protect_epilogue ();
2874
099e9712 2875 adjusted_args_size = args_size;
ce48579b
RH
2876 /* Compute the actual size of the argument block required. The variable
2877 and constant sizes must be combined, the size may have to be rounded,
2878 and there may be a minimum required size. When generating a sibcall
2879 pattern, do not round up, since we'll be re-using whatever space our
2880 caller provided. */
2881 unadjusted_args_size
f725a3ec
KH
2882 = compute_argument_block_size (reg_parm_stack_space,
2883 &adjusted_args_size,
5d059ed9 2884 fndecl, fntype,
ce48579b
RH
2885 (pass == 0 ? 0
2886 : preferred_stack_boundary));
2887
f725a3ec 2888 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
ce48579b 2889
f8a097cd 2890 /* The argument block when performing a sibling call is the
c22cacf3 2891 incoming argument block. */
f8a097cd 2892 if (pass == 0)
c67846f2 2893 {
2e3f842f 2894 argblock = crtl->args.internal_arg_pointer;
76e048a8
KT
2895 if (STACK_GROWS_DOWNWARD)
2896 argblock
2897 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
2898 else
2899 argblock
2900 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
2901
c67846f2 2902 stored_args_map = sbitmap_alloc (args_size.constant);
f61e445a 2903 bitmap_clear (stored_args_map);
c67846f2 2904 }
ce48579b 2905
0a1c58a2
JL
2906 /* If we have no actual push instructions, or shouldn't use them,
2907 make space for all args right now. */
099e9712 2908 else if (adjusted_args_size.var != 0)
51bbfa0c 2909 {
0a1c58a2
JL
2910 if (old_stack_level == 0)
2911 {
9eac0f2a 2912 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f 2913 old_stack_pointer_delta = stack_pointer_delta;
0a1c58a2
JL
2914 old_pending_adj = pending_stack_adjust;
2915 pending_stack_adjust = 0;
0a1c58a2
JL
2916 /* stack_arg_under_construction says whether a stack arg is
2917 being constructed at the old stack level. Pushing the stack
2918 gets a clean outgoing argument block. */
2919 old_stack_arg_under_construction = stack_arg_under_construction;
2920 stack_arg_under_construction = 0;
0a1c58a2 2921 }
099e9712 2922 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
a11e0df4 2923 if (flag_stack_usage_info)
d3c12306 2924 current_function_has_unbounded_dynamic_stack_size = 1;
51bbfa0c 2925 }
0a1c58a2
JL
2926 else
2927 {
2928 /* Note that we must go through the motions of allocating an argument
2929 block even if the size is zero because we may be storing args
2930 in the area reserved for register arguments, which may be part of
2931 the stack frame. */
26a258fe 2932
099e9712 2933 int needed = adjusted_args_size.constant;
51bbfa0c 2934
0a1c58a2
JL
2935 /* Store the maximum argument space used. It will be pushed by
2936 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2937 checking). */
51bbfa0c 2938
38173d38
JH
2939 if (needed > crtl->outgoing_args_size)
2940 crtl->outgoing_args_size = needed;
51bbfa0c 2941
0a1c58a2
JL
2942 if (must_preallocate)
2943 {
f73ad30e
JH
2944 if (ACCUMULATE_OUTGOING_ARGS)
2945 {
f8a097cd
JH
2946 /* Since the stack pointer will never be pushed, it is
2947 possible for the evaluation of a parm to clobber
2948 something we have already written to the stack.
2949 Since most function calls on RISC machines do not use
2950 the stack, this is uncommon, but must work correctly.
26a258fe 2951
f73ad30e 2952 Therefore, we save any area of the stack that was already
f8a097cd
JH
2953 written and that we are using. Here we set up to do this
2954 by making a new stack usage map from the old one. The
f725a3ec 2955 actual save will be done by store_one_arg.
26a258fe 2956
f73ad30e
JH
2957 Another approach might be to try to reorder the argument
2958 evaluations to avoid this conflicting stack usage. */
26a258fe 2959
f8a097cd
JH
2960 /* Since we will be writing into the entire argument area,
2961 the map must be allocated for its entire size, not just
2962 the part that is the responsibility of the caller. */
5d059ed9 2963 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 2964 needed += reg_parm_stack_space;
51bbfa0c 2965
6dad9361
TS
2966 if (ARGS_GROW_DOWNWARD)
2967 highest_outgoing_arg_in_use
2968 = MAX (initial_highest_arg_in_use, needed + 1);
2969 else
2970 highest_outgoing_arg_in_use
2971 = MAX (initial_highest_arg_in_use, needed);
2972
04695783 2973 free (stack_usage_map_buf);
5ed6ace5 2974 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 2975 stack_usage_map = stack_usage_map_buf;
51bbfa0c 2976
f73ad30e 2977 if (initial_highest_arg_in_use)
2e09e75a
JM
2978 memcpy (stack_usage_map, initial_stack_usage_map,
2979 initial_highest_arg_in_use);
2f4aa534 2980
f73ad30e 2981 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 2982 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
2983 (highest_outgoing_arg_in_use
2984 - initial_highest_arg_in_use));
2985 needed = 0;
2f4aa534 2986
f8a097cd
JH
2987 /* The address of the outgoing argument list must not be
2988 copied to a register here, because argblock would be left
2989 pointing to the wrong place after the call to
f725a3ec 2990 allocate_dynamic_stack_space below. */
2f4aa534 2991
f73ad30e 2992 argblock = virtual_outgoing_args_rtx;
f725a3ec 2993 }
f73ad30e 2994 else
26a258fe 2995 {
f73ad30e 2996 if (inhibit_defer_pop == 0)
0a1c58a2 2997 {
f73ad30e 2998 /* Try to reuse some or all of the pending_stack_adjust
ce48579b
RH
2999 to get this space. */
3000 needed
f725a3ec 3001 = (combine_pending_stack_adjustment_and_call
ce48579b 3002 (unadjusted_args_size,
099e9712 3003 &adjusted_args_size,
ce48579b
RH
3004 preferred_unit_stack_boundary));
3005
3006 /* combine_pending_stack_adjustment_and_call computes
3007 an adjustment before the arguments are allocated.
3008 Account for them and see whether or not the stack
3009 needs to go up or down. */
3010 needed = unadjusted_args_size - needed;
3011
3012 if (needed < 0)
f73ad30e 3013 {
ce48579b
RH
3014 /* We're releasing stack space. */
3015 /* ??? We can avoid any adjustment at all if we're
3016 already aligned. FIXME. */
3017 pending_stack_adjust = -needed;
3018 do_pending_stack_adjust ();
f73ad30e
JH
3019 needed = 0;
3020 }
f725a3ec 3021 else
ce48579b
RH
3022 /* We need to allocate space. We'll do that in
3023 push_block below. */
3024 pending_stack_adjust = 0;
0a1c58a2 3025 }
ce48579b
RH
3026
3027 /* Special case this because overhead of `push_block' in
3028 this case is non-trivial. */
f73ad30e
JH
3029 if (needed == 0)
3030 argblock = virtual_outgoing_args_rtx;
0a1c58a2 3031 else
d892f288
DD
3032 {
3033 argblock = push_block (GEN_INT (needed), 0, 0);
6dad9361
TS
3034 if (ARGS_GROW_DOWNWARD)
3035 argblock = plus_constant (Pmode, argblock, needed);
d892f288 3036 }
f73ad30e 3037
f8a097cd
JH
3038 /* We only really need to call `copy_to_reg' in the case
3039 where push insns are going to be used to pass ARGBLOCK
3040 to a function call in ARGS. In that case, the stack
3041 pointer changes value from the allocation point to the
3042 call point, and hence the value of
3043 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3044 as well always do it. */
f73ad30e 3045 argblock = copy_to_reg (argblock);
38afb23f
OH
3046 }
3047 }
3048 }
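
      /* At this point ARGBLOCK is either the address of the outgoing
	 argument block or 0, meaning arguments will be pushed one by
	 one.  Under ACCUMULATE_OUTGOING_ARGS, stack_usage_map is a byte
	 map of that block in which a nonzero entry marks a byte already
	 written; store_one_arg consults it to decide which areas must
	 be saved before they are overwritten.  */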

      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* The save/restore code in store_one_arg handles all
	     cases except one: a constructor call (including a C
	     function returning a BLKmode struct) to initialize
	     an argument.  */
	  if (stack_arg_under_construction)
	    {
	      rtx push_size
		= GEN_INT (adjusted_args_size.constant
			   + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
							      : TREE_TYPE (fndecl))) ? 0
			      : reg_parm_stack_space));
	      if (old_stack_level == 0)
		{
		  emit_stack_save (SAVE_BLOCK, &old_stack_level);
		  old_stack_pointer_delta = stack_pointer_delta;
		  old_pending_adj = pending_stack_adjust;
		  pending_stack_adjust = 0;
		  /* stack_arg_under_construction says whether a stack
		     arg is being constructed at the old stack level.
		     Pushing the stack gets a clean outgoing argument
		     block.  */
		  old_stack_arg_under_construction
		    = stack_arg_under_construction;
		  stack_arg_under_construction = 0;
		  /* Make a new map for the new argument list.  */
		  free (stack_usage_map_buf);
		  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
		  stack_usage_map = stack_usage_map_buf;
		  highest_outgoing_arg_in_use = 0;
		}
	      /* We can pass TRUE as the 4th argument because we just
		 saved the stack pointer and will restore it right after
		 the call.  */
	      allocate_dynamic_stack_space (push_size, 0,
					    BIGGEST_ALIGNMENT, true);
	    }

	  /* If argument evaluation might modify the stack pointer,
	     copy the address of the argument list to a register.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].pass_on_stack)
	      {
		argblock = copy_addr_to_reg (argblock);
		break;
	      }
	}

      compute_argument_addresses (args, argblock, num_actuals);

      /* Stack is properly aligned, pops can't safely be deferred during
	 the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Precompute all register parameters.  It isn't safe to compute
	 anything once we have started filling any specific hard regs.
	 TLS symbols sometimes need a call to resolve.  Precompute
	 register parameters before any stack pointer manipulation
	 to avoid unaligned stack in the called function.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      OK_DEFER_POP;

      /* Perform stack alignment before the first push (the last arg).  */
      if (argblock == 0
	  && adjusted_args_size.constant > reg_parm_stack_space
	  && adjusted_args_size.constant != unadjusted_args_size)
	{
	  /* When the stack adjustment is pending, we get better code
	     by combining the adjustments.  */
	  if (pending_stack_adjust
	      && ! inhibit_defer_pop)
	    {
	      pending_stack_adjust
		= (combine_pending_stack_adjustment_and_call
		   (unadjusted_args_size,
		    &adjusted_args_size,
		    preferred_unit_stack_boundary));
	      do_pending_stack_adjust ();
	    }
	  else if (argblock == 0)
	    anti_adjust_stack (GEN_INT (adjusted_args_size.constant
					- unadjusted_args_size));
	}
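
      /* The effect of the combination is that one stack-pointer update
	 can both retire the pending pop and open up the extra argument
	 space.  With illustrative figures: a pending adjustment that
	 would pop 24 bytes and a need for 16 more bytes of argument
	 space fold into a single net adjustment, instead of one
	 adjustment for each.  */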
      /* Now that the stack is properly aligned, pops can't safely
	 be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Record the maximum pushed stack space size.  We need to delay
	 doing it this far to take into account the optimization done
	 by combine_pending_stack_adjustment_and_call.  */
      if (flag_stack_usage_info
	  && !ACCUMULATE_OUTGOING_ARGS
	  && pass
	  && adjusted_args_size.var == 0)
	{
	  int pushed = adjusted_args_size.constant + pending_stack_adjust;
	  if (pushed > current_function_pushed_stack_size)
	    current_function_pushed_stack_size = pushed;
	}

      funexp = rtx_for_function_call (fndecl, addr);

      if (CALL_EXPR_STATIC_CHAIN (exp))
	static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
      else
	static_chain_value = 0;

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
	 is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
	 These come before register parms, since they can require block-moves,
	 which could clobber the registers used for register parms.
	 Parms which have partial registers are not stored here,
	 but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
	{
	  /* Delay bounds until all other args are stored.  */
	  if (POINTER_BOUNDS_P (args[i].tree_value))
	    continue;
	  else if (args[i].reg == 0 || args[i].pass_on_stack)
	    {
	      rtx_insn *before_arg = get_last_insn ();

	      /* We don't allow passing huge (> 2^30 B) arguments
		 by value.  It would cause an overflow later on.  */
	      if (adjusted_args_size.constant
		  >= (1 << (HOST_BITS_PER_INT - 2)))
		{
		  sorry ("passing too large argument on stack");
		  continue;
		}

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }

	  if (args[i].stack)
	    call_fusage
	      = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
				   gen_rtx_USE (VOIDmode, args[i].stack),
				   call_fusage);
	}

      /* If we have a parm that is passed in registers but not in memory
	 and whose alignment does not permit a direct copy into registers,
	 make a group of pseudos that correspond to each register that we
	 will later fill.  */
      if (STRICT_ALIGNMENT)
	store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
	 This is the last place a block-move can happen.  */
      if (reg_parm_seen)
	for (i = 0; i < num_actuals; i++)
	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
	    {
	      rtx_insn *before_arg = get_last_insn ();

	      /* On targets with weird calling conventions (e.g. PA) it's
		 hard to ensure that all cases of argument overlap between
		 stack and registers work.  Play it safe and bail out.  */
	      if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
		{
		  sibcall_failure = 1;
		  break;
		}

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }

      bool any_regs = false;
      for (i = 0; i < num_actuals; i++)
	if (args[i].reg != NULL_RTX)
	  {
	    any_regs = true;
	    targetm.calls.call_args (args[i].reg, funtype);
	  }
      if (!any_regs)
	targetm.calls.call_args (pc_rtx, funtype);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      valbnd = 0;
      if (TYPE_MODE (rettype) != VOIDmode
	  && ! structure_value_addr)
	{
	  if (pcc_struct_value)
	    {
	      valreg = hard_function_value (build_pointer_type (rettype),
					    fndecl, NULL, (pass == 0));
	      if (CALL_WITH_BOUNDS_P (exp))
		valbnd = targetm.calls.
		  chkp_function_value_bounds (build_pointer_type (rettype),
					      fndecl, (pass == 0));
	    }
	  else
	    {
	      valreg = hard_function_value (rettype, fndecl, fntype,
					    (pass == 0));
	      if (CALL_WITH_BOUNDS_P (exp))
		valbnd = targetm.calls.chkp_function_value_bounds (rettype,
								   fndecl,
								   (pass == 0));
	    }

	  /* If VALREG is a PARALLEL whose first member has a zero
	     offset, use that.  This is for targets such as m68k that
	     return the same value in multiple places.  */
	  if (GET_CODE (valreg) == PARALLEL)
	    {
	      rtx elem = XVECEXP (valreg, 0, 0);
	      rtx where = XEXP (elem, 0);
	      rtx offset = XEXP (elem, 1);
	      if (offset == const0_rtx
		  && GET_MODE (where) == GET_MODE (valreg))
		valreg = where;
	    }
	}

      /* Store all bounds not passed in registers.  */
      for (i = 0; i < num_actuals; i++)
	{
	  if (POINTER_BOUNDS_P (args[i].tree_value)
	      && !args[i].reg)
	    store_bounds (&args[i],
			  args[i].pointer_arg == -1
			  ? NULL
			  : &args[args[i].pointer_arg]);
	}

      /* If register arguments require space on the stack and stack space
	 was not preallocated, allocate stack space here for arguments
	 passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
	  && !ACCUMULATE_OUTGOING_ARGS
	  && must_preallocate == 0 && reg_parm_stack_space > 0)
	anti_adjust_stack (GEN_INT (reg_parm_stack_space));

      /* Pass the function the address in which to return a
	 structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
	{
	  structure_value_addr
	    = convert_memory_address (Pmode, structure_value_addr);
	  emit_move_insn (struct_value,
			  force_reg (Pmode,
				     force_operand (structure_value_addr,
						    NULL_RTX)));

	  if (REG_P (struct_value))
	    use_reg (&call_fusage, struct_value);
	}

      after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
				     static_chain_value, &call_fusage,
				     reg_parm_seen, pass == 0);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
				pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
	 later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
	 with register windows this should be the incoming register.  */
      if (pass == 0)
	next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
							    VOIDmode,
							    void_type_node,
							    true);
      else
	next_arg_reg = targetm.calls.function_arg (args_so_far,
						   VOIDmode, void_type_node,
						   true);

      if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
	{
	  int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
	  arg_nr = num_actuals - arg_nr - 1;
	  if (arg_nr >= 0
	      && arg_nr < num_actuals
	      && args[arg_nr].reg
	      && valreg
	      && REG_P (valreg)
	      && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
	    call_fusage
	      = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
				   gen_rtx_SET (valreg, args[arg_nr].reg),
				   call_fusage);
	}
      /* All arguments and registers used for the call must be set up by
	 now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
		  || !(stack_pointer_delta % preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   flags, args_so_far);

      if (flag_ipa_ra)
	{
	  rtx_call_insn *last;
	  rtx datum = NULL_RTX;
	  if (fndecl != NULL_TREE)
	    {
	      datum = XEXP (DECL_RTL (fndecl), 0);
	      gcc_assert (datum != NULL_RTX
			  && GET_CODE (datum) == SYMBOL_REF);
	    }
	  last = last_call_insn ();
	  add_reg_note (last, REG_CALL_DECL, datum);
	}

      /* If the call setup or the call itself overlaps with anything
	 of the argument setup we probably clobbered our call address.
	 In that case we can't do sibcalls.  */
      if (pass == 0
	  && check_sibcall_argument_overlap (after_args, 0, 0))
	sibcall_failure = 1;

      /* If a non-BLKmode value is returned at the most significant end
	 of a register, shift the register right by the appropriate amount
	 and update VALREG accordingly.  BLKmode values are handled by the
	 group load/store machinery below.  */
      if (!structure_value_addr
	  && !pcc_struct_value
	  && TYPE_MODE (rettype) != VOIDmode
	  && TYPE_MODE (rettype) != BLKmode
	  && REG_P (valreg)
	  && targetm.calls.return_in_msb (rettype))
	{
	  if (shift_return_value (TYPE_MODE (rettype), false, valreg))
	    sibcall_failure = 1;
	  valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
	}

      if (pass && (flags & ECF_MALLOC))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
	  rtx_insn *last, *insns;

	  /* The return value from a malloc-like function is a pointer.  */
	  if (TREE_CODE (rettype) == POINTER_TYPE)
	    mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);

	  emit_move_insn (temp, valreg);

	  /* The return value from a malloc-like function cannot alias
	     anything else.  */
	  last = get_last_insn ();
	  add_reg_note (last, REG_NOALIAS, temp);

	  /* Write out the sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  valreg = temp;
	}

      /* For calls to `setjmp', etc., inform
	 function.c:setjmp_warnings that it should complain if
	 nonvolatile values are live.  For functions that cannot
	 return, inform flow that control does not fall through.  */

      if ((flags & ECF_NORETURN) || pass == 0)
	{
	  /* The barrier must be emitted
	     immediately after the CALL_INSN.  Some ports emit more
	     than just a CALL_INSN above, so we must search for it here.  */

	  rtx_insn *last = get_last_insn ();
	  while (!CALL_P (last))
	    {
	      last = PREV_INSN (last);
	      /* There was no CALL_INSN?  */
	      gcc_assert (last != before_call);
	    }

	  emit_barrier_after (last);

	  /* Stack adjustments after a noreturn call are dead code.
	     However when NO_DEFER_POP is in effect, we must preserve
	     stack_pointer_delta.  */
	  if (inhibit_defer_pop == 0)
	    {
	      stack_pointer_delta = old_stack_allocated;
	      pending_stack_adjust = 0;
	    }
	}

      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (rettype) == VOIDmode
	  || ignore)
	target = const0_rtx;
      else if (structure_value_addr)
	{
	  if (target == 0 || !MEM_P (target))
	    {
	      target
		= gen_rtx_MEM (TYPE_MODE (rettype),
			       memory_address (TYPE_MODE (rettype),
					       structure_value_addr));
	      set_mem_attributes (target, rettype, 1);
	    }
	}
      else if (pcc_struct_value)
	{
	  /* This is the special C++ case where we need to
	     know what the true target was.  We take care to
	     never use this value more than once in one expression.  */
	  target = gen_rtx_MEM (TYPE_MODE (rettype),
				copy_to_reg (valreg));
	  set_mem_attributes (target, rettype, 1);
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (target == 0)
	    target = emit_group_move_into_temps (valreg);
	  else if (rtx_equal_p (target, valreg))
	    ;
	  else if (GET_CODE (target) == PARALLEL)
	    /* Handle the result of an emit_group_move_into_temps
	       call in the previous pass.  */
	    emit_group_move (target, valreg);
	  else
	    emit_group_store (target, valreg, rettype,
			      int_size_in_bytes (rettype));
	}
      else if (target
	       && GET_MODE (target) == TYPE_MODE (rettype)
	       && GET_MODE (target) == GET_MODE (valreg))
	{
	  bool may_overlap = false;

	  /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
	     reg to a plain register.  */
	  if (!REG_P (target) || HARD_REGISTER_P (target))
	    valreg = avoid_likely_spilled_reg (valreg);

	  /* If TARGET is a MEM in the argument area, and we have
	     saved part of the argument area, then we can't store
	     directly into TARGET as it may get overwritten when we
	     restore the argument save area below.  Don't work too
	     hard though and simply force TARGET to a register if it
	     is a MEM; the optimizer is quite likely to sort it out.  */
	  if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
	    for (i = 0; i < num_actuals; i++)
	      if (args[i].save_area)
		{
		  may_overlap = true;
		  break;
		}

	  if (may_overlap)
	    target = copy_to_reg (valreg);
	  else
	    {
	      /* TARGET and VALREG cannot be equal at this point
		 because the latter would not have
		 REG_FUNCTION_VALUE_P true, while the former would if
		 it were referring to the same register.

		 If they refer to the same register, this move will be
		 a no-op, except when function inlining is being
		 done.  */
	      emit_move_insn (target, valreg);

	      /* If we are setting a MEM, this code must be executed.
		 Since it is emitted after the call insn, sibcall
		 optimization cannot be performed in that case.  */
	      if (MEM_P (target))
		sibcall_failure = 1;
	    }
	}
      else
	target = copy_to_reg (avoid_likely_spilled_reg (valreg));

      /* If we promoted this return value, make the proper SUBREG.
	 TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
	  && TYPE_MODE (rettype) != BLKmode
	  && GET_MODE (target) != TYPE_MODE (rettype))
	{
	  tree type = rettype;
	  int unsignedp = TYPE_UNSIGNED (type);
	  int offset = 0;
	  machine_mode pmode;

	  /* Ensure we promote as expected, and get the new unsignedness.  */
	  pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
					 funtype, 1);
	  gcc_assert (GET_MODE (target) == pmode);

	  if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
	      && (GET_MODE_SIZE (GET_MODE (target))
		  > GET_MODE_SIZE (TYPE_MODE (type))))
	    {
	      offset = GET_MODE_SIZE (GET_MODE (target))
		- GET_MODE_SIZE (TYPE_MODE (type));
	      if (! BYTES_BIG_ENDIAN)
		offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
	      else if (! WORDS_BIG_ENDIAN)
		offset %= UNITS_PER_WORD;
	    }

	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
	  SUBREG_PROMOTED_VAR_P (target) = 1;
	  SUBREG_PROMOTED_SET (target, unsignedp);
	}
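
      /* As an example of the offset computation above: a 4-byte value
	 promoted to an 8-byte register on a target with 8-byte words
	 gives a raw offset of 4.  Little-endian byte order rounds that
	 down to 0, while big-endian byte order keeps 4; either way the
	 SUBREG selects the least significant 4 bytes of the register.  */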

      /* If size of args is variable or this was a constructor call for a stack
	 argument, restore saved stack-pointer value.  */

      if (old_stack_level)
	{
	  rtx_insn *prev = get_last_insn ();

	  emit_stack_restore (SAVE_BLOCK, old_stack_level);
	  stack_pointer_delta = old_stack_pointer_delta;

	  fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);

	  pending_stack_adjust = old_pending_adj;
	  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
	  stack_arg_under_construction = old_stack_arg_under_construction;
	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	  sibcall_failure = 1;
	}
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
	{
#ifdef REG_PARM_STACK_SPACE
	  if (save_area)
	    restore_fixed_argument_area (save_area, argblock,
					 high_to_save, low_to_save);
#endif

	  /* If we saved any argument areas, restore them.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].save_area)
	      {
		machine_mode save_mode = GET_MODE (args[i].save_area);
		rtx stack_area
		  = gen_rtx_MEM (save_mode,
				 memory_address (save_mode,
						 XEXP (args[i].stack_slot, 0)));

		if (save_mode != BLKmode)
		  emit_move_insn (stack_area, args[i].save_area);
		else
		  emit_block_move (stack_area, args[i].save_area,
				   GEN_INT (args[i].locate.size.constant),
				   BLOCK_OP_CALL_PARM);
	      }

	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	}

      /* If this was alloca, record the new stack level.  */
      if (flags & ECF_MAY_BE_ALLOCA)
	record_new_stack_level ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
	free (args[i].aligned_regs);

      targetm.calls.end_call_args ();

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
	{
	  tail_call_insns = insns;

	  /* Restore the pending stack adjustment now that we have
	     finished generating the sibling call sequence.  */

	  restore_pending_stack_adjust (&save);

	  /* Prepare arg structure for next iteration.  */
	  for (i = 0; i < num_actuals; i++)
	    {
	      args[i].value = 0;
	      args[i].aligned_regs = 0;
	      args[i].stack = 0;
	    }

	  sbitmap_free (stored_args_map);
	  internal_arg_pointer_exp_state.scan_start = NULL;
	  internal_arg_pointer_exp_state.cache.release ();
	}
      else
	{
	  normal_call_insns = insns;

	  /* Verify that we've deallocated all the stack we used.  */
	  gcc_assert ((flags & ECF_NORETURN)
		      || (old_stack_allocated
			  == stack_pointer_delta - pending_stack_adjust));
	}

      /* If something prevents making this a sibling call,
	 zero out the sequence.  */
      if (sibcall_failure)
	tail_call_insns = NULL;
      else
	break;
    }

  /* If tail call production succeeded, we need to remove REG_EQUIV notes on
     arguments too, as argument area is now clobbered by the call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      crtl->tail_call_emit = true;
    }
  else
    emit_insn (normal_call_insns);

  currently_expanding_call--;

  free (stack_usage_map_buf);

  /* Join result with returned bounds so caller may use them if needed.  */
  target = chkp_join_splitted_slot (target, valbnd);

  return target;
}

/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx_insn *insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      /* There are never REG_EQUIV notes for the incoming arguments
	 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
	  && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	break;

      note = find_reg_note (insn, REG_EQUIV, 0);
      if (note)
	remove_note (insn, note);
      note = find_reg_note (insn, REG_EQUIV, 0);
      gcc_assert (!note);
    }
}

/* Traverse a list of TYPES and expand all complex types into their
   components.  */

static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (complex_type))
	{
	  tree next, imag;

	  /* Rewrite complex type with component type.  */
	  TREE_VALUE (p) = TREE_TYPE (complex_type);
	  next = TREE_CHAIN (p);

	  /* Add another component type for the imaginary part.  */
	  imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
	  TREE_CHAIN (p) = imag;
	  TREE_CHAIN (imag) = next;

	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return types;
}
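
/* For example, when the target's split_complex_arg hook accepts
   complex double, split_complex_types rewrites the type list
   (complex double, int) into (double, double, int): the COMPLEX_TYPE
   node is replaced by its component type and a second copy of that
   component type is spliced in for the imaginary part.  */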
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether return value needs to be saved, other
   parameters are documented in the emit_library_call function below.  */

static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
			   enum libcall_type fn_type,
			   machine_mode outmode, int nargs, va_list p)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  /* TODO: choose the correct decl type of orgfun.  Sadly this information
     isn't present here, so we default to native calling abi here.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ?  */
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ?  */
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct arg
  {
    rtx value;
    machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx_insn *before_call;
  bool have_push_fusage;
  tree tfom;			/* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  rtx pointer_reg
	    = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
	  pcc_struct_value = 1;
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
	  struct_value_size = GET_MODE_SIZE (outmode);
	  if (value != 0 && MEM_P (value))
	    mem_value = value;
	  else
	    mem_value = assign_temp (tfom, 1, 1);
#endif
	  /* This call returns a big structure.  */
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
	}
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
	  && !(CONSTANT_P (addr)
	       && targetm.legitimate_constant_p (Pmode, addr)))
	addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = targetm.calls.function_arg (args_so_far,
						      Pmode, NULL_TREE, true);
      gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
						   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   reg_parm_stack_space, 0,
			   NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      machine_mode mode = (machine_mode) va_arg (p, int);
      int unsigned_p = 0;

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
		  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
	  && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
	val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
	{
	  rtx slot;
	  int must_copy
	    = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);

	  /* If this was a CONST function, it is now PURE since it now
	     reads memory.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }

	  if (MEM_P (val) && !must_copy)
	    {
	      tree val_expr = MEM_EXPR (val);
	      if (val_expr)
		mark_addressable (val_expr);
	      slot = val;
	    }
	  else
	    {
	      slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
				  1, 1);
	      emit_move_insn (slot, val);
	    }

	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					   gen_rtx_USE (VOIDmode, slot),
					   call_fusage);
	  if (must_copy)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode,
							      slot),
					     call_fusage);

	  mode = Pmode;
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	}

      mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
      argvec[count].mode = mode;
      argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
						      NULL_TREE, true);

      argvec[count].partial
	= targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);

      if (argvec[count].reg == 0
	  || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	{
	  locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			       1,
#else
			       argvec[count].reg != 0,
#endif
			       reg_parm_stack_space, argvec[count].partial,
			       NULL_TREE, &args_size, &argvec[count].locate);
	  args_size.constant += argvec[count].locate.size.constant;
	  gcc_assert (!argvec[count].locate.size.var);
	}
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	argvec[count].locate.where_pad =
	  BLOCK_REG_PADDING (mode, NULL_TREE,
			     GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
#endif

      targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
    }

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
			  + stack_pointer_delta
			  + STACK_BYTES - 1)
			 / STACK_BYTES
			 * STACK_BYTES)
			- stack_pointer_delta);
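
  /* The rounding above aligns the sum of the pre-existing delta and
     the argument size.  For example, with STACK_BYTES == 16, an
     incoming stack_pointer_delta of 8 and args_size.constant of 20,
     ((20 + 8 + 15) / 16) * 16 - 8 == 24, so that 8 + 24 == 32 is
     again a multiple of 16 at the call.  */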

  args_size.constant = MAX (args_size.constant,
			    reg_parm_stack_space);

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
    args_size.constant -= reg_parm_stack_space;

  if (args_size.constant > crtl->outgoing_args_size)
    crtl->outgoing_args_size = args_size.constant;

  if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
    {
      int pushed = args_size.constant + pending_stack_adjust;
      if (pushed > current_function_pushed_stack_size)
	current_function_pushed_stack_size = pushed;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
	 the evaluation of a parm to clobber something we have already
	 written to the stack.  Since most function calls on RISC machines
	 do not use the stack, this is uncommon, but must work correctly.

	 Therefore, we save any area of the stack that was already written
	 and that we are using.  Here we set up to do this by making a new
	 stack usage map from the old one.

	 Another approach might be to try to reorder the argument
	 evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

      /* Since we will be writing into the entire argument area, the
	 map must be allocated for its entire size, not just the part that
	 is the responsibility of the caller.  */
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	needed += reg_parm_stack_space;

      if (ARGS_GROW_DOWNWARD)
	highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					   needed + 1);
      else
	highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);

      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
	memcpy (stack_usage_map, initial_stack_usage_map,
		initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
		highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
	 and real regs afterwards.  Loop optimization, for example, can create
	 new libcalls after we've instantiated the virtual regs, and if we
	 use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
	argblock = plus_constant (Pmode, stack_pointer_rtx,
				  STACK_POINTER_OFFSET);
      else
	argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* We push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));

  argnum = nargs - 1;
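
  /* The loops below walk ARGVEC from the end: COUNT runs up while
     ARGNUM runs down, so with three arguments argvec[2] is handled
     first and argvec[0] last, i.e. the last argument is pushed
     first.  */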
3c0fca12 4165
f73ad30e
JH
4166#ifdef REG_PARM_STACK_SPACE
4167 if (ACCUMULATE_OUTGOING_ARGS)
4168 {
4169 /* The argument list is the property of the called routine and it
4170 may clobber it. If the fixed area has been used for previous
b820d2b8
AM
4171 parameters, we must save and restore it. */
4172 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4173 &low_to_save, &high_to_save);
3c0fca12
RH
4174 }
4175#endif
f725a3ec 4176
2f21e1ba
BS
4177 /* When expanding a normal call, args are stored in push order,
4178 which is the reverse of what we have here. */
4179 bool any_regs = false;
4180 for (int i = nargs; i-- > 0; )
4181 if (argvec[i].reg != NULL_RTX)
4182 {
4183 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4184 any_regs = true;
4185 }
4186 if (!any_regs)
4187 targetm.calls.call_args (pc_rtx, NULL_TREE);
4188
3c0fca12
RH
4189 /* Push the args that need to be pushed. */
4190
0ed4bf92
BS
4191 have_push_fusage = false;
4192
3c0fca12
RH
4193 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4194 are to be pushed. */
3d9684ae 4195 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 4196 {
ef4bddc2 4197 machine_mode mode = argvec[argnum].mode;
b3694847 4198 rtx val = argvec[argnum].value;
3c0fca12
RH
4199 rtx reg = argvec[argnum].reg;
4200 int partial = argvec[argnum].partial;
6bdf8c2e 4201 unsigned int parm_align = argvec[argnum].locate.boundary;
f73ad30e 4202 int lower_bound = 0, upper_bound = 0, i;
3c0fca12
RH
4203
4204 if (! (reg != 0 && partial == 0))
4205 {
2b1c5433
JJ
4206 rtx use;
4207
f73ad30e
JH
4208 if (ACCUMULATE_OUTGOING_ARGS)
4209 {
f8a097cd
JH
4210 /* If this is being stored into a pre-allocated, fixed-size,
4211 stack area, save any previous data at that location. */
3c0fca12 4212
6dad9361
TS
4213 if (ARGS_GROW_DOWNWARD)
4214 {
4215 /* stack_slot is negative, but we want to index stack_usage_map
4216 with positive values. */
4217 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4218 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4219 }
4220 else
4221 {
4222 lower_bound = argvec[argnum].locate.slot_offset.constant;
4223 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4224 }
3c0fca12 4225
546ff777
AM
4226 i = lower_bound;
4227 /* Don't worry about things in the fixed argument area;
4228 it has already been saved. */
4229 if (i < reg_parm_stack_space)
4230 i = reg_parm_stack_space;
4231 while (i < upper_bound && stack_usage_map[i] == 0)
4232 i++;
3c0fca12 4233
546ff777 4234 if (i < upper_bound)
f73ad30e 4235 {
e7949876
AM
4236 /* We need to make a save area. */
4237 unsigned int size
4238 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
ef4bddc2 4239 machine_mode save_mode
e7949876
AM
4240 = mode_for_size (size, MODE_INT, 1);
4241 rtx adr
0a81f074 4242 = plus_constant (Pmode, argblock,
e7949876 4243 argvec[argnum].locate.offset.constant);
f73ad30e 4244 rtx stack_area
e7949876 4245 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
f73ad30e 4246
9778f2f8
JH
4247 if (save_mode == BLKmode)
4248 {
4249 argvec[argnum].save_area
4250 = assign_stack_temp (BLKmode,
9474e8ab
MM
4251 argvec[argnum].locate.size.constant
4252 );
9778f2f8 4253
1a8cb155
RS
4254 emit_block_move (validize_mem
4255 (copy_rtx (argvec[argnum].save_area)),
c22cacf3 4256 stack_area,
9778f2f8
JH
4257 GEN_INT (argvec[argnum].locate.size.constant),
4258 BLOCK_OP_CALL_PARM);
4259 }
4260 else
4261 {
4262 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4263
4264 emit_move_insn (argvec[argnum].save_area, stack_area);
4265 }
f73ad30e 4266 }
3c0fca12 4267 }
19caa751 4268
6bdf8c2e 4269 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
44bb111a 4270 partial, reg, 0, argblock,
e7949876
AM
4271 GEN_INT (argvec[argnum].locate.offset.constant),
4272 reg_parm_stack_space,
99206968 4273 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
3c0fca12 4274
3c0fca12 4275 /* Now mark the segment we just used. */
f73ad30e
JH
4276 if (ACCUMULATE_OUTGOING_ARGS)
4277 for (i = lower_bound; i < upper_bound; i++)
4278 stack_usage_map[i] = 1;
3c0fca12
RH
4279
4280 NO_DEFER_POP;
475a3eef 4281
2b1c5433
JJ
4282 /* Indicate argument access so that alias.c knows that these
4283 values are live. */
4284 if (argblock)
0a81f074 4285 use = plus_constant (Pmode, argblock,
2b1c5433 4286 argvec[argnum].locate.offset.constant);
0ed4bf92
BS
4287 else if (have_push_fusage)
4288 continue;
2b1c5433 4289 else
0ed4bf92
BS
4290 {
4291 /* When arguments are pushed, trying to tell alias.c where
4292 exactly this argument is won't work, because the
4293 auto-increment causes confusion. So we merely indicate
4294 that we access something with a known mode somewhere on
4295 the stack. */
4296 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4297 gen_rtx_SCRATCH (Pmode));
4298 have_push_fusage = true;
4299 }
2b1c5433
JJ
4300 use = gen_rtx_MEM (argvec[argnum].mode, use);
4301 use = gen_rtx_USE (VOIDmode, use);
4302 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3c0fca12
RH
4303 }
4304 }
4305
3d9684ae 4306 argnum = nargs - 1;
3c0fca12 4307
531ca746 4308 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3c0fca12
RH
4309
4310 /* Now load any reg parms into their regs. */
4311
4312 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4313 are to be pushed. */
3d9684ae 4314 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 4315 {
ef4bddc2 4316 machine_mode mode = argvec[argnum].mode;
b3694847 4317 rtx val = argvec[argnum].value;
3c0fca12
RH
4318 rtx reg = argvec[argnum].reg;
4319 int partial = argvec[argnum].partial;
ee222ce0 4320#ifdef BLOCK_REG_PADDING
460b171d 4321 int size = 0;
ee222ce0 4322#endif
460b171d 4323
3c0fca12
RH
4324 /* Handle calls that pass values in multiple non-contiguous
4325 locations. The PA64 has examples of this for library calls. */
4326 if (reg != 0 && GET_CODE (reg) == PARALLEL)
ff15c351 4327 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3c0fca12 4328 else if (reg != 0 && partial == 0)
460b171d
JB
4329 {
4330 emit_move_insn (reg, val);
4331#ifdef BLOCK_REG_PADDING
4332 size = GET_MODE_SIZE (argvec[argnum].mode);
4333
4334 /* Copied from load_register_parameters. */
4335
4336 /* Handle case where we have a value that needs shifting
4337 up to the msb. eg. a QImode value and we're padding
4338 upward on a BYTES_BIG_ENDIAN machine. */
4339 if (size < UNITS_PER_WORD
4340 && (argvec[argnum].locate.where_pad
4341 == (BYTES_BIG_ENDIAN ? upward : downward)))
4342 {
4343 rtx x;
4344 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4345
4346 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4347 report the whole reg as used. Strictly speaking, the
4348 call only uses SIZE bytes at the msb end, but it doesn't
4349 seem worth generating rtl to say that. */
4350 reg = gen_rtx_REG (word_mode, REGNO (reg));
4351 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4352 if (x != reg)
4353 emit_move_insn (reg, x);
4354 }
4355#endif
4356 }
3c0fca12
RH
4357
4358 NO_DEFER_POP;
4359 }
4360
3c0fca12
RH
4361 /* Any regs containing parms remain in use through the call. */
4362 for (count = 0; count < nargs; count++)
4363 {
4364 rtx reg = argvec[count].reg;
4365 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4366 use_group_regs (&call_fusage, reg);
4367 else if (reg != 0)
3b1bf459
BS
4368 {
4369 int partial = argvec[count].partial;
4370 if (partial)
4371 {
4372 int nregs;
4373 gcc_assert (partial % UNITS_PER_WORD == 0);
4374 nregs = partial / UNITS_PER_WORD;
4375 use_regs (&call_fusage, REGNO (reg), nregs);
4376 }
4377 else
4378 use_reg (&call_fusage, reg);
4379 }
3c0fca12
RH
4380 }
4381
4382 /* Pass the function the address in which to return a structure value. */
61f71b34 4383 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3c0fca12 4384 {
61f71b34 4385 emit_move_insn (struct_value,
3c0fca12
RH
4386 force_reg (Pmode,
4387 force_operand (XEXP (mem_value, 0),
4388 NULL_RTX)));
f8cfc6aa 4389 if (REG_P (struct_value))
61f71b34 4390 use_reg (&call_fusage, struct_value);
3c0fca12
RH
4391 }
4392
4393 /* Don't allow popping to be deferred, since then
4394 cse'ing of library calls could delete a call and leave the pop. */
4395 NO_DEFER_POP;
5591ee6f 4396 valreg = (mem_value == 0 && outmode != VOIDmode
390b17c2 4397 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3c0fca12 4398
ce48579b 4399 /* Stack must be properly aligned now. */
366de0ce
NS
4400 gcc_assert (!(stack_pointer_delta
4401 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
ebcd0b57 4402
695ee791
RH
4403 before_call = get_last_insn ();
4404
3c0fca12
RH
4405 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4406 will set inhibit_defer_pop to that value. */
de76b467
JH
4407 /* The return type is needed to decide how many bytes the function pops.
4408 Signedness plays no role in that, so for simplicity, we pretend it's
4409 always signed. We also assume that the list of arguments passed has
4410 no impact, so we pretend it is unknown. */
3c0fca12 4411
6de9cd9a 4412 emit_call_1 (fun, NULL,
f725a3ec 4413 get_identifier (XSTR (orgfun, 0)),
b0c48229 4414 build_function_type (tfom, NULL_TREE),
f725a3ec 4415 original_args_size.constant, args_size.constant,
3c0fca12 4416 struct_value_size,
d5cc9181 4417 targetm.calls.function_arg (args_so_far,
3c07301f 4418 VOIDmode, void_type_node, true),
5591ee6f 4419 valreg,
d5cc9181 4420 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
3c0fca12 4421
1e288103 4422 if (flag_ipa_ra)
4f660b15 4423 {
e67d1102 4424 rtx datum = orgfun;
4f660b15 4425 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
e67d1102 4426 rtx_call_insn *last = last_call_insn ();
4f660b15
RO
4427 add_reg_note (last, REG_CALL_DECL, datum);
4428 }
4429
460b171d
JB
4430 /* Right-shift returned value if necessary. */
4431 if (!pcc_struct_value
4432 && TYPE_MODE (tfom) != BLKmode
4433 && targetm.calls.return_in_msb (tfom))
4434 {
4435 shift_return_value (TYPE_MODE (tfom), false, valreg);
4436 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4437 }
4438
2f21e1ba
BS
4439 targetm.calls.end_call_args ();
4440
  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */
  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
	 immediately after the CALL_INSN.  Some ports emit more than
	 just a CALL_INSN above, so we must search for it here.  */
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      emit_barrier_after (last);
    }

  /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
     and LCT_RETURNS_TWICE, cannot perform non-local gotos.  */
  if (flags & ECF_NOTHROW)
    {
      rtx_insn *last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      make_reg_eh_region_note_nothrow_nononlocal (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
	}
      else
	{
	  /* Convert to the proper mode if a promotion has been active.  */
	  if (GET_MODE (valreg) != outmode)
	    {
	      int unsignedp = TYPE_UNSIGNED (tfom);

	      gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
						 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
			  == GET_MODE (valreg));
	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
	    }

	  if (value != 0)
	    emit_move_insn (value, valreg);
	  else
	    value = valreg;
	}
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	restore_fixed_argument_area (save_area, argblock,
				     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx adr = plus_constant (Pmode, argblock,
				     argvec[count].locate.offset.constant);
	    rtx stack_area = gen_rtx_MEM (save_mode,
					  memory_address (save_mode, adr));

	    if (save_mode == BLKmode)
	      emit_block_move (stack_area,
			       validize_mem (copy_rtx (argvec[count].save_area)),
			       GEN_INT (argvec[count].locate.size.constant),
			       BLOCK_OP_CALL_PARM);
	    else
	      emit_move_insn (stack_area, argvec[count].save_area);
	  }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  free (stack_usage_map_buf);

  return value;
}
\f
/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
   other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
		   machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
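
/* Illustrative usage (an editor's sketch, not part of the original file):
   each argument rtx is followed by the machine_mode it should be converted
   to.  Assuming a target where memset_libfunc is initialized, a memset-style
   call could look like

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
			object, Pmode,
			const0_rtx, TYPE_MODE (integer_type_node),
			size, TYPE_MODE (sizetype));

   where OBJECT and SIZE are rtxes supplied by the caller.  */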
\f
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
			 enum libcall_type fn_type,
			 machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
				      nargs, p);
  va_end (p);

  return result;
}
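
/* Illustrative usage (an editor's sketch, not part of the original file):
   expanding a two-operand operation through a libcall, in the style of
   optabs, might look like

     rtx result = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
					   mode, 2,
					   op0, mode,
					   op1, mode);

   with LIBFUNC, OP0, OP1 and MODE supplied by the caller; passing NULL_RTX
   for VALUE lets this function pick the result location.  */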
\f

/* Store pointer bounds argument ARG into the Bounds Table entry
   associated with PARM.  */
static void
store_bounds (struct arg_data *arg, struct arg_data *parm)
{
  rtx slot = NULL, ptr = NULL, addr = NULL;

  /* We may pass bounds not associated with any pointer.  */
  if (!parm)
    {
      gcc_assert (arg->special_slot);
      slot = arg->special_slot;
      ptr = const0_rtx;
    }
  /* Find the pointer associated with the bounds and where it is
     passed.  */
  else
    {
      if (!parm->reg)
	{
	  gcc_assert (!arg->special_slot);

	  addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
	}
      else if (REG_P (parm->reg))
	{
	  gcc_assert (arg->special_slot);
	  slot = arg->special_slot;

	  if (MEM_P (parm->value))
	    addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
	  else if (REG_P (parm->value))
	    ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
	  else
	    {
	      gcc_assert (!arg->pointer_offset);
	      ptr = parm->value;
	    }
	}
      else
	{
	  gcc_assert (GET_CODE (parm->reg) == PARALLEL);

	  gcc_assert (arg->special_slot);
	  slot = arg->special_slot;

	  if (parm->parallel_value)
	    ptr = chkp_get_value_with_offs (parm->parallel_value,
					    GEN_INT (arg->pointer_offset));
	  else
	    gcc_unreachable ();
	}
    }

  /* Expand bounds.  */
  if (!arg->value)
    arg->value = expand_normal (arg->tree_value);

  targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
}

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
	  if (ARGS_GROW_DOWNWARD)
	    {
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	      else
		upper_bound = 0;

	      lower_bound = upper_bound - arg->locate.size.constant;
	    }
	  else
	    {
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	      else
		lower_bound = 0;

	      upper_bound = lower_bound + arg->locate.size.constant;
	    }
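
	  /* Illustrative numbers (an editor's note, not from the original
	     source): with ARGS_GROW_DOWNWARD, a slot at address
	     (plus argblock -16) of size 4 gives upper_bound == 17 and
	     lower_bound == 13, so the slot's bytes are tracked as
	     stack_usage_map[13..16].  */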

	  i = lower_bound;
	  /* Don't worry about things in the fixed argument area;
	     it has already been saved.  */
	  if (i < reg_parm_stack_space)
	    i = reg_parm_stack_space;
	  while (i < upper_bound && stack_usage_map[i] == 0)
	    i++;

	  if (i < upper_bound)
	    {
	      /* We need to make a save area.  */
	      unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
	      machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  arg->save_area
		    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
				   stack_area,
				   GEN_INT (arg->locate.size.constant),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
	 mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
					       arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));
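
      /* Illustrative numbers (an editor's note, not from the original
	 source): with PARM_BOUNDARY == 32 and BITS_PER_UNIT == 8, a
	 1-byte argument gets used == ((1 + 3) / 4) * 4 == 4 bytes.  */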

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  int pad = used - size;
	  if (pad)
	    {
	      unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, pad_align);
	    }
	}
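
      /* (pad & -pad) isolates the lowest set bit of PAD, i.e. the largest
	 power of two dividing it.  Illustrative numbers (an editor's note,
	 not from the original source): pad == 6 with BITS_PER_UNIT == 8
	 gives pad_align == 2 * 8 == 16, so the value itself is only known
	 to be 16-bit aligned within its PARM_BOUNDARY-aligned block.  */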

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      if (!emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
			   parm_align, partial, reg, used - size, argblock,
			   ARGS_SIZE_RTX (arg->locate.offset),
			   reg_parm_stack_space,
			   ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
	sibcall_failure = 1;

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype),
				  EXPAND_NORMAL);
	}

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		     crtl->args.internal_arg_pointer
		  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
	    {
	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* arg.locate doesn't contain the pretend_args_size offset,
		 it's part of argblock.  Ensure we don't count it in I.  */
	      if (STACK_GROWS_DOWNWARD)
		i -= crtl->args.pretend_args_size;
	      else
		i += crtl->args.pretend_args_size;

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && arg->locate.size.var == 0
			  && CONST_INT_P (size_rtx));

	      if (arg->locate.offset.constant > i)
		{
		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->locate.offset.constant < i)
		{
		  /* Use arg->locate.size.constant instead of size_rtx
		     because we only care about the part of the argument
		     on the stack.  */
		  if (i < (arg->locate.offset.constant
			   + arg->locate.size.constant))
		    sibcall_failure = 1;
		}
	      else
		{
		  /* Even though they appear to be at the same location,
		     if part of the outgoing argument is in registers,
		     they aren't really at the same location.  Check for
		     this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (arg->locate.size.constant != INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}
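
      /* Illustrative numbers (an editor's note, not from the original
	 source): if the incoming value sits at internal_arg_pointer + 8
	 (so I == 8 after the pretend_args_size adjustment) and the
	 outgoing slot starts at arg->locate.offset.constant == 4 with
	 arg->locate.size.constant == 8, then I < 4 + 8, the two areas
	 overlap, and SIBCALL_FAILURE is set.  */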

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset),
		      reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad), false);

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
			     const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}

a4b1b92a 5057
7ae4ad28 5058/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
fe984136
RH
5059 takes trailing padding of a structure into account. */
5060/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
a4b1b92a
RH
5061
5062bool
ef4bddc2 5063must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
a4b1b92a
RH
5064{
5065 if (!type)
40cdfd5a 5066 return false;
a4b1b92a
RH
5067
5068 /* If the type has variable size... */
5069 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5070 return true;
5071
5072 /* If the type is marked as addressable (it is required
5073 to be constructed into the stack)... */
5074 if (TREE_ADDRESSABLE (type))
5075 return true;
5076
5077 /* If the padding and mode of the type is such that a copy into
5078 a register would put it into the wrong part of the register. */
5079 if (mode == BLKmode
5080 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5081 && (FUNCTION_ARG_PADDING (mode, type)
5082 == (BYTES_BIG_ENDIAN ? upward : downward)))
5083 return true;
5084
5085 return false;
5086}
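
/* Illustrative case (an editor's sketch, not from the original file):
   on a big-endian target with PARM_BOUNDARY == 32, a 3-byte BLKmode
   structure whose FUNCTION_ARG_PADDING is upward trips the final test
   above: 3 % 4 != 0 and the padding direction matches
   (BYTES_BIG_ENDIAN ? upward : downward), so copying the value into a
   register would leave its bytes in the wrong part of the register,
   and it must be passed on the stack instead.  */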