/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "diagnostic-core.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
#include "tree-flow.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
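
/* For example, a target with PREFERRED_STACK_BOUNDARY == 128 bits and
   BITS_PER_UNIT == 8, such as x86-64 with its default 16-byte stack
   alignment, gets STACK_BYTES == 128 / 8 == 16.  */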

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;
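
/* For instance, once a word-sized argument has been stored at offsets
   0..3 of the outgoing argument block, stack_usage_map[0..3] are nonzero
   and highest_outgoing_arg_in_use is 4; a nested call expanded while
   computing a later argument then knows to keep its own stores clear of
   those bytes (or to save and restore them).  */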

/* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
   stack location's tail call argument has been already stored into the stack.
   This bitmap is used to prevent sibling call optimization if function tries
   to use parent's incoming argument slots when they have been already
   overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
	       && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
	funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      rtx chain;

      gcc_assert (fndecl);
      chain = targetm.calls.static_chain (fndecl, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	use_reg (call_fusage, chain);
    }

  return funexp;
}
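
/* For instance, a call through a function pointer reaches this point with
   FUNEXP as a REG or MEM rather than a SYMBOL_REF; the code above then
   legitimizes the address, and on a target whose
   small_register_classes_for_mode_p hook is true it is pulled out of
   memory early, so that the call address does not compete with the
   argument registers during reload.  */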

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn, call, funmem;
  int already_popped = 0;
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				     next_arg_reg, n_pop);
      else
	pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			       n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				  next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			    n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
				     next_arg_reg,
				     GEN_INT (struct_value_size)));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
					next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
				  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a const or pure call that may loop, set the insn's bit
     for that.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
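
/* As a concrete case: on 32-bit x86, a function declared with
   __attribute__ ((stdcall)) pops its own stack arguments on return, so
   targetm.calls.return_pops_args reports a nonzero N_POPPED above; the
   bytes the callee pops are then subtracted from ROUNDED_STACK_SIZE and
   STACK_POINTER_DELTA instead of being released by an explicit stack
   adjustment after the call.  */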

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_'
	      && name[2] == 'b'
	      && !strncmp (name + 3, "uiltin_", 7))
	    tname += 10;
	  else if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_NORETURN;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork"))
	       || (tname[0] == 'g' && tname[1] == 'e'
		   && !strcmp (tname, "getcontext")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_NORETURN;
    }

  return flags;
}
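
/* For example, a file-scope extern declaration of "vfork" or "sigsetjmp"
   is recognized above purely by name and gets ECF_RETURNS_TWICE, while
   "longjmp" and "siglongjmp" get ECF_NORETURN; the "__builtin_" prefix is
   skipped first, so "__builtin_alloca" is flagged ECF_MAY_BE_ALLOCA just
   like plain "alloca".  */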

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
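
/* The leading character of the internal "fn spec" attribute string
   describes the return value: '1' through '4' mean the function returns
   its first through fourth argument unchanged (ERF_RETURNS_ARG plus a
   zero-based argument index), 'm' marks a malloc-like result that aliases
   nothing else (ERF_NOALIAS), and '.' says nothing is known.  A function
   like memcpy, which returns its first argument, would be described by a
   spec string starting with '1'.  */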

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const_gimple stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
	  & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

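/* For example, a declaration such as
     extern int f (int) __attribute__ ((const, nothrow, leaf));
   yields ECF_CONST | ECF_NOTHROW | ECF_LEAF here; adding `noreturn'
   (which sets TREE_THIS_VOLATILE) would also turn on ECF_NORETURN and,
   because the const bit is set, ECF_LOOPING_CONST_OR_PURE.  */
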
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
		    > COSTS_N_INSNS (1)
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
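
/* For instance, when an argument's value is an expensive expression and
   its destination is a fixed hard register such as the first integer
   argument register, computing it directly into that register would keep
   the hard register live across the whole computation; staging the value
   in a pseudo first leaves the register allocator room to maneuver.  */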

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	enum machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	delta = -high;
#else
	delta = low;
#endif
	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* For any elements in ARGS that refer to parameters that are to be passed
   in registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
				      word_mode, word_mode);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word);
	  }
      }
}
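
/* As an illustration: on a 32-bit target (UNITS_PER_WORD == 4), a 6-byte
   BLKmode argument whose in-memory copy is only byte-aligned gets
   n_aligned_regs == 2; the loop above extracts a 32-bit piece and then a
   16-bit piece with extract_bit_field and deposits them into two
   word_mode pseudos, from which the hard argument registers are loaded
   later, when the call sequence moves the register parameters.  */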

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree node for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
				 struct arg_data *args,
				 struct args_size *args_size,
				 int n_named_args ATTRIBUTE_UNUSED,
				 tree exp, tree struct_value_addr_value,
				 tree fndecl, tree fntype,
				 cumulative_args_t args_so_far,
				 int reg_parm_stack_space,
				 rtx *old_stack_level, int *old_pending_adj,
				 int *must_preallocate, int *ecf_flags,
				 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
	args[j].tree_value = struct_value_addr_value;
	j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
	tree argtype = TREE_TYPE (arg);
	if (targetm.calls.split_complex_arg
	    && argtype
	    && TREE_CODE (argtype) == COMPLEX_TYPE
	    && targetm.calls.split_complex_arg (argtype))
	  {
	    tree subtype = TREE_TYPE (argtype);
	    args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
	    j += inc;
	    args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
	  }
	else
	  args[j].tree_value = arg;
	j += inc;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
	 we would pass the first field of the union or record.  We have
	 already verified that the modes are the same.  */
      if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
	  && TYPE_TRANSPARENT_AGGR (type))
	type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many bytes are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
			     type, argpos < n_named_args))
	{
	  bool callee_copies;
	  tree base = NULL_TREE;

	  callee_copies
	    = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
				       type, argpos < n_named_args);

	  /* If we're compiling a thunk, pass through invisible references
	     instead of making a copy.  */
	  if (call_from_thunk_p
	      || (callee_copies
		  && !TREE_ADDRESSABLE (type)
		  && (base = get_base_address (args[i].tree_value))
		  && TREE_CODE (base) != SSA_NAME
		  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
	    {
	      mark_addressable (args[i].tree_value);

	      /* We can't use sibcalls if a callee-copied argument is
		 stored in the current function's frame.  */
	      if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
		*may_tailcall = false;

	      args[i].tree_value = build_fold_addr_expr_loc (loc,
							     args[i].tree_value);
	      type = TREE_TYPE (args[i].tree_value);

	      if (*ecf_flags & ECF_CONST)
		*ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
		  || (flag_stack_check == GENERIC_STACK_CHECK
		      && compare_tree_int (TYPE_SIZE_UNIT (type),
					   STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (args[i].tree_value);

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  /* We can pass TRUE as the 4th argument because we just
		     saved the stack pointer and will restore it right after
		     the call.  */
		  copy = allocate_dynamic_stack_space (size_rtx,
						       TYPE_ALIGN (type),
						       TYPE_ALIGN (type),
						       true);
		  copy = gen_rtx_MEM (BLKmode, copy);
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 1, 0);

	      store_expr (args[i].tree_value, copy, 0, false);

	      /* Just change the const function to pure and then let
		 the next test clear the pure based on
		 callee_copies.  */
	      if (*ecf_flags & ECF_CONST)
		{
		  *ecf_flags &= ~ECF_CONST;
		  *ecf_flags |= ECF_PURE;
		}

	      if (!callee_copies && *ecf_flags & ECF_PURE)
		*ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

	      args[i].tree_value
		= build_fold_addr_expr_loc (loc, make_tree (type, copy));
	      type = TREE_TYPE (args[i].tree_value);
	      *may_tailcall = false;
	    }
	}

      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
				    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
						argpos < n_named_args);

      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
	args[i].tail_call_reg
	  = targetm.calls.function_incoming_arg (args_so_far, mode, type,
						 argpos < n_named_args);
      else
	args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
	args[i].partial
	  = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
					     argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     args[i].pass_on_stack ? 0 : args[i].partial,
			     fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	args[i].locate.where_pad =
	  BLOCK_REG_PADDING (mode, type,
			     int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
	ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
					  type, argpos < n_named_args);
    }
}

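/* As an example of the invisible-reference path above: an ABI such as
   AArch64's AAPCS64 replaces a by-value composite argument larger than
   16 bytes with a pointer to a caller-made copy, so pass_by_reference
   returns true here; unless the callee is known to make its own copy,
   the temporary is built and its address is passed instead, which also
   rules out a sibling call.  */
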
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     tree fndecl ATTRIBUTE_UNUSED,
			     tree fntype ATTRIBUTE_UNUSED,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will already be aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	    args_size->var
	      = size_binop (MINUS_EXPR, args_size->var,
			    ssize_int (reg_parm_stack_space));
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
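
/* Worked example of the rounding above: with args_size->constant == 10,
   stack_pointer_delta == 4 and a 16-byte preferred boundary, the block
   size becomes ((10 + 4 + 15) / 16) * 16 - 4 == 12, so that after the
   push the stack pointer sits (4 + 12) == 16 bytes below its aligned
   base, i.e. still on a 16-byte boundary.  */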

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such cases by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      enum machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
	continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
	{
	  int unsignedp = args[i].unsignedp;
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);

	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					    args[i].unsignedp);
	    }
	}
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
			   struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
cc45e5e8 1553
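/* A worked example of the heuristic above (all numbers hypothetical):
   with stack arguments totalling args_size->constant == 48 bytes, one
   of which is a BLKmode CALL_EXPR occupying 32 bytes, we get
   copy_to_evaluate_size == 32; since 32 * 2 == 64 >= 48, we choose to
   preallocate, because evaluating the nested call into a temporary and
   block-copying it later would touch more than half of the outgoing
   argument area anyway.  */
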
f3012854 1554/* If we preallocated stack space, compute the address of each argument
1555 and store it into the ARGS array.
1556
c87678e4 1557 We need not ensure it is a valid memory address here; it will be
f3012854 1558 validized when it is used.
1559
1560 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1561
1562static void
4c9e08a4 1563compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
f3012854 1564{
1565 if (argblock)
1566 {
1567 rtx arg_reg = argblock;
1568 int i, arg_offset = 0;
1569
1570 if (GET_CODE (argblock) == PLUS)
1571 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1572
1573 for (i = 0; i < num_actuals; i++)
1574 {
241399f6 1575 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1576 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
f3012854 1577 rtx addr;
c5dc0c32 1578 unsigned int align, boundary;
c2ca1bab 1579 unsigned int units_on_stack = 0;
1580 enum machine_mode partial_mode = VOIDmode;
f3012854 1581
1582 /* Skip this parm if it will not be passed on the stack. */
c2ca1bab 1583 if (! args[i].pass_on_stack
1584 && args[i].reg != 0
1585 && args[i].partial == 0)
f3012854 1586 continue;
1587
971ba038 1588 if (CONST_INT_P (offset))
29c05e22 1589 addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
f3012854 1590 else
1591 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1592
29c05e22 1593 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 1594
1595 if (args[i].partial != 0)
1596 {
1597 /* Only part of the parameter is being passed on the stack.
1598 Generate a simple memory reference of the correct size. */
1599 units_on_stack = args[i].locate.size.constant;
1600 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1601 MODE_INT, 1);
1602 args[i].stack = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 1603 set_mem_size (args[i].stack, units_on_stack);
c2ca1bab 1604 }
1605 else
1606 {
1607 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1608 set_mem_attributes (args[i].stack,
1609 TREE_TYPE (args[i].tree_value), 1);
1610 }
c5dc0c32 1611 align = BITS_PER_UNIT;
1612 boundary = args[i].locate.boundary;
1613 if (args[i].locate.where_pad != downward)
1614 align = boundary;
971ba038 1615 else if (CONST_INT_P (offset))
c5dc0c32 1616 {
1617 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1618 align = align & -align;
1619 }
1620 set_mem_align (args[i].stack, align);
f3012854 1621
971ba038 1622 if (CONST_INT_P (slot_offset))
29c05e22 1623 addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
f3012854 1624 else
1625 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1626
29c05e22 1627 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 1628
1629 if (args[i].partial != 0)
1630 {
 1631 /* Only part of the parameter is being passed on the stack.
 1632 Generate a simple memory reference of the correct size. */
1634 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 1635 set_mem_size (args[i].stack_slot, units_on_stack);
c2ca1bab 1636 }
1637 else
1638 {
1639 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1640 set_mem_attributes (args[i].stack_slot,
1641 TREE_TYPE (args[i].tree_value), 1);
1642 }
c5dc0c32 1643 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
a9f2963b 1644
1645 /* Function incoming arguments may overlap with sibling call
1646 outgoing arguments and we cannot allow reordering of reads
1647 from function arguments with stores to outgoing arguments
1648 of sibling calls. */
ab6ab77e 1649 set_mem_alias_set (args[i].stack, 0);
1650 set_mem_alias_set (args[i].stack_slot, 0);
f3012854 1651 }
1652 }
1653}
c87678e4 1654
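/* The MEM alignment computed above relies on a bit trick: OR the byte
   offset (scaled to bits) with the slot boundary, then isolate the
   lowest set bit.  Below is a self-contained sketch, guarded out of
   the build, with hypothetical numbers.  */
#if 0
static unsigned int
sketch_known_alignment (void)
{
  unsigned int offset_bits = 12 * BITS_PER_UNIT;  /* byte offset 12 */
  unsigned int boundary = 64;                     /* slot boundary in bits */
  unsigned int align = offset_bits | boundary;    /* 96 | 64 == 96 */

  align = align & -align;  /* lowest set bit: 0b1100000 -> 0b0100000 */
  return align;            /* == 32: an address 12 bytes past a 64-bit
                              boundary is only provably 32-bit aligned */
}
#endif
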
f3012854 1655/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1656 in a call instruction.
1657
1658 FNDECL is the tree node for the target function. For an indirect call
1659 FNDECL will be NULL_TREE.
1660
95672afe 1661 ADDR is the operand 0 of CALL_EXPR for this call. */
f3012854 1662
1663static rtx
4c9e08a4 1664rtx_for_function_call (tree fndecl, tree addr)
f3012854 1665{
1666 rtx funexp;
1667
1668 /* Get the function to call, in the form of RTL. */
1669 if (fndecl)
1670 {
3d053e06 1671 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
ea259bbe 1672 TREE_USED (fndecl) = 1;
f3012854 1673
1674 /* Get a SYMBOL_REF rtx for the function address. */
1675 funexp = XEXP (DECL_RTL (fndecl), 0);
1676 }
1677 else
1678 /* Generate an rtx (probably a pseudo-register) for the address. */
1679 {
1680 push_temp_slots ();
8ec3c5c2 1681 funexp = expand_normal (addr);
c87678e4 1682 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
f3012854 1683 }
1684 return funexp;
1685}
1686
74c02416 1687/* Internal state for internal_arg_pointer_based_exp and its helpers. */
1688static struct
1689{
1690 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1691 or NULL_RTX if none has been scanned yet. */
1692 rtx scan_start;
 1693 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
 1694 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
 1695 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
 1696 with a fixed offset, or pc_rtx if the offset is variable or unknown. */
f1f41a6c 1697 vec<rtx> cache;
74c02416 1698} internal_arg_pointer_exp_state;
1699
1700static rtx internal_arg_pointer_based_exp (rtx, bool);
1701
1702/* Helper function for internal_arg_pointer_based_exp. Scan insns in
1703 the tail call sequence, starting with first insn that hasn't been
1704 scanned yet, and note for each pseudo on the LHS whether it is based
 1705 on crtl->args.internal_arg_pointer or not, and what offset from that
 1706 pointer it has. */
1707
1708static void
1709internal_arg_pointer_based_exp_scan (void)
1710{
1711 rtx insn, scan_start = internal_arg_pointer_exp_state.scan_start;
1712
1713 if (scan_start == NULL_RTX)
1714 insn = get_insns ();
1715 else
1716 insn = NEXT_INSN (scan_start);
1717
1718 while (insn)
1719 {
1720 rtx set = single_set (insn);
1721 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1722 {
1723 rtx val = NULL_RTX;
1724 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1725 /* Punt on pseudos set multiple times. */
f1f41a6c 1726 if (idx < internal_arg_pointer_exp_state.cache.length ()
1727 && (internal_arg_pointer_exp_state.cache[idx]
74c02416 1728 != NULL_RTX))
1729 val = pc_rtx;
1730 else
1731 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1732 if (val != NULL_RTX)
1733 {
f1f41a6c 1734 if (idx >= internal_arg_pointer_exp_state.cache.length ())
 1735 internal_arg_pointer_exp_state.cache.safe_grow_cleared (idx + 1);
1736 internal_arg_pointer_exp_state.cache[idx] = val;
74c02416 1737 }
1738 }
1739 if (NEXT_INSN (insn) == NULL_RTX)
1740 scan_start = insn;
1741 insn = NEXT_INSN (insn);
1742 }
1743
1744 internal_arg_pointer_exp_state.scan_start = scan_start;
1745}
1746
1747/* Helper function for internal_arg_pointer_based_exp, called through
1748 for_each_rtx. Return 1 if *LOC is a register based on
1749 crtl->args.internal_arg_pointer. Return -1 if *LOC is not based on it
1750 and the subexpressions need not be examined. Otherwise return 0. */
1751
1752static int
1753internal_arg_pointer_based_exp_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1754{
1755 if (REG_P (*loc) && internal_arg_pointer_based_exp (*loc, false) != NULL_RTX)
1756 return 1;
1757 if (MEM_P (*loc))
1758 return -1;
1759 return 0;
1760}
1761
 1762 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
 1763 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
 1764 it with a fixed offset, or pc_rtx if the offset is variable or unknown.
1765 TOPLEVEL is true if the function is invoked at the topmost level. */
1766
1767static rtx
1768internal_arg_pointer_based_exp (rtx rtl, bool toplevel)
1769{
1770 if (CONSTANT_P (rtl))
1771 return NULL_RTX;
1772
1773 if (rtl == crtl->args.internal_arg_pointer)
1774 return const0_rtx;
1775
1776 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1777 return NULL_RTX;
1778
1779 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1780 {
1781 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1782 if (val == NULL_RTX || val == pc_rtx)
1783 return val;
29c05e22 1784 return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
74c02416 1785 }
1786
1787 /* When called at the topmost level, scan pseudo assignments in between the
1788 last scanned instruction in the tail call sequence and the latest insn
1789 in that sequence. */
1790 if (toplevel)
1791 internal_arg_pointer_based_exp_scan ();
1792
1793 if (REG_P (rtl))
1794 {
1795 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
f1f41a6c 1796 if (idx < internal_arg_pointer_exp_state.cache.length ())
1797 return internal_arg_pointer_exp_state.cache[idx];
74c02416 1798
1799 return NULL_RTX;
1800 }
1801
1802 if (for_each_rtx (&rtl, internal_arg_pointer_based_exp_1, NULL))
1803 return pc_rtx;
1804
1805 return NULL_RTX;
1806}
1807
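/* For illustration (hypothetical RTL): if a pseudo P was set once by
     (set (reg P) (plus (reg internal_arg_pointer) (const_int 8)))
   the scan above caches (const_int 8) for P, so a later query on
     (plus (reg P) (const_int 16))
   returns (const_int 24).  Had P been assigned a second time, its
   cache entry would have been downgraded to pc_rtx, i.e. "based on
   the argument pointer at a variable or unknown offset".  */
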
ff6c0ab2 1808/* Return true if and only if SIZE storage units (usually bytes)
1809 starting from address ADDR overlap with already clobbered argument
1810 area. This function is used to determine if we should give up a
1811 sibcall. */
1812
1813static bool
1814mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1815{
1816 HOST_WIDE_INT i;
74c02416 1817 rtx val;
ff6c0ab2 1818
53c5d9d4 1819 if (bitmap_empty_p (stored_args_map))
9ddeff7e 1820 return false;
74c02416 1821 val = internal_arg_pointer_based_exp (addr, true);
1822 if (val == NULL_RTX)
1823 return false;
1824 else if (val == pc_rtx)
cc0595c0 1825 return true;
ff6c0ab2 1826 else
74c02416 1827 i = INTVAL (val);
98d44ce4 1828#ifdef STACK_GROWS_DOWNWARD
1829 i -= crtl->args.pretend_args_size;
1830#else
1831 i += crtl->args.pretend_args_size;
1832#endif
ff6c0ab2 1833
1834#ifdef ARGS_GROW_DOWNWARD
1835 i = -i - size;
1836#endif
1837 if (size > 0)
1838 {
1839 unsigned HOST_WIDE_INT k;
1840
1841 for (k = 0; k < size; k++)
156093aa 1842 if (i + k < SBITMAP_SIZE (stored_args_map)
08b7917c 1843 && bitmap_bit_p (stored_args_map, i + k))
ff6c0ab2 1844 return true;
1845 }
1846
1847 return false;
1848}
1849
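/* A worked example (hypothetical layout: STACK_GROWS_DOWNWARD, no
   pretend args, !ARGS_GROW_DOWNWARD): for an 8-byte access whose
   address is internal_arg_pointer + 16, internal_arg_pointer_based_exp
   returns (const_int 16), so the loop above tests bits 16..23 of
   stored_args_map -- that is, whether a tail call argument store has
   already overwritten any of those incoming-argument bytes.  */
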
cde25025 1850/* Do the register loads required for any wholly-register parms or any
1851 parms which are passed both on the stack and in a register. Their
c87678e4 1852 expressions were already evaluated.
cde25025 1853
1854 Mark all register-parms as living through the call, putting these USE
4c9e08a4 1855 insns in the CALL_INSN_FUNCTION_USAGE field.
1856
dc537795 1857 When IS_SIBCALL, perform the check_sibcall_argument_overlap
42b11544 1858 checking, setting *SIBCALL_FAILURE if appropriate. */
cde25025 1859
1860static void
4c9e08a4 1861load_register_parameters (struct arg_data *args, int num_actuals,
1862 rtx *call_fusage, int flags, int is_sibcall,
1863 int *sibcall_failure)
cde25025 1864{
1865 int i, j;
1866
cde25025 1867 for (i = 0; i < num_actuals; i++)
cde25025 1868 {
0e0be288 1869 rtx reg = ((flags & ECF_SIBCALL)
1870 ? args[i].tail_call_reg : args[i].reg);
cde25025 1871 if (reg)
1872 {
5f4cd670 1873 int partial = args[i].partial;
1874 int nregs;
1875 int size = 0;
42b11544 1876 rtx before_arg = get_last_insn ();
83272ab4 1877 /* Set non-negative if we must move a word at a time, even if
 1878 just one word (e.g., partial == 4 && mode == DFmode). Set
1879 to -1 if we just use a normal move insn. This value can be
1880 zero if the argument is a zero size structure. */
5f4cd670 1881 nregs = -1;
f054eb3c 1882 if (GET_CODE (reg) == PARALLEL)
1883 ;
1884 else if (partial)
1885 {
1886 gcc_assert (partial % UNITS_PER_WORD == 0);
1887 nregs = partial / UNITS_PER_WORD;
1888 }
5f4cd670 1889 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1890 {
1891 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1892 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1893 }
1894 else
1895 size = GET_MODE_SIZE (args[i].mode);
cde25025 1896
1897 /* Handle calls that pass values in multiple non-contiguous
1898 locations. The Irix 6 ABI has examples of this. */
1899
1900 if (GET_CODE (reg) == PARALLEL)
b600a907 1901 emit_group_move (reg, args[i].parallel_value);
cde25025 1902
1903 /* If simple case, just do move. If normal partial, store_one_arg
1904 has already loaded the register for us. In all other cases,
1905 load the register(s) from memory. */
1906
8e67abab 1907 else if (nregs == -1)
1908 {
1909 emit_move_insn (reg, args[i].value);
5f4cd670 1910#ifdef BLOCK_REG_PADDING
8e67abab 1911 /* Handle case where we have a value that needs shifting
 1912 up to the msb, e.g. a QImode value when we're padding
1913 upward on a BYTES_BIG_ENDIAN machine. */
1914 if (size < UNITS_PER_WORD
1915 && (args[i].locate.where_pad
1916 == (BYTES_BIG_ENDIAN ? upward : downward)))
1917 {
8e67abab 1918 rtx x;
1919 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
b2abd798 1920
1921 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1922 report the whole reg as used. Strictly speaking, the
1923 call only uses SIZE bytes at the msb end, but it doesn't
1924 seem worth generating rtl to say that. */
1925 reg = gen_rtx_REG (word_mode, REGNO (reg));
f5ff0b21 1926 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
b2abd798 1927 if (x != reg)
1928 emit_move_insn (reg, x);
8e67abab 1929 }
5f4cd670 1930#endif
8e67abab 1931 }
cde25025 1932
1933 /* If we have pre-computed the values to put in the registers in
1934 the case of non-aligned structures, copy them in now. */
1935
1936 else if (args[i].n_aligned_regs != 0)
1937 for (j = 0; j < args[i].n_aligned_regs; j++)
1938 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1939 args[i].aligned_regs[j]);
1940
e2e0ef92 1941 else if (partial == 0 || args[i].pass_on_stack)
5f4cd670 1942 {
1943 rtx mem = validize_mem (args[i].value);
1944
e2e0ef92 1945 /* Check for overlap with the already-clobbered argument area,
 1946 provided that this has non-zero size. */
ff6c0ab2 1947 if (is_sibcall
e2e0ef92 1948 && (size == 0
1949 || mem_overlaps_already_clobbered_arg_p
1950 (XEXP (args[i].value, 0), size)))
ff6c0ab2 1951 *sibcall_failure = 1;
1952
5f4cd670 1953 /* Handle a BLKmode that needs shifting. */
8e67abab 1954 if (nregs == 1 && size < UNITS_PER_WORD
2c267f1a 1955#ifdef BLOCK_REG_PADDING
1956 && args[i].locate.where_pad == downward
1957#else
1958 && BYTES_BIG_ENDIAN
1959#endif
1960 )
5f4cd670 1961 {
1962 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1963 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1964 rtx x = gen_reg_rtx (word_mode);
1965 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
92966f8b 1966 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1967 : LSHIFT_EXPR;
5f4cd670 1968
1969 emit_move_insn (x, tem);
f5ff0b21 1970 x = expand_shift (dir, word_mode, x, shift, ri, 1);
5f4cd670 1971 if (x != ri)
1972 emit_move_insn (ri, x);
1973 }
1974 else
5f4cd670 1975 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1976 }
cde25025 1977
42b11544 1978 /* When a parameter is a block, and perhaps in other cases, it is
1979 possible that it did a load from an argument slot that was
6a8fa8e2 1980 already clobbered. */
42b11544 1981 if (is_sibcall
1982 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1983 *sibcall_failure = 1;
1984
cde25025 1985 /* Handle calls that pass values in multiple non-contiguous
1986 locations. The Irix 6 ABI has examples of this. */
1987 if (GET_CODE (reg) == PARALLEL)
1988 use_group_regs (call_fusage, reg);
1989 else if (nregs == -1)
b4eeceb9 1990 use_reg_mode (call_fusage, reg,
1991 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
c75d013c 1992 else if (nregs > 0)
1993 use_regs (call_fusage, REGNO (reg), nregs);
cde25025 1994 }
1995 }
1996}
1997
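/* A worked example of the shift above (hypothetical 64-bit
   BYTES_BIG_ENDIAN target with upward padding): a 1-byte argument
   passed in a word register gives size == 1 and UNITS_PER_WORD == 8,
   so shift == (8 - 1) * BITS_PER_UNIT == 56, moving the byte into the
   most significant position of the word, where the callee expects an
   upward-padded value on a big-endian machine.  */
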
92e1ef5b 1998/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1999 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2000 bytes, then we would need to push some additional bytes to pad the
481feae3 2001 arguments. So, we compute an adjustment to the stack pointer for an
2002 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2003 bytes. Then, when the arguments are pushed the stack will be perfectly
2004 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
2005 be popped after the call. Returns the adjustment. */
92e1ef5b 2006
481feae3 2007static int
4c9e08a4 2008combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
2009 struct args_size *args_size,
38413c80 2010 unsigned int preferred_unit_stack_boundary)
92e1ef5b 2011{
2012 /* The number of bytes to pop so that the stack will be
2013 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2014 HOST_WIDE_INT adjustment;
2015 /* The alignment of the stack after the arguments are pushed, if we
 2016 just pushed the arguments without adjusting the stack here. */
38413c80 2017 unsigned HOST_WIDE_INT unadjusted_alignment;
92e1ef5b 2018
c87678e4 2019 unadjusted_alignment
92e1ef5b 2020 = ((stack_pointer_delta + unadjusted_args_size)
2021 % preferred_unit_stack_boundary);
2022
2023 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
 2024 as possible -- leaving just enough to cancel out the
2025 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2026 PENDING_STACK_ADJUST is non-negative, and congruent to
2027 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2028
2029 /* Begin by trying to pop all the bytes. */
c87678e4 2030 unadjusted_alignment
2031 = (unadjusted_alignment
92e1ef5b 2032 - (pending_stack_adjust % preferred_unit_stack_boundary));
2033 adjustment = pending_stack_adjust;
2034 /* Push enough additional bytes that the stack will be aligned
2035 after the arguments are pushed. */
d3ef58ec 2036 if (preferred_unit_stack_boundary > 1)
2037 {
3dc35e62 2038 if (unadjusted_alignment > 0)
c87678e4 2039 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
d3ef58ec 2040 else
c87678e4 2041 adjustment += unadjusted_alignment;
d3ef58ec 2042 }
c87678e4 2043
92e1ef5b 2044 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
2045 bytes after the call. The right number is the entire
2046 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2047 by the arguments in the first place. */
c87678e4 2048 args_size->constant
92e1ef5b 2049 = pending_stack_adjust - adjustment + unadjusted_args_size;
2050
481feae3 2051 return adjustment;
92e1ef5b 2052}
2053
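/* A self-contained sketch of the arithmetic above, guarded out of the
   build; all numbers are hypothetical and the locals merely shadow the
   real variables for readability.  */
#if 0
static int
sketch_combine_adjustment (void)
{
  int preferred_unit_stack_boundary = 16;
  int stack_pointer_delta = 12;   /* bytes already pushed */
  int unadjusted_args_size = 8;   /* bytes of arguments to push */
  int pending_stack_adjust = 20;  /* bytes waiting to be popped */
  int unadjusted_alignment, adjustment;

  unadjusted_alignment
    = (stack_pointer_delta + unadjusted_args_size)
      % preferred_unit_stack_boundary;                       /* == 4 */
  unadjusted_alignment
    -= pending_stack_adjust % preferred_unit_stack_boundary; /* == 0 */

  adjustment = pending_stack_adjust;  /* try to pop all 20 bytes now */
  if (unadjusted_alignment > 0)
    adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
  else
    adjustment += unadjusted_alignment;                      /* += 0 */

  /* Popping 20 bytes leaves stack_pointer_delta == -8; pushing the
     8 bytes of arguments lands exactly on a 16-byte boundary at the
     call, and 20 - 20 + 8 == 8 bytes remain to be popped afterwards
     (the new args_size->constant).  */
  return adjustment;  /* == 20 */
}
#endif
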
7ecc63d3 2054/* Scan X expression if it does not dereference any argument slots
2055 we already clobbered by tail call arguments (as noted in stored_args_map
2056 bitmap).
d10cfa8d 2057 Return nonzero if X expression dereferences such argument slots,
7ecc63d3 2058 zero otherwise. */
2059
2060static int
4c9e08a4 2061check_sibcall_argument_overlap_1 (rtx x)
7ecc63d3 2062{
2063 RTX_CODE code;
2064 int i, j;
7ecc63d3 2065 const char *fmt;
2066
2067 if (x == NULL_RTX)
2068 return 0;
2069
2070 code = GET_CODE (x);
2071
cc0595c0 2072 /* We need not check the operands of the CALL expression itself. */
2073 if (code == CALL)
2074 return 0;
2075
7ecc63d3 2076 if (code == MEM)
ff6c0ab2 2077 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2078 GET_MODE_SIZE (GET_MODE (x)));
7ecc63d3 2079
c87678e4 2080 /* Scan all subexpressions. */
7ecc63d3 2081 fmt = GET_RTX_FORMAT (code);
2082 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2083 {
2084 if (*fmt == 'e')
c87678e4 2085 {
2086 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2087 return 1;
2088 }
7ecc63d3 2089 else if (*fmt == 'E')
c87678e4 2090 {
2091 for (j = 0; j < XVECLEN (x, i); j++)
2092 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2093 return 1;
2094 }
7ecc63d3 2095 }
2096 return 0;
7ecc63d3 2097}
2098
 2099 /* Scan the sequence after INSN to see whether it dereferences any
 2100 argument slots we have already clobbered by tail call arguments (as
42b11544 2101 noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the
 2102 stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
 2103 is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the
 2104 sequence after INSN dereferences such argument slots, zero otherwise. */
7ecc63d3 2105
2106static int
4c9e08a4 2107check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
c87678e4 2108{
7ecc63d3 2109 int low, high;
2110
2111 if (insn == NULL_RTX)
2112 insn = get_insns ();
2113 else
2114 insn = NEXT_INSN (insn);
2115
2116 for (; insn; insn = NEXT_INSN (insn))
c87678e4 2117 if (INSN_P (insn)
2118 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
7ecc63d3 2119 break;
2120
42b11544 2121 if (mark_stored_args_map)
2122 {
db10eec8 2123#ifdef ARGS_GROW_DOWNWARD
241399f6 2124 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
db10eec8 2125#else
241399f6 2126 low = arg->locate.slot_offset.constant;
db10eec8 2127#endif
2128
241399f6 2129 for (high = low + arg->locate.size.constant; low < high; low++)
08b7917c 2130 bitmap_set_bit (stored_args_map, low);
42b11544 2131 }
7ecc63d3 2132 return insn != NULL_RTX;
2133}
2134
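/* For illustration (hypothetical offsets, !ARGS_GROW_DOWNWARD): an
   argument with locate.slot_offset.constant == 16 and
   locate.size.constant == 8 marks bits 16..23 of stored_args_map, so
   any later argument whose evaluation reads those bytes of the
   incoming argument area will be caught by
   check_sibcall_argument_overlap_1 and force sibcall_failure.  */
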
05d18e8b 2135/* Given that a function returns a value of mode MODE at the most
2136 significant end of hard register VALUE, shift VALUE left or right
2137 as specified by LEFT_P. Return true if some action was needed. */
2c8ff1ed 2138
05d18e8b 2139bool
2140shift_return_value (enum machine_mode mode, bool left_p, rtx value)
2c8ff1ed 2141{
05d18e8b 2142 HOST_WIDE_INT shift;
2143
2144 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2145 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2146 if (shift == 0)
2147 return false;
2148
2149 /* Use ashr rather than lshr for right shifts. This is for the benefit
2150 of the MIPS port, which requires SImode values to be sign-extended
2151 when stored in 64-bit registers. */
2152 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2153 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2154 gcc_unreachable ();
2155 return true;
2c8ff1ed 2156}
2157
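/* A worked example (hypothetical target): for an SImode value returned
   at the most significant end of a DImode hard register, shift ==
   64 - 32 == 32; with LEFT_P false the register is shifted right
   (arithmetically, per the MIPS note above) to bring the value down to
   the least significant bits.  */
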
90af1361 2158/* If X is a likely-spilled register value, copy it to a pseudo
2159 register and return that register. Return X otherwise. */
2160
2161static rtx
2162avoid_likely_spilled_reg (rtx x)
2163{
f4e36c33 2164 rtx new_rtx;
90af1361 2165
2166 if (REG_P (x)
2167 && HARD_REGISTER_P (x)
24dd0668 2168 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
90af1361 2169 {
2170 /* Make sure that we generate a REG rather than a CONCAT.
2171 Moves into CONCATs can need nontrivial instructions,
2172 and the whole point of this function is to avoid
2173 using the hard register directly in such a situation. */
2174 generating_concat_p = 0;
f4e36c33 2175 new_rtx = gen_reg_rtx (GET_MODE (x));
90af1361 2176 generating_concat_p = 1;
f4e36c33 2177 emit_move_insn (new_rtx, x);
2178 return new_rtx;
90af1361 2179 }
2180 return x;
2181}
2182
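/* For illustration: on a target whose return register belongs to a
   class for which targetm.class_likely_spilled_p is true, leaving the
   value in that hard register across further expansion would invite
   poor spills, so it is copied into a fresh pseudo first.  Note that
   generating_concat_p is cleared around gen_reg_rtx so the copy is a
   plain REG even for complex modes.  */
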
c2f47e15 2183/* Generate all the code for a CALL_EXPR exp
66d433c7 2184 and return an rtx for its value.
2185 Store the value in TARGET (specified as an rtx) if convenient.
2186 If the value is stored in TARGET then TARGET is returned.
2187 If IGNORE is nonzero, then we ignore the value of the function call. */
2188
2189rtx
4c9e08a4 2190expand_call (tree exp, rtx target, int ignore)
66d433c7 2191{
60ecc450 2192 /* Nonzero if we are currently expanding a call. */
2193 static int currently_expanding_call = 0;
2194
66d433c7 2195 /* RTX for the function to be called. */
2196 rtx funexp;
60ecc450 2197 /* Sequence of insns to perform a normal "call". */
2198 rtx normal_call_insns = NULL_RTX;
4ee9c684 2199 /* Sequence of insns to perform a tail "call". */
60ecc450 2200 rtx tail_call_insns = NULL_RTX;
66d433c7 2201 /* Data type of the function. */
2202 tree funtype;
915e81b8 2203 tree type_arg_types;
16c9337c 2204 tree rettype;
66d433c7 2205 /* Declaration of the function being called,
2206 or 0 if the function is computed (not known by name). */
2207 tree fndecl = 0;
e100aadc 2208 /* The type of the function being called. */
2209 tree fntype;
4ee9c684 2210 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
60ecc450 2211 int pass;
66d433c7 2212
2213 /* Register in which non-BLKmode value will be returned,
2214 or 0 if no value or if value is BLKmode. */
2215 rtx valreg;
2216 /* Address where we should return a BLKmode value;
2217 0 if value not BLKmode. */
2218 rtx structure_value_addr = 0;
2219 /* Nonzero if that address is being passed by treating it as
2220 an extra, implicit first parameter. Otherwise,
2221 it is passed by being copied directly into struct_value_rtx. */
2222 int structure_value_addr_parm = 0;
cd46caee 2223 /* Holds the value of implicit argument for the struct value. */
2224 tree structure_value_addr_value = NULL_TREE;
66d433c7 2225 /* Size of aggregate value wanted, or zero if none wanted
2226 or if we are using the non-reentrant PCC calling convention
2227 or expecting the value in registers. */
997d68fe 2228 HOST_WIDE_INT struct_value_size = 0;
66d433c7 2229 /* Nonzero if called function returns an aggregate in memory PCC style,
2230 by returning the address of where to find it. */
2231 int pcc_struct_value = 0;
45550790 2232 rtx struct_value = 0;
66d433c7 2233
2234 /* Number of actual parameters in this call, including struct value addr. */
2235 int num_actuals;
2236 /* Number of named args. Args after this are anonymous ones
2237 and they must all go on the stack. */
2238 int n_named_args;
cd46caee 2239 /* Number of complex actual arguments that need to be split. */
2240 int num_complex_actuals = 0;
66d433c7 2241
2242 /* Vector of information about each argument.
2243 Arguments are numbered in the order they will be pushed,
2244 not the order they are written. */
2245 struct arg_data *args;
2246
2247 /* Total size in bytes of all the stack-parms scanned so far. */
2248 struct args_size args_size;
0e0be288 2249 struct args_size adjusted_args_size;
66d433c7 2250 /* Size of arguments before any adjustments (such as rounding). */
cc45e5e8 2251 int unadjusted_args_size;
66d433c7 2252 /* Data on reg parms scanned so far. */
39cba157 2253 CUMULATIVE_ARGS args_so_far_v;
2254 cumulative_args_t args_so_far;
66d433c7 2255 /* Nonzero if a reg parm has been scanned. */
2256 int reg_parm_seen;
66d433c7 2258
c87678e4 2259 /* Nonzero if we must avoid push-insns in the args for this call.
66d433c7 2260 If stack space is allocated for register parameters, but not by the
2261 caller, then it is preallocated in the fixed part of the stack frame.
2262 So the entire argument block must then be preallocated (i.e., we
2263 ignore PUSH_ROUNDING in that case). */
2264
4448f543 2265 int must_preallocate = !PUSH_ARGS;
66d433c7 2266
eb2f80f3 2267 /* Size of the stack reserved for parameter registers. */
2d7187c2 2268 int reg_parm_stack_space = 0;
2269
66d433c7 2270 /* Address of space preallocated for stack parms
2271 (on machines that lack push insns), or 0 if space not preallocated. */
2272 rtx argblock = 0;
2273
c8010b80 2274 /* Mask of ECF_ and ERF_ flags. */
dfe08167 2275 int flags = 0;
c8010b80 2276 int return_flags = 0;
4448f543 2277#ifdef REG_PARM_STACK_SPACE
66d433c7 2278 /* Define the boundary of the register parm stack space that needs to be
6e96b626 2279 saved, if any. */
2280 int low_to_save, high_to_save;
66d433c7 2281 rtx save_area = 0; /* Place where it is saved */
2282#endif
2283
66d433c7 2284 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2285 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 2286 char *stack_usage_map_buf = NULL;
66d433c7 2287
9069face 2288 int old_stack_allocated;
2289
2290 /* State variables to track stack modifications. */
66d433c7 2291 rtx old_stack_level = 0;
9069face 2292 int old_stack_arg_under_construction = 0;
65dccdb1 2293 int old_pending_adj = 0;
66d433c7 2294 int old_inhibit_defer_pop = inhibit_defer_pop;
9069face 2295
2296 /* Some stack pointer alterations we make are performed via
2297 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2298 which we then also need to save/restore along the way. */
31d035ca 2299 int old_stack_pointer_delta = 0;
9069face 2300
60ecc450 2301 rtx call_fusage;
c2f47e15 2302 tree addr = CALL_EXPR_FN (exp);
19cb6b50 2303 int i;
92e1ef5b 2304 /* The alignment of the stack, in bits. */
38413c80 2305 unsigned HOST_WIDE_INT preferred_stack_boundary;
92e1ef5b 2306 /* The alignment of the stack, in bytes. */
38413c80 2307 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
4ee9c684 2308 /* The static chain value to use for this call. */
2309 rtx static_chain_value;
dfe08167 2310 /* See if this is a "nothrow" function call. */
2311 if (TREE_NOTHROW (exp))
2312 flags |= ECF_NOTHROW;
2313
4ee9c684 2314 /* See if we can find a DECL-node for the actual function, and get the
2315 function attributes (flags) from the function decl or type node. */
97a1590b 2316 fndecl = get_callee_fndecl (exp);
2317 if (fndecl)
66d433c7 2318 {
e100aadc 2319 fntype = TREE_TYPE (fndecl);
97a1590b 2320 flags |= flags_from_decl_or_type (fndecl);
c8010b80 2321 return_flags |= decl_return_flags (fndecl);
66d433c7 2322 }
97a1590b 2323 else
8a8cdb8d 2324 {
16c9337c 2325 fntype = TREE_TYPE (TREE_TYPE (addr));
e100aadc 2326 flags |= flags_from_decl_or_type (fntype);
8a8cdb8d 2327 }
16c9337c 2328 rettype = TREE_TYPE (exp);
d490e2f2 2329
e100aadc 2330 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
45550790 2331
4a081ddd 2332 /* Warn if this value is an aggregate type,
2333 regardless of which calling convention we are using for it. */
16c9337c 2334 if (AGGREGATE_TYPE_P (rettype))
efb9d9ee 2335 warning (OPT_Waggregate_return, "function call has aggregate value");
4a081ddd 2336
9c2a0c05 2337 /* If the result of a non-looping pure or const function call is
2338 ignored (or void), and none of its arguments are volatile, we can
2339 avoid expanding the call and just evaluate the arguments for
2340 side-effects. */
4a081ddd 2341 if ((flags & (ECF_CONST | ECF_PURE))
9c2a0c05 2342 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
4a081ddd 2343 && (ignore || target == const0_rtx
16c9337c 2344 || TYPE_MODE (rettype) == VOIDmode))
4a081ddd 2345 {
2346 bool volatilep = false;
2347 tree arg;
cd46caee 2348 call_expr_arg_iterator iter;
4a081ddd 2349
cd46caee 2350 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2351 if (TREE_THIS_VOLATILE (arg))
4a081ddd 2352 {
2353 volatilep = true;
2354 break;
2355 }
2356
2357 if (! volatilep)
2358 {
cd46caee 2359 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2360 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
4a081ddd 2361 return const0_rtx;
2362 }
2363 }
2364
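  /* For example (illustrative source, not from this file): given

       extern int f (int) __attribute__ ((pure));
       ...
       f (x);

     with the return value unused, no call is expanded at all; only the
     argument is evaluated for its side effects.  A volatile argument
     blocks this, and so does ECF_LOOPING_CONST_OR_PURE, since a const
     or pure function that may loop forever must still be called.  */
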
2d7187c2 2365#ifdef REG_PARM_STACK_SPACE
fa20f865 2366 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2d7187c2 2367#endif
2d7187c2 2368
fa20f865 2369 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 2370 && reg_parm_stack_space > 0 && PUSH_ARGS)
997d68fe 2371 must_preallocate = 1;
997d68fe 2372
66d433c7 2373 /* Set up a place to return a structure. */
2374
2375 /* Cater to broken compilers. */
4cd5bb61 2376 if (aggregate_value_p (exp, fntype))
66d433c7 2377 {
2378 /* This call returns a big structure. */
2dd6f9ed 2379 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
66d433c7 2380
2381#ifdef PCC_STATIC_STRUCT_RETURN
f49c64ba 2382 {
2383 pcc_struct_value = 1;
f49c64ba 2384 }
2385#else /* not PCC_STATIC_STRUCT_RETURN */
2386 {
16c9337c 2387 struct_value_size = int_size_in_bytes (rettype);
66d433c7 2388
ea523851 2389 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
f49c64ba 2390 structure_value_addr = XEXP (target, 0);
2391 else
2392 {
f49c64ba 2393 /* For variable-sized objects, we must be called with a target
2394 specified. If we were to allocate space on the stack here,
2395 we would have no way of knowing when to free it. */
0ab48139 2396 rtx d = assign_temp (rettype, 1, 1);
930f0e87 2397 structure_value_addr = XEXP (d, 0);
f49c64ba 2398 target = 0;
2399 }
2400 }
2401#endif /* not PCC_STATIC_STRUCT_RETURN */
66d433c7 2402 }
2403
0e0be288 2404 /* Figure out the amount to which the stack should be aligned. */
0e0be288 2405 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
28992b23 2406 if (fndecl)
2407 {
2408 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
9a27561f 2409 /* Without automatic stack alignment, we can't increase preferred
2410 stack boundary. With automatic stack alignment, it is
2411 unnecessary since unless we can guarantee that all callers will
2412 align the outgoing stack properly, callee has to align its
2413 stack anyway. */
2414 if (i
2415 && i->preferred_incoming_stack_boundary
2416 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
28992b23 2417 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2418 }
0e0be288 2419
2420 /* Operand 0 is a pointer-to-function; get the type of the function. */
95672afe 2421 funtype = TREE_TYPE (addr);
231bd014 2422 gcc_assert (POINTER_TYPE_P (funtype));
0e0be288 2423 funtype = TREE_TYPE (funtype);
2424
cd46caee 2425 /* Count whether there are actual complex arguments that need to be split
2426 into their real and imaginary parts. Munge the type_arg_types
2427 appropriately here as well. */
92d40bc4 2428 if (targetm.calls.split_complex_arg)
915e81b8 2429 {
cd46caee 2430 call_expr_arg_iterator iter;
2431 tree arg;
2432 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2433 {
2434 tree type = TREE_TYPE (arg);
2435 if (type && TREE_CODE (type) == COMPLEX_TYPE
2436 && targetm.calls.split_complex_arg (type))
2437 num_complex_actuals++;
2438 }
915e81b8 2439 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
915e81b8 2440 }
2441 else
2442 type_arg_types = TYPE_ARG_TYPES (funtype);
2443
0e0be288 2444 if (flags & ECF_MAY_BE_ALLOCA)
18d50ae6 2445 cfun->calls_alloca = 1;
0e0be288 2446
2447 /* If struct_value_rtx is 0, it means pass the address
cd46caee 2448 as if it were an extra parameter. Put the argument expression
2449 in structure_value_addr_value. */
45550790 2450 if (structure_value_addr && struct_value == 0)
0e0be288 2451 {
2452 /* If structure_value_addr is a REG other than
 2453 virtual_outgoing_args_rtx, we can always use it. If it
2454 is not a REG, we must always copy it into a register.
2455 If it is virtual_outgoing_args_rtx, we must copy it to another
2456 register in some cases. */
8ad4c111 2457 rtx temp = (!REG_P (structure_value_addr)
0e0be288 2458 || (ACCUMULATE_OUTGOING_ARGS
2459 && stack_arg_under_construction
2460 && structure_value_addr == virtual_outgoing_args_rtx)
0d568ddf 2461 ? copy_addr_to_reg (convert_memory_address
e100aadc 2462 (Pmode, structure_value_addr))
0e0be288 2463 : structure_value_addr);
2464
cd46caee 2465 structure_value_addr_value =
2466 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
0e0be288 2467 structure_value_addr_parm = 1;
2468 }
2469
2470 /* Count the arguments and set NUM_ACTUALS. */
cd46caee 2471 num_actuals =
2472 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
0e0be288 2473
2474 /* Compute number of named args.
30a10006 2475 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2476
2477 if (type_arg_types != 0)
2478 n_named_args
2479 = (list_length (type_arg_types)
2480 /* Count the struct value address, if it is passed as a parm. */
2481 + structure_value_addr_parm);
2482 else
2483 /* If we know nothing, treat all args as named. */
2484 n_named_args = num_actuals;
2485
2486 /* Start updating where the next arg would go.
2487
2488 On some machines (such as the PA) indirect calls have a different
2489 calling convention than normal calls. The fourth argument in
2490 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2491 or not. */
39cba157 2492 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2493 args_so_far = pack_cumulative_args (&args_so_far_v);
30a10006 2494
2495 /* Now possibly adjust the number of named args.
0e0be288 2496 Normally, don't include the last named arg if anonymous args follow.
8bdddbd1 2497 We do include the last named arg if
2498 targetm.calls.strict_argument_naming() returns nonzero.
0e0be288 2499 (If no anonymous args follow, the result of list_length is actually
2500 one too large. This is harmless.)
2501
a107cd89 2502 If targetm.calls.pretend_outgoing_varargs_named() returns
8bdddbd1 2503 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2504 this machine will be able to place unnamed args that were passed
2505 in registers into the stack. So treat all args as named. This
2506 allows the insns emitting for a specific argument list to be
2507 independent of the function declaration.
a107cd89 2508
2509 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2510 we do not have any reliable way to pass unnamed args in
2511 registers, so we must force them into memory. */
0e0be288 2512
30a10006 2513 if (type_arg_types != 0
39cba157 2514 && targetm.calls.strict_argument_naming (args_so_far))
30a10006 2515 ;
2516 else if (type_arg_types != 0
39cba157 2517 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
30a10006 2518 /* Don't include the last named arg. */
2519 --n_named_args;
0e0be288 2520 else
30a10006 2521 /* Treat all args as named. */
0e0be288 2522 n_named_args = num_actuals;
2523
0e0be288 2524 /* Make a vector to hold all the information about each arg. */
364c0c59 2525 args = XALLOCAVEC (struct arg_data, num_actuals);
f0af5a88 2526 memset (args, 0, num_actuals * sizeof (struct arg_data));
0e0be288 2527
00dddcf2 2528 /* Build up entries in the ARGS array, compute the size of the
2529 arguments into ARGS_SIZE, etc. */
0e0be288 2530 initialize_argument_information (num_actuals, args, &args_size,
cd46caee 2531 n_named_args, exp,
d8b9c828 2532 structure_value_addr_value, fndecl, fntype,
39cba157 2533 args_so_far, reg_parm_stack_space,
0e0be288 2534 &old_stack_level, &old_pending_adj,
eaa112a0 2535 &must_preallocate, &flags,
4ee9c684 2536 &try_tail_call, CALL_FROM_THUNK_P (exp));
0e0be288 2537
2538 if (args_size.var)
2dd6f9ed 2539 must_preallocate = 1;
0e0be288 2540
2541 /* Now make final decision about preallocating stack space. */
2542 must_preallocate = finalize_must_preallocate (must_preallocate,
2543 num_actuals, args,
2544 &args_size);
2545
2546 /* If the structure value address will reference the stack pointer, we
2547 must stabilize it. We don't need to do this if we know that we are
2548 not going to adjust the stack pointer in processing this call. */
2549
2550 if (structure_value_addr
2551 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2552 || reg_mentioned_p (virtual_outgoing_args_rtx,
2553 structure_value_addr))
2554 && (args_size.var
2555 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2556 structure_value_addr = copy_to_reg (structure_value_addr);
60ecc450 2557
0d568ddf 2558 /* Tail calls can make things harder to debug, and we've traditionally
4f8af819 2559 pushed these optimizations into -O2. Don't try if we're already
fdf2b689 2560 expanding a call, as that means we're an argument. Don't try if
011e6b51 2561 there are cleanups, as we know there's code to follow the call. */
60ecc450 2562
0e0be288 2563 if (currently_expanding_call++ != 0
2564 || !flag_optimize_sibling_calls
4ee9c684 2565 || args_size.var
3072d30e 2566 || dbg_cnt (tail_call) == false)
4ee9c684 2567 try_tail_call = 0;
0e0be288 2568
 2569 /* Other reasons tail call optimization can fail. */
2570 if (
2571#ifdef HAVE_sibcall_epilogue
2572 !HAVE_sibcall_epilogue
2573#else
2574 1
2575#endif
2576 || !try_tail_call
2577 /* Doing sibling call optimization needs some work, since
2578 structure_value_addr can be allocated on the stack.
2579 It does not seem worth the effort since few optimizable
2580 sibling calls will return a structure. */
2581 || structure_value_addr != NULL_RTX
aa7aa403 2582#ifdef REG_PARM_STACK_SPACE
91ebded8 2583 /* If the outgoing reg parm stack space changes, we cannot do a sibcall. */
2584 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2585 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2586 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
aa7aa403 2587#endif
805e22b2 2588 /* Check whether the target is able to optimize the call
2589 into a sibcall. */
883b2e73 2590 || !targetm.function_ok_for_sibcall (fndecl, exp)
805e22b2 2591 /* Functions that do not return exactly once may not be sibcall
a0c938f0 2592 optimized. */
4fec1d6c 2593 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
95672afe 2594 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
4c4a1039 2595 /* If the called function is nested in the current one, it might access
a0c938f0 2596 some of the caller's arguments, but could clobber them beforehand if
2597 the argument areas are shared. */
4c4a1039 2598 || (fndecl && decl_function_context (fndecl) == current_function_decl)
0e0be288 2599 /* If this function requires more stack slots than the current
99b442ff 2600 function, we cannot change it into a sibling call.
abe32cce 2601 crtl->args.pretend_args_size is not part of the
99b442ff 2602 stack allocated by our caller. */
abe32cce 2603 || args_size.constant > (crtl->args.size
2604 - crtl->args.pretend_args_size)
0e0be288 2605 /* If the callee pops its own arguments, then it must pop exactly
2606 the same number of arguments as the current function. */
f5bc28da 2607 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2608 != targetm.calls.return_pops_args (current_function_decl,
2609 TREE_TYPE (current_function_decl),
2610 crtl->args.size))
dc24ddbd 2611 || !lang_hooks.decls.ok_for_sibcall (fndecl))
8b1cb18e 2612 try_tail_call = 0;
4b066641 2613
4681dd41 2614 /* Check if the caller and callee disagree on the promotion of the
 2615 function's return value. */
2616 if (try_tail_call)
2617 {
2618 enum machine_mode caller_mode, caller_promoted_mode;
2619 enum machine_mode callee_mode, callee_promoted_mode;
2620 int caller_unsignedp, callee_unsignedp;
2621 tree caller_res = DECL_RESULT (current_function_decl);
2622
2623 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3b2411a8 2624 caller_mode = DECL_MODE (caller_res);
4681dd41 2625 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3b2411a8 2626 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2627 caller_promoted_mode
2628 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2629 &caller_unsignedp,
2630 TREE_TYPE (current_function_decl), 1);
2631 callee_promoted_mode
c879dbcf 2632 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3b2411a8 2633 &callee_unsignedp,
c879dbcf 2634 funtype, 1);
4681dd41 2635 if (caller_mode != VOIDmode
2636 && (caller_promoted_mode != callee_promoted_mode
2637 || ((caller_mode != caller_promoted_mode
2638 || callee_mode != callee_promoted_mode)
2639 && (caller_unsignedp != callee_unsignedp
2640 || GET_MODE_BITSIZE (caller_mode)
2641 < GET_MODE_BITSIZE (callee_mode)))))
2642 try_tail_call = 0;
2643 }
2644
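  /* For illustration (hypothetical 32-bit target that promotes sub-word
     return values to SImode): if the caller is declared to return
     signed short while the callee's type returns unsigned short, both
     promote to SImode but with different signedness, so the extension
     the caller's own caller expects need not match what the callee
     produces; try_tail_call is cleared rather than risk returning a
     value with the wrong extension.  */
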
755ece1f 2645 /* Ensure current function's preferred stack boundary is at least
2646 what we need. Stack alignment may also increase preferred stack
2647 boundary. */
54d759e3 2648 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
edb7afe8 2649 crtl->preferred_stack_boundary = preferred_stack_boundary;
755ece1f 2650 else
2651 preferred_stack_boundary = crtl->preferred_stack_boundary;
d0285dd8 2652
0e0be288 2653 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4b066641 2654
60ecc450 2655 /* We want to make two insn chains; one for a sibling call, the other
2656 for a normal call. We will select one of the two chains after
2657 initial RTL generation is complete. */
6e96b626 2658 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
60ecc450 2659 {
2660 int sibcall_failure = 0;
35a3065a 2661 /* We want to emit any pending stack adjustments before the tail
60ecc450 2662 recursion "call". That way we know any adjustment after the tail
0d568ddf 2663 recursion call can be ignored if we indeed use the tail
60ecc450 2664 call expansion. */
9a5bbcc2 2665 int save_pending_stack_adjust = 0;
2666 int save_stack_pointer_delta = 0;
60ecc450 2667 rtx insns;
c0e7e9f7 2668 rtx before_call, next_arg_reg, after_args;
1e2b2ab3 2669
60ecc450 2670 if (pass == 0)
2671 {
60ecc450 2672 /* State variables we need to save and restore between
2673 iterations. */
2674 save_pending_stack_adjust = pending_stack_adjust;
91b70175 2675 save_stack_pointer_delta = stack_pointer_delta;
60ecc450 2676 }
dfe08167 2677 if (pass)
2678 flags &= ~ECF_SIBCALL;
2679 else
2680 flags |= ECF_SIBCALL;
66d433c7 2681
60ecc450 2682 /* Other state variables that we must reinitialize each time
dfe08167 2683 through the loop (that are not initialized by the loop itself). */
60ecc450 2684 argblock = 0;
2685 call_fusage = 0;
2f921ec9 2686
c87678e4 2687 /* Start a new sequence for the normal call case.
66d433c7 2688
60ecc450 2689 From this point on, if the sibling call fails, we want to set
2690 sibcall_failure instead of continuing the loop. */
2691 start_sequence ();
412321ce 2692
60ecc450 2693 /* Don't let pending stack adjustments add up to too much.
2694 Also, do all pending adjustments now if there is any chance
2695 this might be a call to alloca or if we are expanding a sibling
ff3ae375 2696 call sequence.
82e95be3 2697 Also do the adjustments before a throwing call, otherwise
2698 exception handling can fail; PR 19225. */
60ecc450 2699 if (pending_stack_adjust >= 32
5edaabad 2700 || (pending_stack_adjust > 0
ff3ae375 2701 && (flags & ECF_MAY_BE_ALLOCA))
82e95be3 2702 || (pending_stack_adjust > 0
2703 && flag_exceptions && !(flags & ECF_NOTHROW))
60ecc450 2704 || pass == 0)
2705 do_pending_stack_adjust ();
66d433c7 2706
60ecc450 2707 /* Precompute any arguments as needed. */
02510658 2708 if (pass)
2dd6f9ed 2709 precompute_arguments (num_actuals, args);
66d433c7 2710
60ecc450 2711 /* Now we are about to start emitting insns that can be deleted
2712 if a libcall is deleted. */
2dd6f9ed 2713 if (pass && (flags & ECF_MALLOC))
60ecc450 2714 start_sequence ();
66d433c7 2715
edb7afe8 2716 if (pass == 0 && crtl->stack_protect_guard)
71d89928 2717 stack_protect_epilogue ();
2718
0e0be288 2719 adjusted_args_size = args_size;
481feae3 2720 /* Compute the actual size of the argument block required. The variable
2721 and constant sizes must be combined, the size may have to be rounded,
2722 and there may be a minimum required size. When generating a sibcall
2723 pattern, do not round up, since we'll be re-using whatever space our
2724 caller provided. */
2725 unadjusted_args_size
c87678e4 2726 = compute_argument_block_size (reg_parm_stack_space,
2727 &adjusted_args_size,
fa20f865 2728 fndecl, fntype,
481feae3 2729 (pass == 0 ? 0
2730 : preferred_stack_boundary));
2731
c87678e4 2732 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
481feae3 2733
02510658 2734 /* The argument block when performing a sibling call is the
a0c938f0 2735 incoming argument block. */
02510658 2736 if (pass == 0)
7ecc63d3 2737 {
27a7a23a 2738 argblock = crtl->args.internal_arg_pointer;
bd54bbc6 2739 argblock
2740#ifdef STACK_GROWS_DOWNWARD
29c05e22 2741 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
bd54bbc6 2742#else
29c05e22 2743 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
bd54bbc6 2744#endif
7ecc63d3 2745 stored_args_map = sbitmap_alloc (args_size.constant);
53c5d9d4 2746 bitmap_clear (stored_args_map);
7ecc63d3 2747 }
481feae3 2748
60ecc450 2749 /* If we have no actual push instructions, or shouldn't use them,
2750 make space for all args right now. */
0e0be288 2751 else if (adjusted_args_size.var != 0)
66d433c7 2752 {
60ecc450 2753 if (old_stack_level == 0)
2754 {
e9c97615 2755 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 2756 old_stack_pointer_delta = stack_pointer_delta;
60ecc450 2757 old_pending_adj = pending_stack_adjust;
2758 pending_stack_adjust = 0;
60ecc450 2759 /* stack_arg_under_construction says whether a stack arg is
2760 being constructed at the old stack level. Pushing the stack
2761 gets a clean outgoing argument block. */
2762 old_stack_arg_under_construction = stack_arg_under_construction;
2763 stack_arg_under_construction = 0;
60ecc450 2764 }
0e0be288 2765 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
8c0dd614 2766 if (flag_stack_usage_info)
990495a7 2767 current_function_has_unbounded_dynamic_stack_size = 1;
66d433c7 2768 }
60ecc450 2769 else
2770 {
2771 /* Note that we must go through the motions of allocating an argument
2772 block even if the size is zero because we may be storing args
2773 in the area reserved for register arguments, which may be part of
2774 the stack frame. */
7221f864 2775
0e0be288 2776 int needed = adjusted_args_size.constant;
66d433c7 2777
60ecc450 2778 /* Store the maximum argument space used. It will be pushed by
2779 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2780 checking). */
66d433c7 2781
abe32cce 2782 if (needed > crtl->outgoing_args_size)
2783 crtl->outgoing_args_size = needed;
66d433c7 2784
60ecc450 2785 if (must_preallocate)
2786 {
4448f543 2787 if (ACCUMULATE_OUTGOING_ARGS)
2788 {
02510658 2789 /* Since the stack pointer will never be pushed, it is
2790 possible for the evaluation of a parm to clobber
2791 something we have already written to the stack.
2792 Since most function calls on RISC machines do not use
2793 the stack, this is uncommon, but must work correctly.
7221f864 2794
4448f543 2795 Therefore, we save any area of the stack that was already
02510658 2796 written and that we are using. Here we set up to do this
2797 by making a new stack usage map from the old one. The
c87678e4 2798 actual save will be done by store_one_arg.
7221f864 2799
4448f543 2800 Another approach might be to try to reorder the argument
2801 evaluations to avoid this conflicting stack usage. */
7221f864 2802
02510658 2803 /* Since we will be writing into the entire argument area,
2804 the map must be allocated for its entire size, not just
2805 the part that is the responsibility of the caller. */
fa20f865 2806 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 2807 needed += reg_parm_stack_space;
66d433c7 2808
2809#ifdef ARGS_GROW_DOWNWARD
4448f543 2810 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2811 needed + 1);
66d433c7 2812#else
4448f543 2813 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2814 needed);
66d433c7 2815#endif
dd045aee 2816 free (stack_usage_map_buf);
4c36ffe6 2817 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 2818 stack_usage_map = stack_usage_map_buf;
66d433c7 2819
4448f543 2820 if (initial_highest_arg_in_use)
8e547276 2821 memcpy (stack_usage_map, initial_stack_usage_map,
2822 initial_highest_arg_in_use);
d1b03b62 2823
4448f543 2824 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 2825 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 2826 (highest_outgoing_arg_in_use
2827 - initial_highest_arg_in_use));
2828 needed = 0;
d1b03b62 2829
02510658 2830 /* The address of the outgoing argument list must not be
2831 copied to a register here, because argblock would be left
2832 pointing to the wrong place after the call to
c87678e4 2833 allocate_dynamic_stack_space below. */
d1b03b62 2834
4448f543 2835 argblock = virtual_outgoing_args_rtx;
c87678e4 2836 }
4448f543 2837 else
7221f864 2838 {
4448f543 2839 if (inhibit_defer_pop == 0)
60ecc450 2840 {
4448f543 2841 /* Try to reuse some or all of the pending_stack_adjust
481feae3 2842 to get this space. */
2843 needed
c87678e4 2844 = (combine_pending_stack_adjustment_and_call
481feae3 2845 (unadjusted_args_size,
0e0be288 2846 &adjusted_args_size,
481feae3 2847 preferred_unit_stack_boundary));
2848
2849 /* combine_pending_stack_adjustment_and_call computes
2850 an adjustment before the arguments are allocated.
2851 Account for them and see whether or not the stack
2852 needs to go up or down. */
2853 needed = unadjusted_args_size - needed;
2854
2855 if (needed < 0)
4448f543 2856 {
481feae3 2857 /* We're releasing stack space. */
2858 /* ??? We can avoid any adjustment at all if we're
2859 already aligned. FIXME. */
2860 pending_stack_adjust = -needed;
2861 do_pending_stack_adjust ();
4448f543 2862 needed = 0;
2863 }
c87678e4 2864 else
481feae3 2865 /* We need to allocate space. We'll do that in
2866 push_block below. */
2867 pending_stack_adjust = 0;
60ecc450 2868 }
481feae3 2869
 2870 /* Special-case this because the overhead of `push_block' in
 2871 this case is non-trivial. */
4448f543 2872 if (needed == 0)
2873 argblock = virtual_outgoing_args_rtx;
60ecc450 2874 else
ad3b56f3 2875 {
2876 argblock = push_block (GEN_INT (needed), 0, 0);
2877#ifdef ARGS_GROW_DOWNWARD
29c05e22 2878 argblock = plus_constant (Pmode, argblock, needed);
ad3b56f3 2879#endif
2880 }
4448f543 2881
02510658 2882 /* We only really need to call `copy_to_reg' in the case
2883 where push insns are going to be used to pass ARGBLOCK
2884 to a function call in ARGS. In that case, the stack
2885 pointer changes value from the allocation point to the
2886 call point, and hence the value of
2887 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2888 as well always do it. */
4448f543 2889 argblock = copy_to_reg (argblock);
9069face 2890 }
2891 }
2892 }
60ecc450 2893
9069face 2894 if (ACCUMULATE_OUTGOING_ARGS)
2895 {
2896 /* The save/restore code in store_one_arg handles all
2897 cases except one: a constructor call (including a C
2898 function returning a BLKmode struct) to initialize
2899 an argument. */
2900 if (stack_arg_under_construction)
2901 {
63c68695 2902 rtx push_size
2903 = GEN_INT (adjusted_args_size.constant
fa20f865 2904 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
22c61100 2905 : TREE_TYPE (fndecl))) ? 0
63c68695 2906 : reg_parm_stack_space));
9069face 2907 if (old_stack_level == 0)
2908 {
e9c97615 2909 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 2910 old_stack_pointer_delta = stack_pointer_delta;
2911 old_pending_adj = pending_stack_adjust;
2912 pending_stack_adjust = 0;
2913 /* stack_arg_under_construction says whether a stack
2914 arg is being constructed at the old stack level.
2915 Pushing the stack gets a clean outgoing argument
2916 block. */
2917 old_stack_arg_under_construction
2918 = stack_arg_under_construction;
2919 stack_arg_under_construction = 0;
2920 /* Make a new map for the new argument list. */
dd045aee 2921 free (stack_usage_map_buf);
43959b95 2922 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 2923 stack_usage_map = stack_usage_map_buf;
9069face 2924 highest_outgoing_arg_in_use = 0;
4448f543 2925 }
990495a7 2926 /* We can pass TRUE as the 4th argument because we just
2927 saved the stack pointer and will restore it right after
2928 the call. */
5be42b39 2929 allocate_dynamic_stack_space (push_size, 0,
2930 BIGGEST_ALIGNMENT, true);
60ecc450 2931 }
a3585b90 2932
9069face 2933 /* If argument evaluation might modify the stack pointer,
2934 copy the address of the argument list to a register. */
2935 for (i = 0; i < num_actuals; i++)
2936 if (args[i].pass_on_stack)
2937 {
2938 argblock = copy_addr_to_reg (argblock);
2939 break;
2940 }
2941 }
4c9e08a4 2942
60ecc450 2943 compute_argument_addresses (args, argblock, num_actuals);
a3585b90 2944
60ecc450 2945 /* If we push args individually in reverse order, perform stack alignment
2946 before the first push (the last arg). */
4448f543 2947 if (PUSH_ARGS_REVERSED && argblock == 0
0e0be288 2948 && adjusted_args_size.constant != unadjusted_args_size)
ff92623c 2949 {
60ecc450 2950 /* When the stack adjustment is pending, we get better code
2951 by combining the adjustments. */
c87678e4 2952 if (pending_stack_adjust
60ecc450 2953 && ! inhibit_defer_pop)
481feae3 2954 {
2955 pending_stack_adjust
c87678e4 2956 = (combine_pending_stack_adjustment_and_call
481feae3 2957 (unadjusted_args_size,
0e0be288 2958 &adjusted_args_size,
481feae3 2959 preferred_unit_stack_boundary));
2960 do_pending_stack_adjust ();
2961 }
60ecc450 2962 else if (argblock == 0)
0e0be288 2963 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
60ecc450 2964 - unadjusted_args_size));
60ecc450 2965 }
fa4f1f09 2966 /* Now that the stack is properly aligned, pops can't safely
2967 be deferred during the evaluation of the arguments. */
2968 NO_DEFER_POP;
66d433c7 2969
990495a7 2970 /* Record the maximum pushed stack space size. We need to delay
2971 doing it this far to take into account the optimization done
2972 by combine_pending_stack_adjustment_and_call. */
8c0dd614 2973 if (flag_stack_usage_info
990495a7 2974 && !ACCUMULATE_OUTGOING_ARGS
2975 && pass
2976 && adjusted_args_size.var == 0)
2977 {
2978 int pushed = adjusted_args_size.constant + pending_stack_adjust;
2979 if (pushed > current_function_pushed_stack_size)
2980 current_function_pushed_stack_size = pushed;
2981 }
2982
95672afe 2983 funexp = rtx_for_function_call (fndecl, addr);
66d433c7 2984
60ecc450 2985 /* Figure out the register where the value, if any, will come back. */
2986 valreg = 0;
16c9337c 2987 if (TYPE_MODE (rettype) != VOIDmode
60ecc450 2988 && ! structure_value_addr)
2989 {
2990 if (pcc_struct_value)
16c9337c 2991 valreg = hard_function_value (build_pointer_type (rettype),
46b3ff29 2992 fndecl, NULL, (pass == 0));
60ecc450 2993 else
16c9337c 2994 valreg = hard_function_value (rettype, fndecl, fntype,
46b3ff29 2995 (pass == 0));
2d329930 2996
2997 /* If VALREG is a PARALLEL whose first member has a zero
2998 offset, use that. This is for targets such as m68k that
2999 return the same value in multiple places. */
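      /* On such a target, VALREG might look something like
             (parallel:SI [(expr_list (reg:SI %a0) (const_int 0))
                           (expr_list (reg:SI %d0) (const_int 0))])
         in which case the test below picks the first register, %a0.  */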
3000 if (GET_CODE (valreg) == PARALLEL)
3001 {
3002 rtx elem = XVECEXP (valreg, 0, 0);
3003 rtx where = XEXP (elem, 0);
3004 rtx offset = XEXP (elem, 1);
3005 if (offset == const0_rtx
3006 && GET_MODE (where) == GET_MODE (valreg))
3007 valreg = where;
3008 }
60ecc450 3009 }
66d433c7 3010
60ecc450 3011 /* Precompute all register parameters. It isn't safe to compute anything
3012 once we have started filling any specific hard regs. */
3013 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
66d433c7 3014
c2f47e15 3015 if (CALL_EXPR_STATIC_CHAIN (exp))
3016 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4ee9c684 3017 else
3018 static_chain_value = 0;
3019
4448f543 3020#ifdef REG_PARM_STACK_SPACE
60ecc450 3021 /* Save the fixed argument area if it's part of the caller's frame and
3022 is clobbered by argument setup for this call. */
02510658 3023 if (ACCUMULATE_OUTGOING_ARGS && pass)
4448f543 3024 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3025 &low_to_save, &high_to_save);
41332f48 3026#endif
66d433c7 3027
60ecc450 3028 /* Now store (and compute if necessary) all non-register parms.
3029 These come before register parms, since they can require block-moves,
3030 which could clobber the registers used for register parms.
3031 Parms which have partial registers are not stored here,
3032 but we do preallocate space here if they want that. */
66d433c7 3033
60ecc450 3034 for (i = 0; i < num_actuals; i++)
eb940a48 3035 {
3036 if (args[i].reg == 0 || args[i].pass_on_stack)
3037 {
3038 rtx before_arg = get_last_insn ();
3039
3040 if (store_one_arg (&args[i], argblock, flags,
3041 adjusted_args_size.var != 0,
3042 reg_parm_stack_space)
3043 || (pass == 0
3044 && check_sibcall_argument_overlap (before_arg,
3045 &args[i], 1)))
3046 sibcall_failure = 1;
3047 }
3048
4143d08b 3049 if (args[i].stack)
b4eeceb9 3050 call_fusage
3051 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3052 gen_rtx_USE (VOIDmode, args[i].stack),
3053 call_fusage);
eb940a48 3054 }
60ecc450 3055
3056 /* If we have a parm that is passed in registers but not in memory
3057 and whose alignment does not permit a direct copy into registers,
3058 make a group of pseudos that correspond to each register that we
3059 will later fill. */
3060 if (STRICT_ALIGNMENT)
3061 store_unaligned_arguments_into_pseudos (args, num_actuals);
3062
3063 /* Now store any partially-in-registers parm.
3064 This is the last place a block-move can happen. */
3065 if (reg_parm_seen)
3066 for (i = 0; i < num_actuals; i++)
3067 if (args[i].partial != 0 && ! args[i].pass_on_stack)
7ecc63d3 3068 {
3069 rtx before_arg = get_last_insn ();
3070
57679d39 3071 if (store_one_arg (&args[i], argblock, flags,
3072 adjusted_args_size.var != 0,
3073 reg_parm_stack_space)
3074 || (pass == 0
3075 && check_sibcall_argument_overlap (before_arg,
42b11544 3076 &args[i], 1)))
7ecc63d3 3077 sibcall_failure = 1;
3078 }
66d433c7 3079
60ecc450 3080 /* If we pushed args in forward order, perform stack alignment
3081 after pushing the last arg. */
4448f543 3082 if (!PUSH_ARGS_REVERSED && argblock == 0)
0e0be288 3083 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
60ecc450 3084 - unadjusted_args_size));
66d433c7 3085
60ecc450 3086 /* If register arguments require space on the stack and stack space
3087 was not preallocated, allocate stack space here for arguments
3088 passed in registers. */
fa20f865 3089 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 3090 && !ACCUMULATE_OUTGOING_ARGS
c87678e4 3091 && must_preallocate == 0 && reg_parm_stack_space > 0)
60ecc450 3092 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
985adbca 3093
60ecc450 3094 /* Pass the function the address in which to return a
3095 structure value. */
3096 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3097 {
0d568ddf 3098 structure_value_addr
85d654dd 3099 = convert_memory_address (Pmode, structure_value_addr);
45550790 3100 emit_move_insn (struct_value,
60ecc450 3101 force_reg (Pmode,
3102 force_operand (structure_value_addr,
3103 NULL_RTX)));
3104
8ad4c111 3105 if (REG_P (struct_value))
45550790 3106 use_reg (&call_fusage, struct_value);
60ecc450 3107 }
02c736f4 3108
c0e7e9f7 3109 after_args = get_last_insn ();
82c7907c 3110 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
4ee9c684 3111 &call_fusage, reg_parm_seen, pass == 0);
66d433c7 3112
42b11544 3113 load_register_parameters (args, num_actuals, &call_fusage, flags,
3114 pass == 0, &sibcall_failure);
c87678e4 3115
60ecc450 3116 /* Save a pointer to the last insn before the call, so that we can
3117 later safely search backwards to find the CALL_INSN. */
3118 before_call = get_last_insn ();
66d433c7 3119
7a8d641b 3120 /* Set up next argument register. For sibling calls on machines
3121 with register windows this should be the incoming register. */
7a8d641b 3122 if (pass == 0)
39cba157 3123 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
f387af4f 3124 VOIDmode,
3125 void_type_node,
3126 true);
7a8d641b 3127 else
39cba157 3128 next_arg_reg = targetm.calls.function_arg (args_so_far,
f387af4f 3129 VOIDmode, void_type_node,
3130 true);
7a8d641b 3131
c8010b80 3132 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3133 {
3134 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3135 if (PUSH_ARGS_REVERSED)
3136 arg_nr = num_actuals - arg_nr - 1;
3d38d682 3137 if (arg_nr >= 0
3138 && arg_nr < num_actuals
3139 && args[arg_nr].reg
c8010b80 3140 && valreg
3141 && REG_P (valreg)
3142 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3143 call_fusage
3144 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
3145 gen_rtx_SET (VOIDmode, valreg, args[arg_nr].reg),
3146 call_fusage);
3147 }
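      /* For example, a function known to return its first argument, in
         the way memcpy returns its destination, has ERF_RETURNS_ARG set
         with an argument number of 0 in ERF_RETURN_ARG_MASK; the SET
         recorded above tells later passes that the call leaves that
         argument's register in VALREG.  */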
60ecc450 3148 /* All arguments and registers used for the call must be set up by
3149 now! */
3150
481feae3 3151 /* Stack must be properly aligned now. */
231bd014 3152 gcc_assert (!pass
3153 || !(stack_pointer_delta % preferred_unit_stack_boundary));
fa4f1f09 3154
60ecc450 3155 /* Generate the actual call instruction. */
4ee9c684 3156 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
0e0be288 3157 adjusted_args_size.constant, struct_value_size,
7a8d641b 3158 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
39cba157 3159 flags, args_so_far);
60ecc450 3160
c0e7e9f7 3161 /* If the call setup or the call itself overlaps with anything
3162 of the argument setup we probably clobbered our call address.
3163 In that case we can't do sibcalls. */
3164 if (pass == 0
3165 && check_sibcall_argument_overlap (after_args, 0, 0))
3166 sibcall_failure = 1;
3167
05d18e8b 3168 /* If a non-BLKmode value is returned at the most significant end
3169 of a register, shift the register right by the appropriate amount
3170 and update VALREG accordingly. BLKmode values are handled by the
3171 group load/store machinery below. */
3172 if (!structure_value_addr
3173 && !pcc_struct_value
d8ef55fc 3174 && TYPE_MODE (rettype) != VOIDmode
16c9337c 3175 && TYPE_MODE (rettype) != BLKmode
d8ef55fc 3176 && REG_P (valreg)
16c9337c 3177 && targetm.calls.return_in_msb (rettype))
05d18e8b 3178 {
16c9337c 3179 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
05d18e8b 3180 sibcall_failure = 1;
16c9337c 3181 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
05d18e8b 3182 }
3183
2dd6f9ed 3184 if (pass && (flags & ECF_MALLOC))
60ecc450 3185 {
3186 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3187 rtx last, insns;
3188
c87678e4 3189 /* The return value from a malloc-like function is a pointer. */
16c9337c 3190 if (TREE_CODE (rettype) == POINTER_TYPE)
10836fcc 3191 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
60ecc450 3192
3193 emit_move_insn (temp, valreg);
3194
	3195	      /* The return value from a malloc-like function cannot alias
3196 anything else. */
3197 last = get_last_insn ();
a1ddb869 3198 add_reg_note (last, REG_NOALIAS, temp);
60ecc450 3199
3200 /* Write out the sequence. */
3201 insns = get_insns ();
3202 end_sequence ();
31d3e01c 3203 emit_insn (insns);
60ecc450 3204 valreg = temp;
3205 }
66d433c7 3206
3072d30e 3207 /* For calls to `setjmp', etc., inform
3208 function.c:setjmp_warnings that it should complain if
3209 nonvolatile values are live. For functions that cannot
3210 return, inform flow that control does not fall through. */
66d433c7 3211
4fec1d6c 3212 if ((flags & ECF_NORETURN) || pass == 0)
02c736f4 3213 {
9239aee6 3214 /* The barrier must be emitted
60ecc450 3215 immediately after the CALL_INSN. Some ports emit more
3216 than just a CALL_INSN above, so we must search for it here. */
66d433c7 3217
60ecc450 3218 rtx last = get_last_insn ();
6d7dc5b9 3219 while (!CALL_P (last))
60ecc450 3220 {
3221 last = PREV_INSN (last);
3222 /* There was no CALL_INSN? */
231bd014 3223 gcc_assert (last != before_call);
60ecc450 3224 }
66d433c7 3225
9239aee6 3226 emit_barrier_after (last);
20f5f6d0 3227
b494d193 3228 /* Stack adjustments after a noreturn call are dead code.
3229 However when NO_DEFER_POP is in effect, we must preserve
3230 stack_pointer_delta. */
3231 if (inhibit_defer_pop == 0)
3232 {
3233 stack_pointer_delta = old_stack_allocated;
3234 pending_stack_adjust = 0;
3235 }
60ecc450 3236 }
66d433c7 3237
60ecc450 3238 /* If value type not void, return an rtx for the value. */
66d433c7 3239
16c9337c 3240 if (TYPE_MODE (rettype) == VOIDmode
60ecc450 3241 || ignore)
5edaabad 3242 target = const0_rtx;
60ecc450 3243 else if (structure_value_addr)
3244 {
e16ceb8e 3245 if (target == 0 || !MEM_P (target))
60ecc450 3246 {
f7c44134 3247 target
16c9337c 3248 = gen_rtx_MEM (TYPE_MODE (rettype),
3249 memory_address (TYPE_MODE (rettype),
f7c44134 3250 structure_value_addr));
16c9337c 3251 set_mem_attributes (target, rettype, 1);
60ecc450 3252 }
3253 }
3254 else if (pcc_struct_value)
566d850a 3255 {
60ecc450 3256 /* This is the special C++ case where we need to
3257 know what the true target was. We take care to
3258 never use this value more than once in one expression. */
16c9337c 3259 target = gen_rtx_MEM (TYPE_MODE (rettype),
60ecc450 3260 copy_to_reg (valreg));
16c9337c 3261 set_mem_attributes (target, rettype, 1);
566d850a 3262 }
60ecc450 3263 /* Handle calls that return values in multiple non-contiguous locations.
3264 The Irix 6 ABI has examples of this. */
3265 else if (GET_CODE (valreg) == PARALLEL)
3266 {
4ee9c684 3267 if (target == 0)
2d0fd66d 3268 target = emit_group_move_into_temps (valreg);
5bd5c1c2 3269 else if (rtx_equal_p (target, valreg))
3270 ;
3271 else if (GET_CODE (target) == PARALLEL)
	3272	        /* Handle the result of an emit_group_move_into_temps
3273 call in the previous pass. */
3274 emit_group_move (target, valreg);
3275 else
16c9337c 3276 emit_group_store (target, valreg, rettype,
3277 int_size_in_bytes (rettype));
60ecc450 3278 }
3279 else if (target
16c9337c 3280 && GET_MODE (target) == TYPE_MODE (rettype)
60ecc450 3281 && GET_MODE (target) == GET_MODE (valreg))
3282 {
aadbaa40 3283 bool may_overlap = false;
3284
360738f1 3285 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3286 reg to a plain register. */
90af1361 3287 if (!REG_P (target) || HARD_REGISTER_P (target))
3288 valreg = avoid_likely_spilled_reg (valreg);
360738f1 3289
aadbaa40 3290 /* If TARGET is a MEM in the argument area, and we have
3291 saved part of the argument area, then we can't store
3292 directly into TARGET as it may get overwritten when we
3293 restore the argument save area below. Don't work too
3294 hard though and simply force TARGET to a register if it
3295 is a MEM; the optimizer is quite likely to sort it out. */
3296 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3297 for (i = 0; i < num_actuals; i++)
3298 if (args[i].save_area)
3299 {
3300 may_overlap = true;
3301 break;
3302 }
dbe1f550 3303
aadbaa40 3304 if (may_overlap)
3305 target = copy_to_reg (valreg);
3306 else
3307 {
3308 /* TARGET and VALREG cannot be equal at this point
3309 because the latter would not have
3310 REG_FUNCTION_VALUE_P true, while the former would if
3311 it were referring to the same register.
3312
3313 If they refer to the same register, this move will be
3314 a no-op, except when function inlining is being
3315 done. */
3316 emit_move_insn (target, valreg);
3317
3318 /* If we are setting a MEM, this code must be executed.
3319 Since it is emitted after the call insn, sibcall
3320 optimization cannot be performed in that case. */
3321 if (MEM_P (target))
3322 sibcall_failure = 1;
3323 }
60ecc450 3324 }
60ecc450 3325 else
90af1361 3326 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
66d433c7 3327
3b2411a8 3328 /* If we promoted this return value, make the proper SUBREG.
3329 TARGET might be const0_rtx here, so be careful. */
3330 if (REG_P (target)
16c9337c 3331 && TYPE_MODE (rettype) != BLKmode
3332 && GET_MODE (target) != TYPE_MODE (rettype))
45550790 3333 {
16c9337c 3334 tree type = rettype;
3b2411a8 3335 int unsignedp = TYPE_UNSIGNED (type);
3336 int offset = 0;
3337 enum machine_mode pmode;
3338
3339 /* Ensure we promote as expected, and get the new unsignedness. */
3340 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3341 funtype, 1);
3342 gcc_assert (GET_MODE (target) == pmode);
3343
3344 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3345 && (GET_MODE_SIZE (GET_MODE (target))
3346 > GET_MODE_SIZE (TYPE_MODE (type))))
231bd014 3347 {
3b2411a8 3348 offset = GET_MODE_SIZE (GET_MODE (target))
3349 - GET_MODE_SIZE (TYPE_MODE (type));
3350 if (! BYTES_BIG_ENDIAN)
3351 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3352 else if (! WORDS_BIG_ENDIAN)
3353 offset %= UNITS_PER_WORD;
231bd014 3354 }
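      /* A concrete instance: a QImode value promoted to a 4-byte SImode
         register on a target where both WORDS_BIG_ENDIAN and
         BYTES_BIG_ENDIAN hold gets offset = 4 - 1 = 3; neither
         adjustment above applies, so the SUBREG below picks out the
         low-order byte at byte offset 3.  */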
3b2411a8 3355
3356 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3357 SUBREG_PROMOTED_VAR_P (target) = 1;
3358 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
45550790 3359 }
23eb5fa6 3360
60ecc450 3361 /* If size of args is variable or this was a constructor call for a stack
3362 argument, restore saved stack-pointer value. */
66d433c7 3363
ff3ae375 3364 if (old_stack_level)
60ecc450 3365 {
897445c7 3366 rtx prev = get_last_insn ();
dfe00a8f 3367
e9c97615 3368 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9069face 3369 stack_pointer_delta = old_stack_pointer_delta;
dfe00a8f 3370
897445c7 3371 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
dfe00a8f 3372
60ecc450 3373 pending_stack_adjust = old_pending_adj;
80f06481 3374 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
60ecc450 3375 stack_arg_under_construction = old_stack_arg_under_construction;
3376 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3377 stack_usage_map = initial_stack_usage_map;
60ecc450 3378 sibcall_failure = 1;
3379 }
02510658 3380 else if (ACCUMULATE_OUTGOING_ARGS && pass)
60ecc450 3381 {
66d433c7 3382#ifdef REG_PARM_STACK_SPACE
60ecc450 3383 if (save_area)
6e96b626 3384 restore_fixed_argument_area (save_area, argblock,
3385 high_to_save, low_to_save);
41332f48 3386#endif
66d433c7 3387
60ecc450 3388 /* If we saved any argument areas, restore them. */
3389 for (i = 0; i < num_actuals; i++)
3390 if (args[i].save_area)
3391 {
3392 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3393 rtx stack_area
3394 = gen_rtx_MEM (save_mode,
3395 memory_address (save_mode,
3396 XEXP (args[i].stack_slot, 0)));
3397
3398 if (save_mode != BLKmode)
3399 emit_move_insn (stack_area, args[i].save_area);
3400 else
0378dbdc 3401 emit_block_move (stack_area, args[i].save_area,
241399f6 3402 GEN_INT (args[i].locate.size.constant),
0378dbdc 3403 BLOCK_OP_CALL_PARM);
60ecc450 3404 }
66d433c7 3405
60ecc450 3406 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3407 stack_usage_map = initial_stack_usage_map;
3408 }
66d433c7 3409
c87678e4 3410 /* If this was alloca, record the new stack level for nonlocal gotos.
60ecc450 3411 Check for the handler slots since we might not have a save area
3412 for non-local gotos. */
dbd6697a 3413
4ee9c684 3414 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3415 update_nonlocal_goto_save_area ();
66d433c7 3416
60ecc450 3417 /* Free up storage we no longer need. */
3418 for (i = 0; i < num_actuals; ++i)
dd045aee 3419 free (args[i].aligned_regs);
60ecc450 3420
3421 insns = get_insns ();
3422 end_sequence ();
3423
3424 if (pass == 0)
3425 {
3426 tail_call_insns = insns;
3427
60ecc450 3428 /* Restore the pending stack adjustment now that we have
3429 finished generating the sibling call sequence. */
91b70175 3430
60ecc450 3431 pending_stack_adjust = save_pending_stack_adjust;
91b70175 3432 stack_pointer_delta = save_stack_pointer_delta;
0e0be288 3433
3434 /* Prepare arg structure for next iteration. */
c87678e4 3435 for (i = 0; i < num_actuals; i++)
0e0be288 3436 {
3437 args[i].value = 0;
3438 args[i].aligned_regs = 0;
3439 args[i].stack = 0;
3440 }
7ecc63d3 3441
3442 sbitmap_free (stored_args_map);
74c02416 3443 internal_arg_pointer_exp_state.scan_start = NULL_RTX;
f1f41a6c 3444 internal_arg_pointer_exp_state.cache.release ();
60ecc450 3445 }
3446 else
9069face 3447 {
3448 normal_call_insns = insns;
3449
3450 /* Verify that we've deallocated all the stack we used. */
4fec1d6c 3451 gcc_assert ((flags & ECF_NORETURN)
231bd014 3452 || (old_stack_allocated
3453 == stack_pointer_delta - pending_stack_adjust));
9069face 3454 }
ae8d6151 3455
3456 /* If something prevents making this a sibling call,
3457 zero out the sequence. */
3458 if (sibcall_failure)
3459 tail_call_insns = NULL_RTX;
4ee9c684 3460 else
3461 break;
60ecc450 3462 }
3463
365db11e 3464 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4ee9c684 3465	   arguments too, as the argument area is now clobbered by the call.  */
3466 if (tail_call_insns)
60ecc450 3467 {
4ee9c684 3468 emit_insn (tail_call_insns);
18d50ae6 3469 crtl->tail_call_emit = true;
60ecc450 3470 }
3471 else
31d3e01c 3472 emit_insn (normal_call_insns);
66d433c7 3473
60ecc450 3474 currently_expanding_call--;
6d801f27 3475
dd045aee 3476 free (stack_usage_map_buf);
a331ea1b 3477
66d433c7 3478 return target;
3479}
915e81b8 3480
4ee9c684 3481/* A sibling call sequence invalidates any REG_EQUIV notes made for
3482 this function's incoming arguments.
3483
3484 At the start of RTL generation we know the only REG_EQUIV notes
0a227ed5 3485 in the rtl chain are those for incoming arguments, so we can look
3486 for REG_EQUIV notes between the start of the function and the
3487 NOTE_INSN_FUNCTION_BEG.
4ee9c684 3488
3489 This is (slight) overkill. We could keep track of the highest
3490 argument we clobber and be more selective in removing notes, but it
3491 does not seem to be worth the effort. */
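/* For instance, an incoming argument register typically carries a
   REG_EQUIV note equating it with its caller-allocated stack slot; once
   a tail call has reused that argument area, the equivalence no longer
   holds, which is why the notes are deleted wholesale below.  */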
0a227ed5 3492
4ee9c684 3493void
3494fixup_tail_calls (void)
3495{
0a227ed5 3496 rtx insn;
3497
3498 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3499 {
750a330e 3500 rtx note;
3501
0a227ed5 3502 /* There are never REG_EQUIV notes for the incoming arguments
3503 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3504 if (NOTE_P (insn)
ad4583d9 3505 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
0a227ed5 3506 break;
3507
750a330e 3508 note = find_reg_note (insn, REG_EQUIV, 0);
3509 if (note)
3510 remove_note (insn, note);
3511 note = find_reg_note (insn, REG_EQUIV, 0);
3512 gcc_assert (!note);
0a227ed5 3513 }
4ee9c684 3514}
3515
915e81b8 3516/* Traverse a list of TYPES and expand all complex types into their
3517 components. */
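/* For example, on a target whose split_complex_arg hook accepts
   COMPLEX_TYPEs, a parameter list of (complex double, int) is rewritten
   by this function as (double, double, int).  */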
5ab29745 3518static tree
915e81b8 3519split_complex_types (tree types)
3520{
3521 tree p;
3522
92d40bc4 3523 /* Before allocating memory, check for the common case of no complex. */
3524 for (p = types; p; p = TREE_CHAIN (p))
3525 {
3526 tree type = TREE_VALUE (p);
3527 if (TREE_CODE (type) == COMPLEX_TYPE
3528 && targetm.calls.split_complex_arg (type))
a0c938f0 3529 goto found;
92d40bc4 3530 }
3531 return types;
3532
3533 found:
915e81b8 3534 types = copy_list (types);
3535
3536 for (p = types; p; p = TREE_CHAIN (p))
3537 {
3538 tree complex_type = TREE_VALUE (p);
3539
92d40bc4 3540 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3541 && targetm.calls.split_complex_arg (complex_type))
915e81b8 3542 {
3543 tree next, imag;
3544
3545 /* Rewrite complex type with component type. */
3546 TREE_VALUE (p) = TREE_TYPE (complex_type);
3547 next = TREE_CHAIN (p);
3548
3549 /* Add another component type for the imaginary part. */
3550 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3551 TREE_CHAIN (p) = imag;
3552 TREE_CHAIN (imag) = next;
3553
3554 /* Skip the newly created node. */
3555 p = TREE_CHAIN (p);
3556 }
3557 }
3558
3559 return types;
3560}
66d433c7 3561\f
20f7032f 3562/* Output a library call to function FUN (a SYMBOL_REF rtx).
c87678e4 3563	   The RETVAL parameter specifies whether the return value needs to be saved;
ebf77775 3564	   the other parameters are documented in the emit_library_call function below.  */
2a631e19 3565
20f7032f 3566static rtx
4c9e08a4 3567emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3568 enum libcall_type fn_type,
3569 enum machine_mode outmode, int nargs, va_list p)
b39693dd 3570{
9bdaf1ba 3571 /* Total size in bytes of all the stack-parms scanned so far. */
3572 struct args_size args_size;
3573 /* Size of arguments before any adjustments (such as rounding). */
3574 struct args_size original_args_size;
19cb6b50 3575 int argnum;
9bdaf1ba 3576 rtx fun;
22c61100 3577	  /* TODO: choose the correct decl type of orgfun.  Sadly this information
	3578	     isn't present here, so we default to the native calling ABI.  */
60e2260d 3579 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
fa20f865 3580 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
9bdaf1ba 3581 int inc;
3582 int count;
9bdaf1ba 3583 rtx argblock = 0;
39cba157 3584 CUMULATIVE_ARGS args_so_far_v;
3585 cumulative_args_t args_so_far;
c87678e4 3586 struct arg
3587 {
3588 rtx value;
3589 enum machine_mode mode;
3590 rtx reg;
3591 int partial;
241399f6 3592 struct locate_and_pad_arg_data locate;
c87678e4 3593 rtx save_area;
3594 };
9bdaf1ba 3595 struct arg *argvec;
3596 int old_inhibit_defer_pop = inhibit_defer_pop;
3597 rtx call_fusage = 0;
3598 rtx mem_value = 0;
16204096 3599 rtx valreg;
9bdaf1ba 3600 int pcc_struct_value = 0;
3601 int struct_value_size = 0;
df4b504c 3602 int flags;
9bdaf1ba 3603 int reg_parm_stack_space = 0;
9bdaf1ba 3604 int needed;
644c283b 3605 rtx before_call;
771d21fa 3606 tree tfom; /* type_for_mode (outmode, 0) */
9bdaf1ba 3607
4448f543 3608#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3609 /* Define the boundary of the register parm stack space that needs to be
	3610	     saved, if any.  */
75a70cf9 3611 int low_to_save = 0, high_to_save = 0;
c87678e4 3612 rtx save_area = 0; /* Place that it is saved. */
9bdaf1ba 3613#endif
3614
9bdaf1ba 3615 /* Size of the stack reserved for parameter registers. */
3616 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3617 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 3618 char *stack_usage_map_buf = NULL;
9bdaf1ba 3619
45550790 3620 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3621
9bdaf1ba 3622#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3623 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
9bdaf1ba 3624#endif
3625
ab7ccfa2 3626	  /* By default, library functions cannot throw.  */
df4b504c 3627 flags = ECF_NOTHROW;
3628
ab7ccfa2 3629 switch (fn_type)
3630 {
3631 case LCT_NORMAL:
2a0c81bf 3632 break;
ab7ccfa2 3633 case LCT_CONST:
2a0c81bf 3634 flags |= ECF_CONST;
3635 break;
ab7ccfa2 3636 case LCT_PURE:
2a0c81bf 3637 flags |= ECF_PURE;
ab7ccfa2 3638 break;
ab7ccfa2 3639 case LCT_NORETURN:
3640 flags |= ECF_NORETURN;
3641 break;
3642 case LCT_THROW:
3643 flags = ECF_NORETURN;
3644 break;
0ff18307 3645 case LCT_RETURNS_TWICE:
3646 flags = ECF_RETURNS_TWICE;
3647 break;
ab7ccfa2 3648 }
9bdaf1ba 3649 fun = orgfun;
3650
9bdaf1ba 3651 /* Ensure current function's preferred stack boundary is at least
3652 what we need. */
edb7afe8 3653 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3654 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
9bdaf1ba 3655
3656 /* If this kind of value comes back in memory,
3657 decide where in memory it should come back. */
771d21fa 3658 if (outmode != VOIDmode)
9bdaf1ba 3659 {
dc24ddbd 3660 tfom = lang_hooks.types.type_for_mode (outmode, 0);
45550790 3661 if (aggregate_value_p (tfom, 0))
771d21fa 3662 {
9bdaf1ba 3663#ifdef PCC_STATIC_STRUCT_RETURN
771d21fa 3664 rtx pointer_reg
46b3ff29 3665 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
771d21fa 3666 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3667 pcc_struct_value = 1;
3668 if (value == 0)
3669 value = gen_reg_rtx (outmode);
9bdaf1ba 3670#else /* not PCC_STATIC_STRUCT_RETURN */
771d21fa 3671 struct_value_size = GET_MODE_SIZE (outmode);
e16ceb8e 3672 if (value != 0 && MEM_P (value))
771d21fa 3673 mem_value = value;
3674 else
0ab48139 3675 mem_value = assign_temp (tfom, 1, 1);
9bdaf1ba 3676#endif
771d21fa 3677 /* This call returns a big structure. */
2dd6f9ed 3678 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
771d21fa 3679 }
9bdaf1ba 3680 }
771d21fa 3681 else
3682 tfom = void_type_node;
9bdaf1ba 3683
3684 /* ??? Unfinished: must pass the memory address as an argument. */
3685
3686 /* Copy all the libcall-arguments out of the varargs data
3687 and into a vector ARGVEC.
3688
3689 Compute how to pass each argument. We only support a very small subset
3690 of the full argument passing conventions to limit complexity here since
3691 library functions shouldn't have many args. */
3692
364c0c59 3693 argvec = XALLOCAVEC (struct arg, nargs + 1);
f0af5a88 3694 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
9bdaf1ba 3695
e1efd914 3696#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
39cba157 3697 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
e1efd914 3698#else
39cba157 3699 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
e1efd914 3700#endif
39cba157 3701 args_so_far = pack_cumulative_args (&args_so_far_v);
9bdaf1ba 3702
3703 args_size.constant = 0;
3704 args_size.var = 0;
3705
3706 count = 0;
3707
3708 push_temp_slots ();
3709
3710 /* If there's a structure value address to be passed,
3711 either pass it in the special place, or pass it as an extra argument. */
45550790 3712 if (mem_value && struct_value == 0 && ! pcc_struct_value)
9bdaf1ba 3713 {
3714 rtx addr = XEXP (mem_value, 0);
a0c938f0 3715
9bdaf1ba 3716 nargs++;
3717
a56c46d2 3718 /* Make sure it is a reasonable operand for a move or push insn. */
3719 if (!REG_P (addr) && !MEM_P (addr)
ca316360 3720 && !(CONSTANT_P (addr)
3721 && targetm.legitimate_constant_p (Pmode, addr)))
a56c46d2 3722 addr = force_operand (addr, NULL_RTX);
3723
9bdaf1ba 3724 argvec[count].value = addr;
3725 argvec[count].mode = Pmode;
3726 argvec[count].partial = 0;
3727
39cba157 3728 argvec[count].reg = targetm.calls.function_arg (args_so_far,
f387af4f 3729 Pmode, NULL_TREE, true);
39cba157 3730 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
f054eb3c 3731 NULL_TREE, 1) == 0);
9bdaf1ba 3732
3733 locate_and_pad_parm (Pmode, NULL_TREE,
2e735c0d 3734#ifdef STACK_PARMS_IN_REG_PARM_AREA
a0c938f0 3735 1,
2e735c0d 3736#else
3737 argvec[count].reg != 0,
3738#endif
241399f6 3739 0, NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 3740
9bdaf1ba 3741 if (argvec[count].reg == 0 || argvec[count].partial != 0
3742 || reg_parm_stack_space > 0)
241399f6 3743 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 3744
39cba157 3745 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
9bdaf1ba 3746
3747 count++;
3748 }
3749
3750 for (; count < nargs; count++)
3751 {
3752 rtx val = va_arg (p, rtx);
d62e827b 3753 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
adaf4ef0 3754 int unsigned_p = 0;
9bdaf1ba 3755
3756 /* We cannot convert the arg value to the mode the library wants here;
	3757	         we must do it earlier, where we know the signedness of the arg.  */
231bd014 3758 gcc_assert (mode != BLKmode
3759 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
9bdaf1ba 3760
a56c46d2 3761 /* Make sure it is a reasonable operand for a move or push insn. */
3762 if (!REG_P (val) && !MEM_P (val)
ca316360 3763 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
a56c46d2 3764 val = force_operand (val, NULL_RTX);
3765
39cba157 3766 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
9bdaf1ba 3767 {
ddaf7ad3 3768 rtx slot;
13f08ee7 3769 int must_copy
39cba157 3770 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
ddaf7ad3 3771
9c2a0c05 3772 /* If this was a CONST function, it is now PURE since it now
3773 reads memory. */
5096b8b0 3774 if (flags & ECF_CONST)
3775 {
3776 flags &= ~ECF_CONST;
3777 flags |= ECF_PURE;
3778 }
3779
590c3166 3780 if (MEM_P (val) && !must_copy)
006e2d5a 3781 {
3782 tree val_expr = MEM_EXPR (val);
3783 if (val_expr)
3784 mark_addressable (val_expr);
3785 slot = val;
3786 }
41dc12b4 3787 else
ddaf7ad3 3788 {
dc24ddbd 3789 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
0ab48139 3790 1, 1);
ddaf7ad3 3791 emit_move_insn (slot, val);
3792 }
387bc205 3793
a683e787 3794 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3795 gen_rtx_USE (VOIDmode, slot),
3796 call_fusage);
ddaf7ad3 3797 if (must_copy)
3798 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3799 gen_rtx_CLOBBER (VOIDmode,
3800 slot),
3801 call_fusage);
3802
9bdaf1ba 3803 mode = Pmode;
ddaf7ad3 3804 val = force_operand (XEXP (slot, 0), NULL_RTX);
9bdaf1ba 3805 }
9bdaf1ba 3806
adaf4ef0 3807 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
9bdaf1ba 3808 argvec[count].mode = mode;
adaf4ef0 3809 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
39cba157 3810 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
f387af4f 3811 NULL_TREE, true);
9bdaf1ba 3812
9bdaf1ba 3813 argvec[count].partial
39cba157 3814 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
9bdaf1ba 3815
11fb947f 3816 if (argvec[count].reg == 0
3817 || argvec[count].partial != 0
3818 || reg_parm_stack_space > 0)
3819 {
3820 locate_and_pad_parm (mode, NULL_TREE,
2e735c0d 3821#ifdef STACK_PARMS_IN_REG_PARM_AREA
11fb947f 3822 1,
2e735c0d 3823#else
11fb947f 3824 argvec[count].reg != 0,
3825#endif
3826 argvec[count].partial,
3827 NULL_TREE, &args_size, &argvec[count].locate);
3828 args_size.constant += argvec[count].locate.size.constant;
3829 gcc_assert (!argvec[count].locate.size.var);
3830 }
3831#ifdef BLOCK_REG_PADDING
3832 else
3833 /* The argument is passed entirely in registers. See at which
3834 end it should be padded. */
3835 argvec[count].locate.where_pad =
3836 BLOCK_REG_PADDING (mode, NULL_TREE,
3837 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
2e735c0d 3838#endif
9bdaf1ba 3839
39cba157 3840 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
9bdaf1ba 3841 }
9bdaf1ba 3842
9bdaf1ba 3843 /* If this machine requires an external definition for library
3844 functions, write one out. */
3845 assemble_external_libcall (fun);
3846
3847 original_args_size = args_size;
91b70175 3848 args_size.constant = (((args_size.constant
3849 + stack_pointer_delta
3850 + STACK_BYTES - 1)
3851 / STACK_BYTES
3852 * STACK_BYTES)
3853 - stack_pointer_delta);
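  /* A worked instance of the rounding above: with STACK_BYTES == 16,
     stack_pointer_delta == 4 and an incoming args_size.constant of 20,
     this yields ((20 + 4 + 15) / 16) * 16 - 4 == 28, so that once the
     28 bytes of arguments are pushed the total offset, 28 + 4, is again
     16-byte aligned.  */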
9bdaf1ba 3854
3855 args_size.constant = MAX (args_size.constant,
3856 reg_parm_stack_space);
3857
fa20f865 3858 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 3859 args_size.constant -= reg_parm_stack_space;
9bdaf1ba 3860
abe32cce 3861 if (args_size.constant > crtl->outgoing_args_size)
3862 crtl->outgoing_args_size = args_size.constant;
9bdaf1ba 3863
8c0dd614 3864 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
990495a7 3865 {
3866 int pushed = args_size.constant + pending_stack_adjust;
3867 if (pushed > current_function_pushed_stack_size)
3868 current_function_pushed_stack_size = pushed;
3869 }
3870
4448f543 3871 if (ACCUMULATE_OUTGOING_ARGS)
3872 {
3873 /* Since the stack pointer will never be pushed, it is possible for
3874 the evaluation of a parm to clobber something we have already
3875 written to the stack. Since most function calls on RISC machines
3876 do not use the stack, this is uncommon, but must work correctly.
9bdaf1ba 3877
4448f543 3878 Therefore, we save any area of the stack that was already written
3879 and that we are using. Here we set up to do this by making a new
3880 stack usage map from the old one.
9bdaf1ba 3881
4448f543 3882 Another approach might be to try to reorder the argument
3883 evaluations to avoid this conflicting stack usage. */
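      /* stack_usage_map itself is a plain byte map: entry I is nonzero
         when byte I of the outgoing argument area has already been
         written, which is what the save-area scan further down tests.  */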
9bdaf1ba 3884
4448f543 3885 needed = args_size.constant;
9bdaf1ba 3886
4448f543 3887 /* Since we will be writing into the entire argument area, the
3888 map must be allocated for its entire size, not just the part that
3889 is the responsibility of the caller. */
fa20f865 3890 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 3891 needed += reg_parm_stack_space;
9bdaf1ba 3892
3893#ifdef ARGS_GROW_DOWNWARD
4448f543 3894 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3895 needed + 1);
9bdaf1ba 3896#else
4448f543 3897 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3898 needed);
9bdaf1ba 3899#endif
4c36ffe6 3900 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3901 stack_usage_map = stack_usage_map_buf;
9bdaf1ba 3902
4448f543 3903 if (initial_highest_arg_in_use)
8e547276 3904 memcpy (stack_usage_map, initial_stack_usage_map,
3905 initial_highest_arg_in_use);
9bdaf1ba 3906
4448f543 3907 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 3908 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 3909 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3910 needed = 0;
9bdaf1ba 3911
9c0a756f 3912 /* We must be careful to use virtual regs before they're instantiated,
a0c938f0 3913 and real regs afterwards. Loop optimization, for example, can create
9c0a756f 3914 new libcalls after we've instantiated the virtual regs, and if we
3915 use virtuals anyway, they won't match the rtl patterns. */
9bdaf1ba 3916
9c0a756f 3917 if (virtuals_instantiated)
29c05e22 3918 argblock = plus_constant (Pmode, stack_pointer_rtx,
3919 STACK_POINTER_OFFSET);
9c0a756f 3920 else
3921 argblock = virtual_outgoing_args_rtx;
4448f543 3922 }
3923 else
3924 {
3925 if (!PUSH_ARGS)
3926 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3927 }
9bdaf1ba 3928
9bdaf1ba 3929 /* If we push args individually in reverse order, perform stack alignment
3930 before the first push (the last arg). */
4448f543 3931 if (argblock == 0 && PUSH_ARGS_REVERSED)
9bdaf1ba 3932 anti_adjust_stack (GEN_INT (args_size.constant
3933 - original_args_size.constant));
9bdaf1ba 3934
4448f543 3935 if (PUSH_ARGS_REVERSED)
3936 {
3937 inc = -1;
3938 argnum = nargs - 1;
3939 }
3940 else
3941 {
3942 inc = 1;
3943 argnum = 0;
3944 }
9bdaf1ba 3945
4448f543 3946#ifdef REG_PARM_STACK_SPACE
3947 if (ACCUMULATE_OUTGOING_ARGS)
3948 {
3949 /* The argument list is the property of the called routine and it
3950 may clobber it. If the fixed area has been used for previous
6e96b626 3951 parameters, we must save and restore it. */
3952 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3953 &low_to_save, &high_to_save);
9bdaf1ba 3954 }
3955#endif
c87678e4 3956
9bdaf1ba 3957 /* Push the args that need to be pushed. */
3958
3959 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3960 are to be pushed. */
3961 for (count = 0; count < nargs; count++, argnum += inc)
3962 {
19cb6b50 3963 enum machine_mode mode = argvec[argnum].mode;
3964 rtx val = argvec[argnum].value;
9bdaf1ba 3965 rtx reg = argvec[argnum].reg;
3966 int partial = argvec[argnum].partial;
c2fd5e89 3967 unsigned int parm_align = argvec[argnum].locate.boundary;
4448f543 3968 int lower_bound = 0, upper_bound = 0, i;
9bdaf1ba 3969
3970 if (! (reg != 0 && partial == 0))
3971 {
4143d08b 3972 rtx use;
3973
4448f543 3974 if (ACCUMULATE_OUTGOING_ARGS)
3975 {
02510658 3976 /* If this is being stored into a pre-allocated, fixed-size,
3977 stack area, save any previous data at that location. */
9bdaf1ba 3978
3979#ifdef ARGS_GROW_DOWNWARD
4448f543 3980 /* stack_slot is negative, but we want to index stack_usage_map
3981 with positive values. */
9a0cf170 3982 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
241399f6 3983 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
9bdaf1ba 3984#else
9a0cf170 3985 lower_bound = argvec[argnum].locate.slot_offset.constant;
241399f6 3986 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
9bdaf1ba 3987#endif
3988
fd2c0c1d 3989 i = lower_bound;
3990 /* Don't worry about things in the fixed argument area;
3991 it has already been saved. */
3992 if (i < reg_parm_stack_space)
3993 i = reg_parm_stack_space;
3994 while (i < upper_bound && stack_usage_map[i] == 0)
3995 i++;
9bdaf1ba 3996
fd2c0c1d 3997 if (i < upper_bound)
4448f543 3998 {
241399f6 3999 /* We need to make a save area. */
4000 unsigned int size
4001 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4448f543 4002 enum machine_mode save_mode
241399f6 4003 = mode_for_size (size, MODE_INT, 1);
4004 rtx adr
29c05e22 4005 = plus_constant (Pmode, argblock,
241399f6 4006 argvec[argnum].locate.offset.constant);
4448f543 4007 rtx stack_area
241399f6 4008 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4448f543 4009
f9c6a9c3 4010 if (save_mode == BLKmode)
4011 {
4012 argvec[argnum].save_area
4013 = assign_stack_temp (BLKmode,
0ab48139 4014	                                       argvec[argnum].locate.size.constant);
f9c6a9c3 4016
4017 emit_block_move (validize_mem (argvec[argnum].save_area),
a0c938f0 4018 stack_area,
f9c6a9c3 4019 GEN_INT (argvec[argnum].locate.size.constant),
4020 BLOCK_OP_CALL_PARM);
4021 }
4022 else
4023 {
4024 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4025
4026 emit_move_insn (argvec[argnum].save_area, stack_area);
4027 }
4448f543 4028 }
9bdaf1ba 4029 }
325d1c45 4030
c2fd5e89 4031 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
0378dbdc 4032 partial, reg, 0, argblock,
241399f6 4033 GEN_INT (argvec[argnum].locate.offset.constant),
4034 reg_parm_stack_space,
4035 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
9bdaf1ba 4036
9bdaf1ba 4037 /* Now mark the segment we just used. */
4448f543 4038 if (ACCUMULATE_OUTGOING_ARGS)
4039 for (i = lower_bound; i < upper_bound; i++)
4040 stack_usage_map[i] = 1;
9bdaf1ba 4041
4042 NO_DEFER_POP;
2eb9302a 4043
4143d08b 4044 /* Indicate argument access so that alias.c knows that these
4045 values are live. */
4046 if (argblock)
29c05e22 4047 use = plus_constant (Pmode, argblock,
4143d08b 4048 argvec[argnum].locate.offset.constant);
4049 else
4050 /* When arguments are pushed, trying to tell alias.c where
4051 exactly this argument is won't work, because the
4052 auto-increment causes confusion. So we merely indicate
4053 that we access something with a known mode somewhere on
4054 the stack. */
4055 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4056 gen_rtx_SCRATCH (Pmode));
4057 use = gen_rtx_MEM (argvec[argnum].mode, use);
4058 use = gen_rtx_USE (VOIDmode, use);
4059 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
9bdaf1ba 4060 }
4061 }
4062
9bdaf1ba 4063 /* If we pushed args in forward order, perform stack alignment
4064 after pushing the last arg. */
4448f543 4065 if (argblock == 0 && !PUSH_ARGS_REVERSED)
9bdaf1ba 4066 anti_adjust_stack (GEN_INT (args_size.constant
4067 - original_args_size.constant));
9bdaf1ba 4068
4448f543 4069 if (PUSH_ARGS_REVERSED)
4070 argnum = nargs - 1;
4071 else
4072 argnum = 0;
9bdaf1ba 4073
82c7907c 4074 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
9bdaf1ba 4075
4076 /* Now load any reg parms into their regs. */
4077
4078 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4079 are to be pushed. */
4080 for (count = 0; count < nargs; count++, argnum += inc)
4081 {
bec917cc 4082 enum machine_mode mode = argvec[argnum].mode;
19cb6b50 4083 rtx val = argvec[argnum].value;
9bdaf1ba 4084 rtx reg = argvec[argnum].reg;
4085 int partial = argvec[argnum].partial;
ab6e3ce0 4086#ifdef BLOCK_REG_PADDING
37cd19a4 4087 int size = 0;
ab6e3ce0 4088#endif
37cd19a4 4089
9bdaf1ba 4090 /* Handle calls that pass values in multiple non-contiguous
4091 locations. The PA64 has examples of this for library calls. */
4092 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bec917cc 4093 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
9bdaf1ba 4094 else if (reg != 0 && partial == 0)
37cd19a4 4095 {
4096 emit_move_insn (reg, val);
4097#ifdef BLOCK_REG_PADDING
4098 size = GET_MODE_SIZE (argvec[argnum].mode);
4099
4100 /* Copied from load_register_parameters. */
4101
	4102	          /* Handle the case where we have a value that needs shifting
	4103	             up to the MSB, e.g. a QImode value when we are padding
	4104	             upward on a BYTES_BIG_ENDIAN machine.  */
4105 if (size < UNITS_PER_WORD
4106 && (argvec[argnum].locate.where_pad
4107 == (BYTES_BIG_ENDIAN ? upward : downward)))
4108 {
4109 rtx x;
4110 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4111
4112 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4113 report the whole reg as used. Strictly speaking, the
4114 call only uses SIZE bytes at the msb end, but it doesn't
4115 seem worth generating rtl to say that. */
4116 reg = gen_rtx_REG (word_mode, REGNO (reg));
4117 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4118 if (x != reg)
4119 emit_move_insn (reg, x);
4120 }
4121#endif
4122 }
9bdaf1ba 4123
4124 NO_DEFER_POP;
4125 }
4126
9bdaf1ba 4127 /* Any regs containing parms remain in use through the call. */
4128 for (count = 0; count < nargs; count++)
4129 {
4130 rtx reg = argvec[count].reg;
4131 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4132 use_group_regs (&call_fusage, reg);
4133 else if (reg != 0)
6c6f16e5 4134 {
4135 int partial = argvec[count].partial;
4136 if (partial)
4137 {
4138 int nregs;
4139 gcc_assert (partial % UNITS_PER_WORD == 0);
4140 nregs = partial / UNITS_PER_WORD;
4141 use_regs (&call_fusage, REGNO (reg), nregs);
4142 }
4143 else
4144 use_reg (&call_fusage, reg);
4145 }
9bdaf1ba 4146 }
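  /* E.g. with 4-byte words, an argument with partial == 8 had its first
     two words passed in registers, so exactly two consecutive hard regs
     starting at REGNO (reg) are marked as used above.  */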
4147
4148 /* Pass the function the address in which to return a structure value. */
45550790 4149 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
9bdaf1ba 4150 {
45550790 4151 emit_move_insn (struct_value,
9bdaf1ba 4152 force_reg (Pmode,
4153 force_operand (XEXP (mem_value, 0),
4154 NULL_RTX)));
8ad4c111 4155 if (REG_P (struct_value))
45550790 4156 use_reg (&call_fusage, struct_value);
9bdaf1ba 4157 }
4158
4159 /* Don't allow popping to be deferred, since then
4160 cse'ing of library calls could delete a call and leave the pop. */
4161 NO_DEFER_POP;
16204096 4162 valreg = (mem_value == 0 && outmode != VOIDmode
578d1295 4163 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
9bdaf1ba 4164
481feae3 4165 /* Stack must be properly aligned now. */
231bd014 4166 gcc_assert (!(stack_pointer_delta
4167 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
fa4f1f09 4168
644c283b 4169 before_call = get_last_insn ();
4170
9bdaf1ba 4171 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4172 will set inhibit_defer_pop to that value. */
20f7032f 4173 /* The return type is needed to decide how many bytes the function pops.
4174 Signedness plays no role in that, so for simplicity, we pretend it's
4175 always signed. We also assume that the list of arguments passed has
4176 no impact, so we pretend it is unknown. */
9bdaf1ba 4177
4ee9c684 4178 emit_call_1 (fun, NULL,
c87678e4 4179 get_identifier (XSTR (orgfun, 0)),
771d21fa 4180 build_function_type (tfom, NULL_TREE),
c87678e4 4181 original_args_size.constant, args_size.constant,
9bdaf1ba 4182 struct_value_size,
39cba157 4183 targetm.calls.function_arg (args_so_far,
f387af4f 4184 VOIDmode, void_type_node, true),
16204096 4185 valreg,
39cba157 4186 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
9bdaf1ba 4187
37cd19a4 4188 /* Right-shift returned value if necessary. */
4189 if (!pcc_struct_value
4190 && TYPE_MODE (tfom) != BLKmode
4191 && targetm.calls.return_in_msb (tfom))
4192 {
4193 shift_return_value (TYPE_MODE (tfom), false, valreg);
4194 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4195 }
4196
3072d30e 4197 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4198 that it should complain if nonvolatile values are live. For
4199 functions that cannot return, inform flow that control does not
4200 fall through. */
4fec1d6c 4201 if (flags & ECF_NORETURN)
644c283b 4202 {
9239aee6 4203 /* The barrier note must be emitted
644c283b 4204 immediately after the CALL_INSN. Some ports emit more than
4205 just a CALL_INSN above, so we must search for it here. */
644c283b 4206 rtx last = get_last_insn ();
6d7dc5b9 4207 while (!CALL_P (last))
644c283b 4208 {
4209 last = PREV_INSN (last);
4210 /* There was no CALL_INSN? */
231bd014 4211 gcc_assert (last != before_call);
644c283b 4212 }
4213
9239aee6 4214 emit_barrier_after (last);
644c283b 4215 }
4216
43926c6a 4217 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4218 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4219 if (flags & ECF_NOTHROW)
4220 {
4221 rtx last = get_last_insn ();
4222 while (!CALL_P (last))
4223 {
4224 last = PREV_INSN (last);
4225 /* There was no CALL_INSN? */
4226 gcc_assert (last != before_call);
4227 }
4228
4229 make_reg_eh_region_note_nothrow_nononlocal (last);
4230 }
4231
9bdaf1ba 4232 /* Now restore inhibit_defer_pop to its actual original value. */
4233 OK_DEFER_POP;
4234
4235 pop_temp_slots ();
4236
4237 /* Copy the value to the right place. */
20f7032f 4238 if (outmode != VOIDmode && retval)
9bdaf1ba 4239 {
4240 if (mem_value)
4241 {
4242 if (value == 0)
4243 value = mem_value;
4244 if (value != mem_value)
4245 emit_move_insn (value, mem_value);
4246 }
40651bac 4247 else if (GET_CODE (valreg) == PARALLEL)
4248 {
4249 if (value == 0)
4250 value = gen_reg_rtx (outmode);
4c3a0ea5 4251 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
40651bac 4252 }
9bdaf1ba 4253 else
4e1a3169 4254 {
3b2411a8 4255 /* Convert to the proper mode if a promotion has been active. */
4e1a3169 4256 if (GET_MODE (valreg) != outmode)
4257 {
4258 int unsignedp = TYPE_UNSIGNED (tfom);
4259
3b2411a8 4260 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4261 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4e1a3169 4262 == GET_MODE (valreg));
4e1a3169 4263 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4264 }
4265
4266 if (value != 0)
4267 emit_move_insn (value, valreg);
4268 else
4269 value = valreg;
4270 }
9bdaf1ba 4271 }
4272
4448f543 4273 if (ACCUMULATE_OUTGOING_ARGS)
9bdaf1ba 4274 {
4448f543 4275#ifdef REG_PARM_STACK_SPACE
4276 if (save_area)
6e96b626 4277 restore_fixed_argument_area (save_area, argblock,
4278 high_to_save, low_to_save);
9bdaf1ba 4279#endif
c87678e4 4280
4448f543 4281 /* If we saved any argument areas, restore them. */
4282 for (count = 0; count < nargs; count++)
4283 if (argvec[count].save_area)
4284 {
4285 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
29c05e22 4286 rtx adr = plus_constant (Pmode, argblock,
241399f6 4287 argvec[count].locate.offset.constant);
4288 rtx stack_area = gen_rtx_MEM (save_mode,
4289 memory_address (save_mode, adr));
4448f543 4290
f9c6a9c3 4291 if (save_mode == BLKmode)
4292 emit_block_move (stack_area,
a0c938f0 4293 validize_mem (argvec[count].save_area),
f9c6a9c3 4294 GEN_INT (argvec[count].locate.size.constant),
4295 BLOCK_OP_CALL_PARM);
4296 else
4297 emit_move_insn (stack_area, argvec[count].save_area);
4448f543 4298 }
9bdaf1ba 4299
4448f543 4300 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4301 stack_usage_map = initial_stack_usage_map;
4302 }
b39693dd 4303
dd045aee 4304 free (stack_usage_map_buf);
a331ea1b 4305
20f7032f 4306 return value;
4307
4308}
4309\f
4310/* Output a library call to function FUN (a SYMBOL_REF rtx)
4312 for a value of mode OUTMODE,
4313 with NARGS different arguments, passed as alternating rtx values
4314 and machine_modes to convert them to.
20f7032f 4315
2dd6f9ed 4316 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4317 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4318 other types of library calls. */
20f7032f 4319
4320void
ee582a61 4321emit_library_call (rtx orgfun, enum libcall_type fn_type,
4322 enum machine_mode outmode, int nargs, ...)
20f7032f 4323{
ee582a61 4324 va_list p;
4c9e08a4 4325
ee582a61 4326 va_start (p, nargs);
26dfc457 4327 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
ee582a61 4328 va_end (p);
20f7032f 4329}
4330\f
4331/* Like emit_library_call except that an extra argument, VALUE,
4332 comes second and says where to store the result.
4333 (If VALUE is zero, this function chooses a convenient way
	4334	   to return the value.)
4335
4336 This function returns an rtx for where the value is to be found.
4337 If VALUE is nonzero, VALUE is returned. */
4338
4339rtx
ee582a61 4340emit_library_call_value (rtx orgfun, rtx value,
4341 enum libcall_type fn_type,
4342 enum machine_mode outmode, int nargs, ...)
20f7032f 4343{
7ad77798 4344 rtx result;
ee582a61 4345 va_list p;
4c9e08a4 4346
ee582a61 4347 va_start (p, nargs);
7ad77798 4348 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4349 nargs, p);
ee582a61 4350 va_end (p);
20f7032f 4351
7ad77798 4352 return result;
8ddf1c7e 4353}
4354\f
66d433c7 4355/* Store a single argument for a function call
4356 into the register or memory area where it must be passed.
4357 *ARG describes the argument value and where to pass it.
4358
4359 ARGBLOCK is the address of the stack-block for all the arguments,
f9e15121 4360 or 0 on a machine where arguments are pushed individually.
66d433c7 4361
4362 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
c87678e4 4363	   so we must be careful about how the stack is used.  */
66d433c7 4364
4365 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4366 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4367 that we need not worry about saving and restoring the stack.
4368
57679d39 4369 FNDECL is the declaration of the function we are calling.
c87678e4 4370
d10cfa8d 4371 Return nonzero if this arg should cause sibcall failure,
57679d39 4372 zero otherwise. */
66d433c7 4373
57679d39 4374static int
4c9e08a4 4375store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4376 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
66d433c7 4377{
19cb6b50 4378 tree pval = arg->tree_value;
66d433c7 4379 rtx reg = 0;
4380 int partial = 0;
4381 int used = 0;
df9f2bb6 4382 int i, lower_bound = 0, upper_bound = 0;
57679d39 4383 int sibcall_failure = 0;
66d433c7 4384
4385 if (TREE_CODE (pval) == ERROR_MARK)
57679d39 4386 return 1;
66d433c7 4387
1b117c60 4388 /* Push a new temporary level for any temporaries we make for
4389 this argument. */
4390 push_temp_slots ();
4391
02510658 4392 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
66d433c7 4393 {
4448f543 4394 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4395 save any previous data at that location. */
4396 if (argblock && ! variable_size && arg->stack)
4397 {
66d433c7 4398#ifdef ARGS_GROW_DOWNWARD
4448f543 4399 /* stack_slot is negative, but we want to index stack_usage_map
4400 with positive values. */
4401 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4402 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4403 else
4404 upper_bound = 0;
66d433c7 4405
241399f6 4406 lower_bound = upper_bound - arg->locate.size.constant;
66d433c7 4407#else
4448f543 4408 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4409 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4410 else
4411 lower_bound = 0;
66d433c7 4412
241399f6 4413 upper_bound = lower_bound + arg->locate.size.constant;
66d433c7 4414#endif
4415
fd2c0c1d 4416 i = lower_bound;
4417 /* Don't worry about things in the fixed argument area;
4418 it has already been saved. */
4419 if (i < reg_parm_stack_space)
4420 i = reg_parm_stack_space;
4421 while (i < upper_bound && stack_usage_map[i] == 0)
4422 i++;
66d433c7 4423
fd2c0c1d 4424 if (i < upper_bound)
66d433c7 4425 {
241399f6 4426 /* We need to make a save area. */
4427 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4428 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4429 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4430 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4448f543 4431
4432 if (save_mode == BLKmode)
4433 {
387bc205 4434 tree ot = TREE_TYPE (arg->tree_value);
4435 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4436 | TYPE_QUAL_CONST));
4437
0ab48139 4438 arg->save_area = assign_temp (nt, 1, 1);
4448f543 4439 preserve_temp_slots (arg->save_area);
4440 emit_block_move (validize_mem (arg->save_area), stack_area,
c2ca1bab 4441 GEN_INT (arg->locate.size.constant),
0378dbdc 4442 BLOCK_OP_CALL_PARM);
4448f543 4443 }
4444 else
4445 {
4446 arg->save_area = gen_reg_rtx (save_mode);
4447 emit_move_insn (arg->save_area, stack_area);
4448 }
66d433c7 4449 }
4450 }
4451 }
b3caaea3 4452
66d433c7 4453 /* If this isn't going to be placed on both the stack and in registers,
4454 set up the register and number of words. */
4455 if (! arg->pass_on_stack)
04d6fcf8 4456 {
4457 if (flags & ECF_SIBCALL)
4458 reg = arg->tail_call_reg;
4459 else
4460 reg = arg->reg;
4461 partial = arg->partial;
4462 }
66d433c7 4463
231bd014 4464 /* Being passed entirely in a register. We shouldn't be called in
4465 this case. */
4466 gcc_assert (reg == 0 || partial != 0);
a0c938f0 4467
f28c7a75 4468 /* If this arg needs special alignment, don't load the registers
4469 here. */
4470 if (arg->n_aligned_regs != 0)
4471 reg = 0;
c87678e4 4472
f28c7a75 4473 /* If this is being passed partially in a register, we can't evaluate
66d433c7 4474 it directly into its stack slot. Otherwise, we can. */
4475 if (arg->value == 0)
f848041f 4476 {
f848041f 4477 /* stack_arg_under_construction is nonzero if a function argument is
4478 being evaluated directly into the outgoing argument list and
4479 expand_call must take special action to preserve the argument list
4480 if it is called recursively.
4481
4482 For scalar function arguments stack_usage_map is sufficient to
4483 determine which stack slots must be saved and restored. Scalar
4484 arguments in general have pass_on_stack == 0.
4485
4486 If this argument is initialized by a function which takes the
4487 address of the argument (a C++ constructor or a C function
4488 returning a BLKmode structure), then stack_usage_map is
4489 insufficient and expand_call must push the stack around the
4490 function call. Such arguments have pass_on_stack == 1.
4491
4492 Note that it is always safe to set stack_arg_under_construction,
4493 but this generates suboptimal code if set when not needed. */
4494
4495 if (arg->pass_on_stack)
4496 stack_arg_under_construction++;
4448f543 4497
7dbf1af4 4498 arg->value = expand_expr (pval,
4499 (partial
4500 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4501 ? NULL_RTX : arg->stack,
a35a63ff 4502 VOIDmode, EXPAND_STACK_PARM);
1c0c37a5 4503
 4504   /* If we are promoting the object (or if for any other reason the mode
 4505      doesn't agree), convert it to the expected mode now.  */
4506
1560ef8f 4507 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4508 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4509 arg->value, arg->unsignedp);
1c0c37a5 4510
f848041f 4511 if (arg->pass_on_stack)
4512 stack_arg_under_construction--;
f848041f 4513 }
66d433c7 4514
63864e1c 4515 /* Check for overlap with already clobbered argument area. */
ff6c0ab2 4516 if ((flags & ECF_SIBCALL)
4517 && MEM_P (arg->value)
4518 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4519 arg->locate.size.constant))
4520 sibcall_failure = 1;
63864e1c 4521
66d433c7 4522  /* Don't allow anything to be left on the stack from the computation
 4523     of an argument to alloca.  */
02510658 4524 if (flags & ECF_MAY_BE_ALLOCA)
66d433c7 4525 do_pending_stack_adjust ();
4526
4527 if (arg->value == arg->stack)
8a06f2d4 4528 /* If the value is already in the stack slot, we are done. */
4529 ;
1c0c37a5 4530 else if (arg->mode != BLKmode)
66d433c7 4531 {
19cb6b50 4532 int size;
851fc2b3 4533 unsigned int parm_align;
66d433c7 4534
4535 /* Argument is a scalar, not entirely passed in registers.
4536 (If part is passed in registers, arg->partial says how much
4537 and emit_push_insn will take care of putting it there.)
c87678e4 4538
66d433c7 4539 Push it, and if its size is less than the
4540 amount of space allocated to it,
 4541     also bump the stack pointer by the additional space.
4542 Note that in C the default argument promotions
4543 will prevent such mismatches. */
4544
1c0c37a5 4545 size = GET_MODE_SIZE (arg->mode);
66d433c7 4546 /* Compute how much space the push instruction will push.
4547 On many machines, pushing a byte will advance the stack
4548 pointer by a halfword. */
4549#ifdef PUSH_ROUNDING
4550 size = PUSH_ROUNDING (size);
4551#endif
4552 used = size;
4553
4554 /* Compute how much space the argument should get:
4555 round up to a multiple of the alignment for arguments. */
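	  /* E.g. with BITS_PER_UNIT == 8 and PARM_BOUNDARY == 32, a padded
	     3-byte argument is rounded up to occupy 4 bytes of space.  */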
1c0c37a5 4556 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
66d433c7 4557 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4558 / (PARM_BOUNDARY / BITS_PER_UNIT))
4559 * (PARM_BOUNDARY / BITS_PER_UNIT));
4560
851fc2b3 4561 /* Compute the alignment of the pushed argument. */
4562 parm_align = arg->locate.boundary;
4563 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4564 {
4565 int pad = used - size;
4566 if (pad)
4567 {
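	      /* PAD & -PAD isolates the lowest set bit of PAD, i.e. the
		 largest power of two that divides it; the pushed data can
		 only be assumed aligned to that many bytes.  */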
4568 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4569 parm_align = MIN (parm_align, pad_align);
4570 }
4571 }
4572
66d433c7 4573 /* This isn't already where we want it on the stack, so put it there.
4574 This can either be done with push or copy insns. */
4c9e08a4 4575 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
851fc2b3 4576 parm_align, partial, reg, used - size, argblock,
241399f6 4577 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4578 ARGS_SIZE_RTX (arg->locate.alignment_pad));
d5c9a99f 4579
4580 /* Unless this is a partially-in-register argument, the argument is now
4581 in the stack. */
4582 if (partial == 0)
4583 arg->value = arg->stack;
66d433c7 4584 }
4585 else
4586 {
4587 /* BLKmode, at least partly to be pushed. */
4588
cf78c9ff 4589 unsigned int parm_align;
19cb6b50 4590 int excess;
66d433c7 4591 rtx size_rtx;
4592
4593 /* Pushing a nonscalar.
4594 If part is passed in registers, PARTIAL says how much
4595 and emit_push_insn will take care of putting it there. */
4596
4597 /* Round its size up to a multiple
4598 of the allocation unit for arguments. */
4599
241399f6 4600 if (arg->locate.size.var != 0)
66d433c7 4601 {
4602 excess = 0;
241399f6 4603 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
66d433c7 4604 }
4605 else
4606 {
f054eb3c 4607 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4608 for BLKmode is careful to avoid it. */
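	      /* EXCESS: the allocated slot size, minus the size of the
		 value itself, plus the PARTIAL bytes that are passed in
		 registers rather than on the stack.  */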
4609 excess = (arg->locate.size.constant
4610 - int_size_in_bytes (TREE_TYPE (pval))
4611 + partial);
623282b0 4612 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
b9c74b4d 4613 NULL_RTX, TYPE_MODE (sizetype),
4614 EXPAND_NORMAL);
66d433c7 4615 }
4616
c5dc0c32 4617 parm_align = arg->locate.boundary;
cf78c9ff 4618
4619 /* When an argument is padded down, the block is aligned to
4620 PARM_BOUNDARY, but the actual argument isn't. */
4621 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4622 {
241399f6 4623 if (arg->locate.size.var)
cf78c9ff 4624 parm_align = BITS_PER_UNIT;
4625 else if (excess)
4626 {
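		  /* As with PAD above, EXCESS & -EXCESS gives the largest
		     power of two dividing the padding.  */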
28397255 4627 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
cf78c9ff 4628 parm_align = MIN (parm_align, excess_align);
4629 }
4630 }
4631
e16ceb8e 4632 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
57679d39 4633 {
4634 /* emit_push_insn might not work properly if arg->value and
241399f6 4635 argblock + arg->locate.offset areas overlap. */
57679d39 4636 rtx x = arg->value;
4637 int i = 0;
4638
abe32cce 4639 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
57679d39 4640 || (GET_CODE (XEXP (x, 0)) == PLUS
4641 && XEXP (XEXP (x, 0), 0) ==
abe32cce 4642 crtl->args.internal_arg_pointer
971ba038 4643 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
57679d39 4644 {
abe32cce 4645 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
57679d39 4646 i = INTVAL (XEXP (XEXP (x, 0), 1));
4647
21dda4ee 4648 /* expand_call should ensure this. */
231bd014 4649 gcc_assert (!arg->locate.offset.var
2ad152f7 4650 && arg->locate.size.var == 0
971ba038 4651 && CONST_INT_P (size_rtx));
57679d39 4652
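	      /* The source starts at offset I within the incoming argument
		 block, the destination slot at arg->locate.offset.  Flag a
		 sibcall failure if the two regions overlap, or coincide
		 but differ in size.  */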
241399f6 4653 if (arg->locate.offset.constant > i)
57679d39 4654 {
241399f6 4655 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
57679d39 4656 sibcall_failure = 1;
4657 }
241399f6 4658 else if (arg->locate.offset.constant < i)
57679d39 4659 {
2ad152f7 4660 /* Use arg->locate.size.constant instead of size_rtx
4661 because we only care about the part of the argument
4662 on the stack. */
4663 if (i < (arg->locate.offset.constant
4664 + arg->locate.size.constant))
4665 sibcall_failure = 1;
4666 }
4667 else
4668 {
4669 /* Even though they appear to be at the same location,
4670 if part of the outgoing argument is in registers,
4671 they aren't really at the same location. Check for
4672 this by making sure that the incoming size is the
4673 same as the outgoing size. */
4674 if (arg->locate.size.constant != INTVAL (size_rtx))
57679d39 4675 sibcall_failure = 1;
4676 }
4677 }
4678 }
4679
1c0c37a5 4680 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
cf78c9ff 4681 parm_align, partial, reg, excess, argblock,
241399f6 4682 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4683 ARGS_SIZE_RTX (arg->locate.alignment_pad));
66d433c7 4684
d5c9a99f 4685 /* Unless this is a partially-in-register argument, the argument is now
4686 in the stack.
66d433c7 4687
d5c9a99f 4688 ??? Unlike the case above, in which we want the actual
4689 address of the data, so that we can load it directly into a
4690 register, here we want the address of the stack slot, so that
4691 it's properly aligned for word-by-word copying or something
4692 like that. It's not clear that this is always correct. */
4693 if (partial == 0)
4694 arg->value = arg->stack_slot;
4695 }
b600a907 4696
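  /* If the register is a PARALLEL, pull the pieces of the value into
     pseudos now; they are moved into the actual argument registers
     just before the call insn is emitted.  */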
4697 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4698 {
4699 tree type = TREE_TYPE (arg->tree_value);
4700 arg->parallel_value
4701 = emit_group_load_into_temps (arg->reg, arg->value, type,
4702 int_size_in_bytes (type));
4703 }
66d433c7 4704
a35a63ff 4705 /* Mark all slots this store used. */
4706 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4707 && argblock && ! variable_size && arg->stack)
4708 for (i = lower_bound; i < upper_bound; i++)
4709 stack_usage_map[i] = 1;
4710
66d433c7 4711 /* Once we have pushed something, pops can't safely
4712 be deferred during the rest of the arguments. */
4713 NO_DEFER_POP;
4714
0ab48139 4715 /* Free any temporary slots made in processing this argument. */
1b117c60 4716 pop_temp_slots ();
57679d39 4717
4718 return sibcall_failure;
66d433c7 4719}
890f0c17 4720
0336f0f0 4721/* Return true if we do not know how to pass TYPE solely in registers. */
890f0c17 4722
0336f0f0 4723bool
4724must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 4725 const_tree type)
0336f0f0 4726{
4727 if (!type)
4728 return false;
4729
4730 /* If the type has variable size... */
4731 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4732 return true;
890f0c17 4733
0336f0f0 4734 /* If the type is marked as addressable (it is required
4735 to be constructed into the stack)... */
4736 if (TREE_ADDRESSABLE (type))
4737 return true;
4738
4739 return false;
4740}
890f0c17 4741
0d568ddf 4742/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
0336f0f0 4743 takes trailing padding of a structure into account. */
4744/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
890f0c17 4745
4746bool
fb80456a 4747must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
890f0c17 4748{
4749 if (!type)
dceaa0b1 4750 return false;
890f0c17 4751
4752 /* If the type has variable size... */
4753 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4754 return true;
4755
4756 /* If the type is marked as addressable (it is required
4757 to be constructed into the stack)... */
4758 if (TREE_ADDRESSABLE (type))
4759 return true;
4760
 4761   /* If the padding and mode of the type are such that a copy into
 4762      a register would put it into the wrong part of the register.  */
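   /* E.g. on a big-endian target an upward-padded 3-byte value occupies
      the most significant bytes of its word, not the least significant
      bytes where a right-justified register operand is expected.  */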
4763 if (mode == BLKmode
4764 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4765 && (FUNCTION_ARG_PADDING (mode, type)
4766 == (BYTES_BIG_ENDIAN ? upward : downward)))
4767 return true;
4768
4769 return false;
4770}