66d433c7 1/* Convert function calls to rtl insns, for GNU C compiler.
45550790 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
851fc2b3 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
c5dc0c32 4 Free Software Foundation, Inc.
66d433c7 5
f12b58b3 6This file is part of GCC.
66d433c7 7
f12b58b3 8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
f12b58b3 11version.
66d433c7 12
f12b58b3 13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
66d433c7 17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
66d433c7 21
22#include "config.h"
405711de 23#include "system.h"
805e22b2 24#include "coretypes.h"
25#include "tm.h"
405711de 26#include "rtl.h"
27#include "tree.h"
28#include "flags.h"
29#include "expr.h"
5f4cd670 30#include "optabs.h"
d8fc4d0b 31#include "libfuncs.h"
0a893c29 32#include "function.h"
405711de 33#include "regs.h"
9cdfa0b0 34#include "toplev.h"
cd03a192 35#include "output.h"
075136a2 36#include "tm_p.h"
a6260fc7 37#include "timevar.h"
7ecc63d3 38#include "sbitmap.h"
771d21fa 39#include "langhooks.h"
6fce022c 40#include "target.h"
28992b23 41#include "cgraph.h"
95cedffb 42#include "except.h"
3072d30e 43#include "dbgcnt.h"
66d433c7 44
dfb1ee39 45/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
46#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
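/* A worked example (hypothetical target values): with
 PREFERRED_STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8, STACK_BYTES
 evaluates to 8, i.e. the preferred stack boundary expressed in bytes. */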
66d433c7 47
48/* Data structure and subroutines used within expand_call. */
49
50struct arg_data
51{
52 /* Tree node for this argument. */
53 tree tree_value;
1c0c37a5 54 /* Mode for value; TYPE_MODE unless promoted. */
55 enum machine_mode mode;
66d433c7 56 /* Current RTL value for argument, or 0 if it isn't precomputed. */
57 rtx value;
58 /* Initially-computed RTL value for argument; only for const functions. */
59 rtx initial_value;
60 /* Register to pass this argument in, 0 if passed on stack, or a
566d850a 61 PARALLEL if the arg is to be copied into multiple non-contiguous
66d433c7 62 registers. */
63 rtx reg;
0e0be288 64 /* Register to pass this argument in when generating tail call sequence.
65 This is not the same register as for normal calls on machines with
66 register windows. */
67 rtx tail_call_reg;
b600a907 68 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
69 form for emit_group_move. */
70 rtx parallel_value;
23eb5fa6 71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
73 int unsignedp;
83272ab4 74 /* Number of bytes to put in registers. 0 means put the whole arg
75 in registers. Also 0 if not passed in registers. */
66d433c7 76 int partial;
d10cfa8d 77 /* Nonzero if argument must be passed on stack.
f848041f 78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
66d433c7 81 int pass_on_stack;
241399f6 82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
66d433c7 84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
86 rtx stack;
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
90 rtx stack_slot;
66d433c7 91 /* Place that this stack area has been saved, if needed. */
92 rtx save_area;
f28c7a75 93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
97 rtx *aligned_regs;
98 int n_aligned_regs;
66d433c7 99};
100
d10cfa8d 101/* A vector of one char per byte of stack space. A byte is nonzero if
66d433c7 102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105static char *stack_usage_map;
106
107/* Size of STACK_USAGE_MAP. */
108static int highest_outgoing_arg_in_use;
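/* An illustrative (hypothetical) state of the two variables above: if
 bytes 0..15 of the outgoing argument area are occupied, then
 stack_usage_map[0] through stack_usage_map[15] are nonzero and
 highest_outgoing_arg_in_use is 16, so a call emitted while computing
 an argument must not store into those first 16 bytes. */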
d1b03b62 109
7ecc63d3 110/* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
 111 stack location's tail call argument has already been stored into the stack.
 112 This bitmap is used to prevent sibling call optimization if the function tries
 113 to use the parent's incoming argument slots when they have already been
 114 overwritten with tail call arguments. */
115static sbitmap stored_args_map;
116
d1b03b62 117/* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
fbbbfe26 122static int stack_arg_under_construction;
66d433c7 123
4ee9c684 124static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
4c9e08a4 125 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
126 CUMULATIVE_ARGS *);
127static void precompute_register_parameters (int, struct arg_data *, int *);
128static int store_one_arg (struct arg_data *, rtx, int, int, int);
129static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
130static int finalize_must_preallocate (int, int, struct arg_data *,
131 struct args_size *);
132static void precompute_arguments (int, int, struct arg_data *);
133static int compute_argument_block_size (int, struct args_size *, int);
134static void initialize_argument_information (int, struct arg_data *,
cd46caee 135 struct args_size *, int,
136 tree, tree,
4c9e08a4 137 tree, CUMULATIVE_ARGS *, int,
eaa112a0 138 rtx *, int *, int *, int *,
4ee9c684 139 bool *, bool);
4c9e08a4 140static void compute_argument_addresses (struct arg_data *, rtx, int);
141static rtx rtx_for_function_call (tree, tree);
142static void load_register_parameters (struct arg_data *, int, rtx *, int,
143 int, int *);
144static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
145 enum machine_mode, int, va_list);
5d1b319b 146static int special_function_p (const_tree, int);
4c9e08a4 147static int check_sibcall_argument_overlap_1 (rtx);
148static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
149
150static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
38413c80 151 unsigned int);
5ab29745 152static tree split_complex_types (tree);
cde25025 153
4448f543 154#ifdef REG_PARM_STACK_SPACE
4c9e08a4 155static rtx save_fixed_argument_area (int, rtx, int *, int *);
156static void restore_fixed_argument_area (rtx, rtx, int, int);
6a0e6138 157#endif
66d433c7 158\f
66d433c7 159/* Force FUNEXP into a form suitable for the address of a CALL,
160 and return that as an rtx. Also load the static chain register
161 if FNDECL is a nested function.
162
8866f42d 163 CALL_FUSAGE points to a variable holding the prospective
164 CALL_INSN_FUNCTION_USAGE information. */
66d433c7 165
d9076622 166rtx
4ee9c684 167prepare_call_address (rtx funexp, rtx static_chain_value,
168 rtx *call_fusage, int reg_parm_seen, int sibcallp)
66d433c7 169{
c7bf1374 170 /* Make a valid memory address and copy constants through pseudo-regs,
66d433c7 171 but not for a constant address if -fno-function-cse. */
172 if (GET_CODE (funexp) != SYMBOL_REF)
a89aeae3 173 /* If we are using registers for parameters, force the
0dbd1c74 174 function address into a register now. */
175 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
176 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
177 : memory_address (FUNCTION_MODE, funexp));
707ff8b1 178 else if (! sibcallp)
66d433c7 179 {
180#ifndef NO_FUNCTION_CSE
181 if (optimize && ! flag_no_function_cse)
fb154d03 182 funexp = force_reg (Pmode, funexp);
66d433c7 183#endif
184 }
185
186 if (static_chain_value != 0)
187 {
3dce56cc 188 static_chain_value = convert_memory_address (Pmode, static_chain_value);
66d433c7 189 emit_move_insn (static_chain_rtx, static_chain_value);
190
8ad4c111 191 if (REG_P (static_chain_rtx))
4eb91f6f 192 use_reg (call_fusage, static_chain_rtx);
66d433c7 193 }
194
195 return funexp;
196}
197
198/* Generate instructions to call function FUNEXP,
199 and optionally pop the results.
200 The CALL_INSN is the first insn generated.
201
c74d0a20 202 FNDECL is the declaration node of the function. This is given to the
e93a4612 203 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
204
d429bc10 205 FUNTYPE is the data type of the function. This is given to the macro
206 RETURN_POPS_ARGS to determine whether this function pops its own args.
207 We used to allow an identifier for library functions, but that doesn't
208 work when the return type is an aggregate type and the calling convention
209 says that the pointer to this aggregate is to be popped by the callee.
66d433c7 210
211 STACK_SIZE is the number of bytes of arguments on the stack,
a62b99b7 212 ROUNDED_STACK_SIZE is that number rounded up to
213 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
214 both to put into the call insn and to generate explicit popping
215 code if necessary.
66d433c7 216
217 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
218 It is zero if this call doesn't want a structure value.
219
220 NEXT_ARG_REG is the rtx that results from executing
221 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
222 just after all the args have had their registers assigned.
223 This could be whatever you like, but normally it is the first
224 arg-register beyond those used for args in this call,
225 or 0 if all the arg-registers are used in this call.
226 It is passed on to `gen_call' so you can put this info in the call insn.
227
228 VALREG is a hard register in which a value is returned,
229 or 0 if the call does not return a value.
230
231 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
232 the args to this call were processed.
233 We restore `inhibit_defer_pop' to that value.
234
07409b3a 235 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
1e625a2e 236 denote registers used by the called function. */
c87678e4 237
8ddf1c7e 238static void
4ee9c684 239emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
240 tree funtype ATTRIBUTE_UNUSED,
4c9e08a4 241 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT rounded_stack_size,
243 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
244 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
245 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
246 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
66d433c7 247{
dd837bff 248 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
66d433c7 249 rtx call_insn;
250 int already_popped = 0;
e39fae61 251 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
2ed6c343 252#if defined (HAVE_call) && defined (HAVE_call_value)
253 rtx struct_value_size_rtx;
254 struct_value_size_rtx = GEN_INT (struct_value_size);
255#endif
66d433c7 256
87e19636 257#ifdef CALL_POPS_ARGS
258 n_popped += CALL_POPS_ARGS (* args_so_far);
259#endif
4c9e08a4 260
66d433c7 261 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
262 and we don't want to load it into a register as an optimization,
263 because prepare_call_address already did it if it should be done. */
264 if (GET_CODE (funexp) != SYMBOL_REF)
265 funexp = memory_address (FUNCTION_MODE, funexp);
266
60ecc450 267#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
268 if ((ecf_flags & ECF_SIBCALL)
269 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
a864723e 270 && (n_popped > 0 || stack_size == 0))
60ecc450 271 {
2a631e19 272 rtx n_pop = GEN_INT (n_popped);
60ecc450 273 rtx pat;
274
275 /* If this subroutine pops its own args, record that in the call insn
276 if possible, for the sake of frame pointer elimination. */
277
278 if (valreg)
2ed6c343 279 pat = GEN_SIBCALL_VALUE_POP (valreg,
60ecc450 280 gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg,
282 n_pop);
283 else
2ed6c343 284 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
60ecc450 285 rounded_stack_size_rtx, next_arg_reg, n_pop);
286
287 emit_call_insn (pat);
288 already_popped = 1;
289 }
290 else
291#endif
292
66d433c7 293#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
2a631e19 294 /* If the target has "call" or "call_value" insns, then prefer them
295 if no arguments are actually popped. If the target does not have
296 "call" or "call_value" insns, then we must use the popping versions
297 even if the call has no arguments to pop. */
ec596f3b 298#if defined (HAVE_call) && defined (HAVE_call_value)
299 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
ff3ae375 300 && n_popped > 0)
ec596f3b 301#else
302 if (HAVE_call_pop && HAVE_call_value_pop)
303#endif
66d433c7 304 {
e39fae61 305 rtx n_pop = GEN_INT (n_popped);
66d433c7 306 rtx pat;
307
308 /* If this subroutine pops its own args, record that in the call insn
309 if possible, for the sake of frame pointer elimination. */
e93a4612 310
66d433c7 311 if (valreg)
2ed6c343 312 pat = GEN_CALL_VALUE_POP (valreg,
941522d6 313 gen_rtx_MEM (FUNCTION_MODE, funexp),
dd837bff 314 rounded_stack_size_rtx, next_arg_reg, n_pop);
66d433c7 315 else
2ed6c343 316 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
dd837bff 317 rounded_stack_size_rtx, next_arg_reg, n_pop);
66d433c7 318
319 emit_call_insn (pat);
320 already_popped = 1;
321 }
322 else
323#endif
66d433c7 324
60ecc450 325#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
326 if ((ecf_flags & ECF_SIBCALL)
327 && HAVE_sibcall && HAVE_sibcall_value)
328 {
329 if (valreg)
2ed6c343 330 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
60ecc450 331 gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx,
333 next_arg_reg, NULL_RTX));
334 else
2ed6c343 335 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
60ecc450 336 rounded_stack_size_rtx, next_arg_reg,
337 struct_value_size_rtx));
338 }
339 else
340#endif
341
66d433c7 342#if defined (HAVE_call) && defined (HAVE_call_value)
343 if (HAVE_call && HAVE_call_value)
344 {
345 if (valreg)
2ed6c343 346 emit_call_insn (GEN_CALL_VALUE (valreg,
941522d6 347 gen_rtx_MEM (FUNCTION_MODE, funexp),
dd837bff 348 rounded_stack_size_rtx, next_arg_reg,
1e8cd5a7 349 NULL_RTX));
66d433c7 350 else
2ed6c343 351 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
dd837bff 352 rounded_stack_size_rtx, next_arg_reg,
66d433c7 353 struct_value_size_rtx));
354 }
355 else
356#endif
231bd014 357 gcc_unreachable ();
66d433c7 358
d5f9786f 359 /* Find the call we just emitted. */
360 call_insn = last_call_insn ();
66d433c7 361
26dfc457 362 /* Mark memory as used for "pure" function call. */
363 if (ecf_flags & ECF_PURE)
2a631e19 364 call_fusage
365 = gen_rtx_EXPR_LIST
366 (VOIDmode,
367 gen_rtx_USE (VOIDmode,
368 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
369 call_fusage);
26dfc457 370
d5f9786f 371 /* Put the register usage information there. */
372 add_function_usage_to (call_insn, call_fusage);
66d433c7 373
374 /* If this is a const call, then set the insn's unchanging bit. */
26dfc457 375 if (ecf_flags & (ECF_CONST | ECF_PURE))
06a652d1 376 CONST_OR_PURE_CALL_P (call_insn) = 1;
66d433c7 377
00dd2e9e 378 /* If this call can't throw, attach a REG_EH_REGION reg note to that
379 effect. */
60ecc450 380 if (ecf_flags & ECF_NOTHROW)
4e834ca8 381 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
00dd2e9e 382 REG_NOTES (call_insn));
95cedffb 383 else
4ee9c684 384 {
385 int rn = lookup_stmt_eh_region (fntree);
386
387 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
388 throw, which we already took care of. */
389 if (rn > 0)
390 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
391 REG_NOTES (call_insn));
392 note_current_region_may_contain_throw ();
393 }
00dd2e9e 394
356b51a0 395 if (ecf_flags & ECF_NORETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
397 REG_NOTES (call_insn));
398
9239aee6 399 if (ecf_flags & ECF_RETURNS_TWICE)
0ff18307 400 {
401 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
a0c938f0 402 REG_NOTES (call_insn));
0ff18307 403 current_function_calls_setjmp = 1;
404 }
9239aee6 405
60ecc450 406 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
407
d1f88d00 408 /* Restore this now, so that we do defer pops for this call's args
409 if the context of the call as a whole permits. */
410 inhibit_defer_pop = old_inhibit_defer_pop;
411
e39fae61 412 if (n_popped > 0)
66d433c7 413 {
414 if (!already_popped)
37808e3a 415 CALL_INSN_FUNCTION_USAGE (call_insn)
941522d6 416 = gen_rtx_EXPR_LIST (VOIDmode,
417 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
418 CALL_INSN_FUNCTION_USAGE (call_insn));
e39fae61 419 rounded_stack_size -= n_popped;
dd837bff 420 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
91b70175 421 stack_pointer_delta -= n_popped;
66d433c7 422 }
423
4448f543 424 if (!ACCUMULATE_OUTGOING_ARGS)
66d433c7 425 {
4448f543 426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
429
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
432
10d1a2c0 433 if (rounded_stack_size != 0)
4448f543 434 {
ff3ae375 435 if (ecf_flags & ECF_NORETURN)
10d1a2c0 436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
d490e2f2 439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
4448f543 440 pending_stack_adjust += rounded_stack_size;
441 else
442 adjust_stack (rounded_stack_size_rtx);
443 }
66d433c7 444 }
4448f543 445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
450
451 ??? We may optimize similarly to defer_pop above, but it is
452 probably not worthwhile.
c87678e4 453
4448f543 454 ??? It will be worthwhile to enable combine_stack_adjustments even for
455 such machines. */
456 else if (n_popped)
457 anti_adjust_stack (GEN_INT (n_popped));
66d433c7 458}
459
6a0e6138 460/* Determine if the function identified by FNDECL is one with
 461 special properties we wish to know about, and return FLAGS
 462 augmented accordingly.
 463
 464 For example, if the function might return more than one time
 465 (setjmp), then ECF_RETURNS_TWICE is set in the returned flags.
4c8db992 466 Similarly, ECF_NORETURN is set if the function is in the longjmp family.
6a0e6138 467
6a0e6138 468 ECF_MAY_BE_ALLOCA is set for any memory allocation function that might
 469 allocate space from the stack such as alloca. */
470
dfe08167 471static int
5d1b319b 472special_function_p (const_tree fndecl, int flags)
6a0e6138 473{
4ee9c684 474 if (fndecl && DECL_NAME (fndecl)
7259f3f8 475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
6a0e6138 476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
40109983 478 think they are.
a0c938f0 479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you
482 wish. */
0d568ddf 483 && (DECL_CONTEXT (fndecl) == NULL_TREE
40109983 484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
6a0e6138 486 {
71d9fc9b 487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
6a0e6138 489
cc7cc47f 490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
dfe08167 493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
494 && name[0] == 'a'
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
497 && name[0] == '_'
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
cc7cc47f 500
6a0e6138 501 /* Disregard prefix _, __ or __x. */
502 if (name[0] == '_')
503 {
504 if (name[1] == '_' && name[2] == 'x')
505 tname += 3;
506 else if (name[1] == '_')
507 tname += 2;
508 else
509 tname += 1;
510 }
511
512 if (tname[0] == 's')
513 {
dfe08167 514 if ((tname[1] == 'e'
515 && (! strcmp (tname, "setjmp")
516 || ! strcmp (tname, "setjmp_syscall")))
517 || (tname[1] == 'i'
518 && ! strcmp (tname, "sigsetjmp"))
519 || (tname[1] == 'a'
520 && ! strcmp (tname, "savectx")))
521 flags |= ECF_RETURNS_TWICE;
522
6a0e6138 523 if (tname[1] == 'i'
524 && ! strcmp (tname, "siglongjmp"))
4fec1d6c 525 flags |= ECF_NORETURN;
6a0e6138 526 }
527 else if ((tname[0] == 'q' && tname[1] == 's'
528 && ! strcmp (tname, "qsetjmp"))
529 || (tname[0] == 'v' && tname[1] == 'f'
0b4cb8ec 530 && ! strcmp (tname, "vfork"))
531 || (tname[0] == 'g' && tname[1] == 'e'
532 && !strcmp (tname, "getcontext")))
dfe08167 533 flags |= ECF_RETURNS_TWICE;
6a0e6138 534
535 else if (tname[0] == 'l' && tname[1] == 'o'
536 && ! strcmp (tname, "longjmp"))
4fec1d6c 537 flags |= ECF_NORETURN;
6a0e6138 538 }
73673831 539
dfe08167 540 return flags;
6a0e6138 541}
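/* For illustration, assuming file-scope extern declarations: passing
 the FNDECL for setjmp yields FLAGS | ECF_RETURNS_TWICE, longjmp
 yields FLAGS | ECF_NORETURN, and alloca yields
 FLAGS | ECF_MAY_BE_ALLOCA, per the name checks above. */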
542
4c8db992 543/* Return nonzero when FNDECL represents a call to setjmp. */
d490e2f2 544
dfe08167 545int
5d1b319b 546setjmp_call_p (const_tree fndecl)
dfe08167 547{
548 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
549}
550
9a7ecb49 551/* Return true when EXP contains an alloca call. */
552bool
5d1b319b 553alloca_call_p (const_tree exp)
9a7ecb49 554{
555 if (TREE_CODE (exp) == CALL_EXPR
c2f47e15 556 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
557 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
558 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
559 & ECF_MAY_BE_ALLOCA))
9a7ecb49 560 return true;
561 return false;
562}
563
5edaabad 564/* Detect flags (function attributes) from the function decl or type node. */
d490e2f2 565
805e22b2 566int
5d1b319b 567flags_from_decl_or_type (const_tree exp)
dfe08167 568{
569 int flags = 0;
5d1b319b 570 const_tree type = exp;
7a24815f 571
dfe08167 572 if (DECL_P (exp))
573 {
5edaabad 574 type = TREE_TYPE (exp);
575
dfe08167 576 /* The function exp may have the `malloc' attribute. */
7a24815f 577 if (DECL_IS_MALLOC (exp))
dfe08167 578 flags |= ECF_MALLOC;
579
26d1c5ff 580 /* The function exp may have the `returns_twice' attribute. */
581 if (DECL_IS_RETURNS_TWICE (exp))
582 flags |= ECF_RETURNS_TWICE;
583
26dfc457 584 /* The function exp may have the `pure' attribute. */
7a24815f 585 if (DECL_IS_PURE (exp))
ef689d4e 586 flags |= ECF_PURE;
26dfc457 587
fc09b200 588 if (DECL_IS_NOVOPS (exp))
589 flags |= ECF_NOVOPS;
590
dfe08167 591 if (TREE_NOTHROW (exp))
592 flags |= ECF_NOTHROW;
b15db406 593
594 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
ef689d4e 595 flags |= ECF_CONST;
4ee9c684 596
597 flags = special_function_p (exp, flags);
dfe08167 598 }
66d12a6c 599 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
b15db406 600 flags |= ECF_CONST;
dfe08167 601
602 if (TREE_THIS_VOLATILE (exp))
603 flags |= ECF_NORETURN;
604
605 return flags;
606}
607
886a914d 608/* Detect flags from a CALL_EXPR. */
609
610int
b7bf20db 611call_expr_flags (const_tree t)
886a914d 612{
613 int flags;
614 tree decl = get_callee_fndecl (t);
615
616 if (decl)
617 flags = flags_from_decl_or_type (decl);
618 else
619 {
c2f47e15 620 t = TREE_TYPE (CALL_EXPR_FN (t));
886a914d 621 if (t && TREE_CODE (t) == POINTER_TYPE)
622 flags = flags_from_decl_or_type (TREE_TYPE (t));
623 else
624 flags = 0;
625 }
626
627 return flags;
628}
629
6a0e6138 630/* Precompute all register parameters as described by ARGS, storing values
631 into fields within the ARGS array.
632
633 NUM_ACTUALS indicates the total number of elements in the ARGS array.
634
635 Set REG_PARM_SEEN if we encounter a register parameter. */
636
637static void
e2ff5c1b 638precompute_register_parameters (int num_actuals, struct arg_data *args,
639 int *reg_parm_seen)
6a0e6138 640{
641 int i;
642
643 *reg_parm_seen = 0;
644
645 for (i = 0; i < num_actuals; i++)
646 if (args[i].reg != 0 && ! args[i].pass_on_stack)
647 {
648 *reg_parm_seen = 1;
649
650 if (args[i].value == 0)
651 {
652 push_temp_slots ();
8ec3c5c2 653 args[i].value = expand_normal (args[i].tree_value);
6a0e6138 654 preserve_temp_slots (args[i].value);
655 pop_temp_slots ();
6a0e6138 656 }
657
e80b4463 658 /* If the value is a non-legitimate constant, force it into a
659 pseudo now. TLS symbols sometimes need a call to resolve. */
660 if (CONSTANT_P (args[i].value)
661 && !LEGITIMATE_CONSTANT_P (args[i].value))
662 args[i].value = force_reg (args[i].mode, args[i].value);
663
6a0e6138 664 /* If we are to promote the function arg to a wider mode,
665 do it now. */
666
667 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
668 args[i].value
669 = convert_modes (args[i].mode,
670 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
671 args[i].value, args[i].unsignedp);
672
e2ff5c1b 673 /* If we're going to have to load the value by parts, pull the
674 parts into pseudos. The part extraction process can involve
675 non-trivial computation. */
676 if (GET_CODE (args[i].reg) == PARALLEL)
677 {
678 tree type = TREE_TYPE (args[i].tree_value);
b600a907 679 args[i].parallel_value
e2ff5c1b 680 = emit_group_load_into_temps (args[i].reg, args[i].value,
681 type, int_size_in_bytes (type));
682 }
683
c87678e4 684 /* If the value is expensive, and we are inside an appropriately
6a0e6138 685 short loop, put the value into a pseudo and then put the pseudo
686 into the hard reg.
687
688 For small register classes, also do this if this call uses
689 register parameters. This is to avoid reload conflicts while
690 loading the parameter registers. */
691
e2ff5c1b 692 else if ((! (REG_P (args[i].value)
693 || (GET_CODE (args[i].value) == SUBREG
694 && REG_P (SUBREG_REG (args[i].value)))))
695 && args[i].mode != BLKmode
696 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
697 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
698 || optimize))
6a0e6138 699 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
700 }
701}
702
4448f543 703#ifdef REG_PARM_STACK_SPACE
6a0e6138 704
705 /* The argument list is the property of the called routine and it
706 may clobber it. If the fixed area has been used for previous
707 parameters, we must save and restore it. */
f7c44134 708
6a0e6138 709static rtx
4c9e08a4 710save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
6a0e6138 711{
6e96b626 712 int low;
713 int high;
6a0e6138 714
6e96b626 715 /* Compute the boundary of the area that needs to be saved, if any. */
716 high = reg_parm_stack_space;
6a0e6138 717#ifdef ARGS_GROW_DOWNWARD
6e96b626 718 high += 1;
6a0e6138 719#endif
6e96b626 720 if (high > highest_outgoing_arg_in_use)
721 high = highest_outgoing_arg_in_use;
6a0e6138 722
6e96b626 723 for (low = 0; low < high; low++)
724 if (stack_usage_map[low] != 0)
725 {
726 int num_to_save;
727 enum machine_mode save_mode;
728 int delta;
729 rtx stack_area;
730 rtx save_area;
6a0e6138 731
6e96b626 732 while (stack_usage_map[--high] == 0)
733 ;
6a0e6138 734
6e96b626 735 *low_to_save = low;
736 *high_to_save = high;
737
738 num_to_save = high - low + 1;
739 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
6a0e6138 740
6e96b626 741 /* If we don't have the required alignment, must do this
742 in BLKmode. */
743 if ((low & (MIN (GET_MODE_SIZE (save_mode),
744 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
745 save_mode = BLKmode;
6a0e6138 746
747#ifdef ARGS_GROW_DOWNWARD
6e96b626 748 delta = -high;
6a0e6138 749#else
6e96b626 750 delta = low;
6a0e6138 751#endif
6e96b626 752 stack_area = gen_rtx_MEM (save_mode,
753 memory_address (save_mode,
754 plus_constant (argblock,
755 delta)));
2a631e19 756
6e96b626 757 set_mem_align (stack_area, PARM_BOUNDARY);
758 if (save_mode == BLKmode)
759 {
760 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
761 emit_block_move (validize_mem (save_area), stack_area,
762 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
763 }
764 else
765 {
766 save_area = gen_reg_rtx (save_mode);
767 emit_move_insn (save_area, stack_area);
768 }
2a631e19 769
6e96b626 770 return save_area;
771 }
772
773 return NULL_RTX;
6a0e6138 774}
775
776static void
4c9e08a4 777restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
6a0e6138 778{
779 enum machine_mode save_mode = GET_MODE (save_area);
6e96b626 780 int delta;
781 rtx stack_area;
782
6a0e6138 783#ifdef ARGS_GROW_DOWNWARD
6e96b626 784 delta = -high_to_save;
6a0e6138 785#else
6e96b626 786 delta = low_to_save;
6a0e6138 787#endif
6e96b626 788 stack_area = gen_rtx_MEM (save_mode,
789 memory_address (save_mode,
790 plus_constant (argblock, delta)));
791 set_mem_align (stack_area, PARM_BOUNDARY);
6a0e6138 792
793 if (save_mode != BLKmode)
794 emit_move_insn (stack_area, save_area);
795 else
0378dbdc 796 emit_block_move (stack_area, validize_mem (save_area),
797 GEN_INT (high_to_save - low_to_save + 1),
798 BLOCK_OP_CALL_PARM);
6a0e6138 799}
f6025ee7 800#endif /* REG_PARM_STACK_SPACE */
c87678e4 801
6a0e6138 802/* If any elements in ARGS refer to parameters that are to be passed in
803 registers, but not in memory, and whose alignment does not permit a
804 direct copy into registers, copy the values into a group of pseudos
c87678e4 805 which we will later copy into the appropriate hard registers.
6d801f27 806
807 Pseudos for each unaligned argument will be stored into the array
808 args[argnum].aligned_regs. The caller is responsible for deallocating
809 the aligned_regs array if it is nonzero. */
810
6a0e6138 811static void
4c9e08a4 812store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
6a0e6138 813{
814 int i, j;
c87678e4 815
6a0e6138 816 for (i = 0; i < num_actuals; i++)
817 if (args[i].reg != 0 && ! args[i].pass_on_stack
818 && args[i].mode == BLKmode
819 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
820 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
821 {
822 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
5f4cd670 823 int endian_correction = 0;
6a0e6138 824
f054eb3c 825 if (args[i].partial)
826 {
827 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
828 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
829 }
830 else
831 {
832 args[i].n_aligned_regs
833 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
834 }
835
4c36ffe6 836 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
6a0e6138 837
5f4cd670 838 /* Structures smaller than a word are normally aligned to the
839 least significant byte. On a BYTES_BIG_ENDIAN machine,
6a0e6138 840 this means we must skip the empty high order bytes when
841 calculating the bit offset. */
5f4cd670 842 if (bytes < UNITS_PER_WORD
843#ifdef BLOCK_REG_PADDING
844 && (BLOCK_REG_PADDING (args[i].mode,
845 TREE_TYPE (args[i].tree_value), 1)
846 == downward)
847#else
848 && BYTES_BIG_ENDIAN
849#endif
850 )
851 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
6a0e6138 852
853 for (j = 0; j < args[i].n_aligned_regs; j++)
854 {
855 rtx reg = gen_reg_rtx (word_mode);
856 rtx word = operand_subword_force (args[i].value, j, BLKmode);
857 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
6a0e6138 858
859 args[i].aligned_regs[j] = reg;
5f4cd670 860 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1445ea5b 861 word_mode, word_mode);
6a0e6138 862
863 /* There is no need to restrict this code to loading items
864 in TYPE_ALIGN sized hunks. The bitfield instructions can
865 load up entire word sized registers efficiently.
866
867 ??? This may not be needed anymore.
868 We used to emit a clobber here but that doesn't let later
869 passes optimize the instructions we emit. By storing 0 into
870 the register, later passes know that the first AND to zero out the
871 bitfield being set in the register is unnecessary. The store
872 of 0 will be deleted, as will at least the first AND. */
873
874 emit_move_insn (reg, const0_rtx);
875
876 bytes -= bitsize / BITS_PER_UNIT;
5f4cd670 877 store_bit_field (reg, bitsize, endian_correction, word_mode,
1445ea5b 878 word);
6a0e6138 879 }
880 }
881}
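/* A worked example of the endian correction above, assuming a
 hypothetical 32-bit BYTES_BIG_ENDIAN target: for a 3-byte struct,
 bytes == 3 and endian_correction == 32 - 3 * 8 == 8, so the 24-bit
 bit-field store skips the one empty high-order byte of the
 word-sized pseudo. */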
882
cb543c54 883/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
cd46caee 884 CALL_EXPR EXP.
cb543c54 885
886 NUM_ACTUALS is the total number of parameters.
887
888 N_NAMED_ARGS is the total number of named arguments.
889
cd46caee 890 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
891 value, or null.
892
cb543c54 893 FNDECL is the tree code for the target of this call (if known)
894
895 ARGS_SO_FAR holds state needed by the target to know where to place
896 the next argument.
897
898 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
899 for arguments which are passed in registers.
900
901 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
902 and may be modified by this routine.
903
dfe08167 904 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
0d568ddf 905 flags which may be modified by this routine.
eaa112a0 906
4ee9c684 907 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
908 that requires allocation of stack space.
909
eaa112a0 910 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
911 the thunked-to function. */
cb543c54 912
913static void
4c9e08a4 914initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
915 struct arg_data *args,
916 struct args_size *args_size,
917 int n_named_args ATTRIBUTE_UNUSED,
cd46caee 918 tree exp, tree struct_value_addr_value,
919 tree fndecl,
4c9e08a4 920 CUMULATIVE_ARGS *args_so_far,
921 int reg_parm_stack_space,
922 rtx *old_stack_level, int *old_pending_adj,
eaa112a0 923 int *must_preallocate, int *ecf_flags,
4ee9c684 924 bool *may_tailcall, bool call_from_thunk_p)
cb543c54 925{
926 /* 1 if scanning parms front to back, -1 if scanning back to front. */
927 int inc;
928
929 /* Count arg position in order args appear. */
930 int argpos;
931
932 int i;
c87678e4 933
cb543c54 934 args_size->constant = 0;
935 args_size->var = 0;
936
937 /* In this loop, we consider args in the order they are written.
938 We fill up ARGS from the front or from the back if necessary
939 so that in any case the first arg to be pushed ends up at the front. */
940
4448f543 941 if (PUSH_ARGS_REVERSED)
942 {
943 i = num_actuals - 1, inc = -1;
944 /* In this case, must reverse order of args
945 so that we compute and push the last arg first. */
946 }
947 else
948 {
949 i = 0, inc = 1;
950 }
cb543c54 951
cd46caee 952 /* First fill in the actual arguments in the ARGS array, splitting
953 complex arguments if necessary. */
954 {
955 int j = i;
956 call_expr_arg_iterator iter;
957 tree arg;
958
959 if (struct_value_addr_value)
960 {
961 args[j].tree_value = struct_value_addr_value;
962 j += inc;
963 }
964 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
965 {
966 tree argtype = TREE_TYPE (arg);
967 if (targetm.calls.split_complex_arg
968 && argtype
969 && TREE_CODE (argtype) == COMPLEX_TYPE
970 && targetm.calls.split_complex_arg (argtype))
971 {
972 tree subtype = TREE_TYPE (argtype);
973 arg = save_expr (arg);
974 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
975 j += inc;
976 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
977 }
978 else
979 args[j].tree_value = arg;
980 j += inc;
981 }
982 }
983
cb543c54 984 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
cd46caee 985 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
cb543c54 986 {
cd46caee 987 tree type = TREE_TYPE (args[i].tree_value);
cb543c54 988 int unsignedp;
989 enum machine_mode mode;
990
cb543c54 991 /* Replace erroneous argument with constant zero. */
4b72716d 992 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
cb543c54 993 args[i].tree_value = integer_zero_node, type = integer_type_node;
994
995 /* If TYPE is a transparent union, pass things the way we would
996 pass the first field of the union. We have already verified that
997 the modes are the same. */
851dfbff 998 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
cb543c54 999 type = TREE_TYPE (TYPE_FIELDS (type));
1000
1001 /* Decide where to pass this arg.
1002
1003 args[i].reg is nonzero if all or part is passed in registers.
1004
1005 args[i].partial is nonzero if part but not all is passed in registers,
f054eb3c 1006 and the exact value says how many bytes are passed in registers.
cb543c54 1007
1008 args[i].pass_on_stack is nonzero if the argument must at least be
1009 computed on the stack. It may then be loaded back into registers
1010 if args[i].reg is nonzero.
1011
1012 These decisions are driven by the FUNCTION_... macros and must agree
1013 with those made by function.c. */
1014
1015 /* See if this argument should be passed by invisible reference. */
cc9b8628 1016 if (pass_by_reference (args_so_far, TYPE_MODE (type),
1017 type, argpos < n_named_args))
cb543c54 1018 {
41dc12b4 1019 bool callee_copies;
1020 tree base;
1021
1022 callee_copies
13f08ee7 1023 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1024 type, argpos < n_named_args);
41dc12b4 1025
1026 /* If we're compiling a thunk, pass through invisible references
1027 instead of making a copy. */
eaa112a0 1028 if (call_from_thunk_p
41dc12b4 1029 || (callee_copies
1030 && !TREE_ADDRESSABLE (type)
1031 && (base = get_base_address (args[i].tree_value))
1032 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
cb543c54 1033 {
41dc12b4 1034 /* We can't use sibcalls if a callee-copied argument is
1035 stored in the current function's frame. */
1036 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
c71e72dd 1037 *may_tailcall = false;
1038
41dc12b4 1039 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1040 type = TREE_TYPE (args[i].tree_value);
1041
1042 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
ce95a955 1043 }
cb543c54 1044 else
1045 {
1046 /* We make a copy of the object and pass the address to the
1047 function being called. */
1048 rtx copy;
1049
4b72716d 1050 if (!COMPLETE_TYPE_P (type)
cb543c54 1051 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1052 || (flag_stack_check && ! STACK_CHECK_BUILTIN
a0c2c45b 1053 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1054 STACK_CHECK_MAX_VAR_SIZE))))
cb543c54 1055 {
1056 /* This is a variable-sized object. Make space on the stack
1057 for it. */
cd46caee 1058 rtx size_rtx = expr_size (args[i].tree_value);
cb543c54 1059
1060 if (*old_stack_level == 0)
1061 {
1062 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1063 *old_pending_adj = pending_stack_adjust;
1064 pending_stack_adjust = 0;
1065 }
1066
1067 copy = gen_rtx_MEM (BLKmode,
f7c44134 1068 allocate_dynamic_stack_space
1069 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1070 set_mem_attributes (copy, type, 1);
cb543c54 1071 }
1072 else
f7c44134 1073 copy = assign_temp (type, 0, 1, 0);
cb543c54 1074
5b5037b3 1075 store_expr (args[i].tree_value, copy, 0, false);
cb543c54 1076
41dc12b4 1077 if (callee_copies)
1078 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1079 else
1080 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1081
1082 args[i].tree_value
1083 = build_fold_addr_expr (make_tree (type, copy));
1084 type = TREE_TYPE (args[i].tree_value);
4ee9c684 1085 *may_tailcall = false;
cb543c54 1086 }
1087 }
1088
1089 mode = TYPE_MODE (type);
78a8ed03 1090 unsignedp = TYPE_UNSIGNED (type);
cb543c54 1091
45550790 1092 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1093 mode = promote_mode (type, mode, &unsignedp, 1);
cb543c54 1094
1095 args[i].unsignedp = unsignedp;
1096 args[i].mode = mode;
7a8d641b 1097
0e0be288 1098 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1099 argpos < n_named_args);
7a8d641b 1100#ifdef FUNCTION_INCOMING_ARG
1101 /* If this is a sibling call and the machine has register windows, the
1102 register window has to be unwound before calling the routine, so
1103 arguments have to go into the incoming registers. */
0e0be288 1104 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
c87678e4 1105 argpos < n_named_args);
0e0be288 1106#else
1107 args[i].tail_call_reg = args[i].reg;
7a8d641b 1108#endif
7a8d641b 1109
cb543c54 1110 if (args[i].reg)
1111 args[i].partial
f054eb3c 1112 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1113 argpos < n_named_args);
cb543c54 1114
0336f0f0 1115 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
cb543c54 1116
1117 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1118 it means that we are to pass this arg in the register(s) designated
1119 by the PARALLEL, but also to pass it in the stack. */
1120 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1121 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1122 args[i].pass_on_stack = 1;
1123
1124 /* If this is an addressable type, we must preallocate the stack
1125 since we must evaluate the object into its final location.
1126
1127 If this is to be passed in both registers and the stack, it is simpler
1128 to preallocate. */
1129 if (TREE_ADDRESSABLE (type)
1130 || (args[i].pass_on_stack && args[i].reg != 0))
1131 *must_preallocate = 1;
1132
1133 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1134 we cannot consider this function call constant. */
1135 if (TREE_ADDRESSABLE (type))
2a0c81bf 1136 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
cb543c54 1137
1138 /* Compute the stack-size of this argument. */
1139 if (args[i].reg == 0 || args[i].partial != 0
1140 || reg_parm_stack_space > 0
1141 || args[i].pass_on_stack)
1142 locate_and_pad_parm (mode, type,
1143#ifdef STACK_PARMS_IN_REG_PARM_AREA
1144 1,
1145#else
1146 args[i].reg != 0,
1147#endif
241399f6 1148 args[i].pass_on_stack ? 0 : args[i].partial,
1149 fndecl, args_size, &args[i].locate);
0fee47f4 1150#ifdef BLOCK_REG_PADDING
1151 else
1152 /* The argument is passed entirely in registers. See at which
1153 end it should be padded. */
1154 args[i].locate.where_pad =
1155 BLOCK_REG_PADDING (mode, type,
1156 int_size_in_bytes (type) <= UNITS_PER_WORD);
1157#endif
c87678e4 1158
cb543c54 1159 /* Update ARGS_SIZE, the total stack space for args so far. */
1160
241399f6 1161 args_size->constant += args[i].locate.size.constant;
1162 if (args[i].locate.size.var)
1163 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
cb543c54 1164
1165 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1166 have been used, etc. */
1167
bbafd9d2 1168 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
cb543c54 1169 argpos < n_named_args);
1170 }
1171}
1172
cc45e5e8 1173/* Update ARGS_SIZE to contain the total size for the argument block.
1174 Return the original constant component of the argument block's size.
1175
1176 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1177 for arguments passed in registers. */
1178
1179static int
4c9e08a4 1180compute_argument_block_size (int reg_parm_stack_space,
1181 struct args_size *args_size,
1182 int preferred_stack_boundary ATTRIBUTE_UNUSED)
cc45e5e8 1183{
1184 int unadjusted_args_size = args_size->constant;
1185
4448f543 1186 /* For accumulate outgoing args mode we don't need to align, since the frame
1187 will already be aligned. Align to STACK_BOUNDARY in order to prevent
35a3065a 1188 backends from generating misaligned frame sizes. */
4448f543 1189 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1190 preferred_stack_boundary = STACK_BOUNDARY;
4448f543 1191
cc45e5e8 1192 /* Compute the actual size of the argument block required. The variable
1193 and constant sizes must be combined, the size may have to be rounded,
1194 and there may be a minimum required size. */
1195
1196 if (args_size->var)
1197 {
1198 args_size->var = ARGS_SIZE_TREE (*args_size);
1199 args_size->constant = 0;
1200
d0285dd8 1201 preferred_stack_boundary /= BITS_PER_UNIT;
1202 if (preferred_stack_boundary > 1)
91b70175 1203 {
1204 /* We don't handle this case yet. To handle it correctly we have
35a3065a 1205 to add the delta, round and subtract the delta.
91b70175 1206 Currently no machine description requires this support. */
231bd014 1207 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
91b70175 1208 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1209 }
cc45e5e8 1210
1211 if (reg_parm_stack_space > 0)
1212 {
1213 args_size->var
1214 = size_binop (MAX_EXPR, args_size->var,
902de8ed 1215 ssize_int (reg_parm_stack_space));
cc45e5e8 1216
cc45e5e8 1217 /* The area corresponding to register parameters is not to count in
1218 the size of the block we need. So make the adjustment. */
63c68695 1219 if (!OUTGOING_REG_PARM_STACK_SPACE)
1220 args_size->var
1221 = size_binop (MINUS_EXPR, args_size->var,
1222 ssize_int (reg_parm_stack_space));
cc45e5e8 1223 }
1224 }
1225 else
1226 {
d0285dd8 1227 preferred_stack_boundary /= BITS_PER_UNIT;
60ecc450 1228 if (preferred_stack_boundary < 1)
1229 preferred_stack_boundary = 1;
e39fae61 1230 args_size->constant = (((args_size->constant
91b70175 1231 + stack_pointer_delta
d0285dd8 1232 + preferred_stack_boundary - 1)
1233 / preferred_stack_boundary
1234 * preferred_stack_boundary)
91b70175 1235 - stack_pointer_delta);
cc45e5e8 1236
1237 args_size->constant = MAX (args_size->constant,
1238 reg_parm_stack_space);
1239
63c68695 1240 if (!OUTGOING_REG_PARM_STACK_SPACE)
1241 args_size->constant -= reg_parm_stack_space;
cc45e5e8 1242 }
1243 return unadjusted_args_size;
1244}
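/* A worked example of the constant rounding above (hypothetical
 numbers): with args_size->constant == 20, stack_pointer_delta == 4
 and a preferred boundary of 16 bytes, (20 + 4 + 15) / 16 * 16 == 32,
 and subtracting the delta gives a constant of 28, so the stack
 pointer is 16-byte aligned once the arguments are pushed. */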
1245
caa1595a 1246/* Precompute parameters as needed for a function call.
04707f1c 1247
dfe08167 1248 FLAGS is mask of ECF_* constants.
04707f1c 1249
04707f1c 1250 NUM_ACTUALS is the number of arguments.
1251
c87678e4 1252 ARGS is an array containing information for each argument; this
1253 routine fills in the INITIAL_VALUE and VALUE fields for each
1254 precomputed argument. */
04707f1c 1255
1256static void
4c9e08a4 1257precompute_arguments (int flags, int num_actuals, struct arg_data *args)
04707f1c 1258{
1259 int i;
1260
8c78c14b 1261 /* If this is a libcall, then precompute all arguments so that we do not
67c155cb 1262 get extraneous instructions emitted as part of the libcall sequence. */
c5dc094f 1263
1264 /* If we preallocated the stack space, and some arguments must be passed
1265 on the stack, then we must precompute any parameter which contains a
1266 function call which will store arguments on the stack.
1267 Otherwise, evaluating the parameter may clobber previous parameters
1268 which have already been stored into the stack. (we have code to avoid
1269 such a case by saving the outgoing stack arguments, but it results in
1270 worse code) */
1271 if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS)
67c155cb 1272 return;
0d568ddf 1273
04707f1c 1274 for (i = 0; i < num_actuals; i++)
67c155cb 1275 {
1276 enum machine_mode mode;
701e46d0 1277
c5dc094f 1278 if ((flags & ECF_LIBCALL_BLOCK) == 0
1279 && TREE_CODE (args[i].tree_value) != CALL_EXPR)
1280 continue;
1281
67c155cb 1282 /* If this is an addressable type, we cannot pre-evaluate it. */
231bd014 1283 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
04707f1c 1284
67c155cb 1285 args[i].initial_value = args[i].value
8ec3c5c2 1286 = expand_normal (args[i].tree_value);
04707f1c 1287
67c155cb 1288 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1289 if (mode != args[i].mode)
1290 {
1291 args[i].value
1292 = convert_modes (args[i].mode, mode,
1293 args[i].value, args[i].unsignedp);
7752d341 1294#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
67c155cb 1295 /* CSE will replace this only if it contains args[i].value
1296 pseudo, so convert it down to the declared mode using
1297 a SUBREG. */
1298 if (REG_P (args[i].value)
1299 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1300 {
1301 args[i].initial_value
1302 = gen_lowpart_SUBREG (mode, args[i].value);
1303 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1304 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1305 args[i].unsignedp);
1306 }
c41c7d7a 1307#endif
67c155cb 1308 }
1309 }
04707f1c 1310}
1311
e717ffc2 1312/* Given the current state of MUST_PREALLOCATE and information about
1313 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1314 compute and return the final value for MUST_PREALLOCATE. */
1315
1316static int
c2f47e15 1317finalize_must_preallocate (int must_preallocate, int num_actuals,
1318 struct arg_data *args, struct args_size *args_size)
e717ffc2 1319{
1320 /* See if we have or want to preallocate stack space.
1321
1322 If we would have to push a partially-in-regs parm
1323 before other stack parms, preallocate stack space instead.
1324
1325 If the size of some parm is not a multiple of the required stack
1326 alignment, we must preallocate.
1327
1328 If the total size of arguments that would otherwise create a copy in
1329 a temporary (such as a CALL) is more than half the total argument list
1330 size, preallocation is faster.
1331
1332 Another reason to preallocate is if we have a machine (like the m88k)
1333 where stack alignment is required to be maintained between every
1334 pair of insns, not just when the call is made. However, we assume here
1335 that such machines either do not have push insns (and hence preallocation
1336 would occur anyway) or the problem is taken care of with
1337 PUSH_ROUNDING. */
1338
1339 if (! must_preallocate)
1340 {
1341 int partial_seen = 0;
1342 int copy_to_evaluate_size = 0;
1343 int i;
1344
1345 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1346 {
1347 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1348 partial_seen = 1;
1349 else if (partial_seen && args[i].reg == 0)
1350 must_preallocate = 1;
1351
1352 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1353 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1354 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1355 || TREE_CODE (args[i].tree_value) == COND_EXPR
1356 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1357 copy_to_evaluate_size
1358 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1359 }
1360
1361 if (copy_to_evaluate_size * 2 >= args_size->constant
1362 && args_size->constant > 0)
1363 must_preallocate = 1;
1364 }
1365 return must_preallocate;
1366}
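/* For instance (hypothetical sizes): if args_size->constant is 24 and
 12 of those bytes come from a BLKmode CALL_EXPR argument, then
 copy_to_evaluate_size * 2 == 24 >= 24 and we choose to preallocate,
 since evaluating that call into a preallocated slot beats copying
 through a temporary. */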
cc45e5e8 1367
f3012854 1368/* If we preallocated stack space, compute the address of each argument
1369 and store it into the ARGS array.
1370
c87678e4 1371 We need not ensure it is a valid memory address here; it will be
f3012854 1372 validized when it is used.
1373
1374 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1375
1376static void
4c9e08a4 1377compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
f3012854 1378{
1379 if (argblock)
1380 {
1381 rtx arg_reg = argblock;
1382 int i, arg_offset = 0;
1383
1384 if (GET_CODE (argblock) == PLUS)
1385 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1386
1387 for (i = 0; i < num_actuals; i++)
1388 {
241399f6 1389 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1390 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
f3012854 1391 rtx addr;
c5dc0c32 1392 unsigned int align, boundary;
c2ca1bab 1393 unsigned int units_on_stack = 0;
1394 enum machine_mode partial_mode = VOIDmode;
f3012854 1395
1396 /* Skip this parm if it will not be passed on the stack. */
c2ca1bab 1397 if (! args[i].pass_on_stack
1398 && args[i].reg != 0
1399 && args[i].partial == 0)
f3012854 1400 continue;
1401
1402 if (GET_CODE (offset) == CONST_INT)
1403 addr = plus_constant (arg_reg, INTVAL (offset));
1404 else
1405 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1406
1407 addr = plus_constant (addr, arg_offset);
c2ca1bab 1408
1409 if (args[i].partial != 0)
1410 {
1411 /* Only part of the parameter is being passed on the stack.
1412 Generate a simple memory reference of the correct size. */
1413 units_on_stack = args[i].locate.size.constant;
1414 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1415 MODE_INT, 1);
1416 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1417 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1418 }
1419 else
1420 {
1421 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1422 set_mem_attributes (args[i].stack,
1423 TREE_TYPE (args[i].tree_value), 1);
1424 }
c5dc0c32 1425 align = BITS_PER_UNIT;
1426 boundary = args[i].locate.boundary;
1427 if (args[i].locate.where_pad != downward)
1428 align = boundary;
1429 else if (GET_CODE (offset) == CONST_INT)
1430 {
1431 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1432 align = align & -align;
1433 }
1434 set_mem_align (args[i].stack, align);
f3012854 1435
1436 if (GET_CODE (slot_offset) == CONST_INT)
1437 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1438 else
1439 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1440
1441 addr = plus_constant (addr, arg_offset);
c2ca1bab 1442
1443 if (args[i].partial != 0)
1444 {
1445 /* Only part of the parameter is being passed on the stack.
1446 Generate a simple memory reference of the correct size.
1447 */
1448 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1449 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1450 }
1451 else
1452 {
1453 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1454 set_mem_attributes (args[i].stack_slot,
1455 TREE_TYPE (args[i].tree_value), 1);
1456 }
c5dc0c32 1457 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
a9f2963b 1458
1459 /* Function incoming arguments may overlap with sibling call
1460 outgoing arguments and we cannot allow reordering of reads
1461 from function arguments with stores to outgoing arguments
1462 of sibling calls. */
ab6ab77e 1463 set_mem_alias_set (args[i].stack, 0);
1464 set_mem_alias_set (args[i].stack_slot, 0);
f3012854 1465 }
1466 }
1467}
c87678e4 1468
f3012854 1469/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1470 in a call instruction.
1471
1472 FNDECL is the tree node for the target function. For an indirect call
1473 FNDECL will be NULL_TREE.
1474
95672afe 1475 ADDR is the operand 0 of CALL_EXPR for this call. */
f3012854 1476
1477static rtx
4c9e08a4 1478rtx_for_function_call (tree fndecl, tree addr)
f3012854 1479{
1480 rtx funexp;
1481
1482 /* Get the function to call, in the form of RTL. */
1483 if (fndecl)
1484 {
1485 /* If this is the first use of the function, see if we need to
1486 make an external definition for it. */
3d053e06 1487 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
f3012854 1488 {
1489 assemble_external (fndecl);
1490 TREE_USED (fndecl) = 1;
1491 }
1492
1493 /* Get a SYMBOL_REF rtx for the function address. */
1494 funexp = XEXP (DECL_RTL (fndecl), 0);
1495 }
1496 else
1497 /* Generate an rtx (probably a pseudo-register) for the address. */
1498 {
1499 push_temp_slots ();
8ec3c5c2 1500 funexp = expand_normal (addr);
c87678e4 1501 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
f3012854 1502 }
1503 return funexp;
1504}
1505
ff6c0ab2 1506/* Return true if and only if SIZE storage units (usually bytes)
1507 starting from address ADDR overlap with already clobbered argument
1508 area. This function is used to determine if we should give up a
1509 sibcall. */
1510
1511static bool
1512mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1513{
1514 HOST_WIDE_INT i;
1515
1516 if (addr == current_function_internal_arg_pointer)
1517 i = 0;
1518 else if (GET_CODE (addr) == PLUS
e7ffa1e6 1519 && XEXP (addr, 0) == current_function_internal_arg_pointer
ff6c0ab2 1520 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
1521 i = INTVAL (XEXP (addr, 1));
e7ffa1e6 1522 /* Return true for arg pointer based indexed addressing. */
1523 else if (GET_CODE (addr) == PLUS
1524 && (XEXP (addr, 0) == current_function_internal_arg_pointer
1525 || XEXP (addr, 1) == current_function_internal_arg_pointer))
1526 return true;
ff6c0ab2 1527 else
1528 return false;
1529
1530#ifdef ARGS_GROW_DOWNWARD
1531 i = -i - size;
1532#endif
1533 if (size > 0)
1534 {
1535 unsigned HOST_WIDE_INT k;
1536
1537 for (k = 0; k < size; k++)
1538 if (i + k < stored_args_map->n_bits
1539 && TEST_BIT (stored_args_map, i + k))
1540 return true;
1541 }
1542
1543 return false;
1544}
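
/* A minimal standalone sketch (not part of calls.c) of the test above,
   using a plain char array in place of the sbitmap and assuming the
   ARGS_GROW_DOWNWARD adjustment has already been folded into OFFSET.
   All names below are hypothetical.  */

static int
overlap_sketch (const char *stored_map, int map_bits, int offset, int size)
{
  int k;

  for (k = 0; k < size; k++)
    /* A byte of [OFFSET, OFFSET + SIZE) was stored by an earlier
       tail-call argument, so the sibcall must be abandoned.  */
    if (offset + k >= 0 && offset + k < map_bits && stored_map[offset + k])
      return 1;
  return 0;
}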
1545
cde25025 1546/* Do the register loads required for any wholly-register parms or any
1547 parms which are passed both on the stack and in a register. Their
c87678e4 1548 expressions were already evaluated.
cde25025 1549
1550 Mark all register-parms as living through the call, putting these USE
4c9e08a4 1551 insns in the CALL_INSN_FUNCTION_USAGE field.
1552
dc537795 1553 When IS_SIBCALL, run the check_sibcall_argument_overlap
42b11544 1554 check, setting *SIBCALL_FAILURE if appropriate. */
cde25025 1555
1556static void
4c9e08a4 1557load_register_parameters (struct arg_data *args, int num_actuals,
1558 rtx *call_fusage, int flags, int is_sibcall,
1559 int *sibcall_failure)
cde25025 1560{
1561 int i, j;
1562
cde25025 1563 for (i = 0; i < num_actuals; i++)
cde25025 1564 {
0e0be288 1565 rtx reg = ((flags & ECF_SIBCALL)
1566 ? args[i].tail_call_reg : args[i].reg);
cde25025 1567 if (reg)
1568 {
5f4cd670 1569 int partial = args[i].partial;
1570 int nregs;
1571 int size = 0;
42b11544 1572 rtx before_arg = get_last_insn ();
83272ab4 1573 /* Set non-negative if we must move a word at a time, even if
 1574 just one word (e.g., partial == 4 && mode == DFmode). Set
 1575 to -1 if we just use a normal move insn. This value can be
 1576 zero if the argument is a zero-sized structure. */
5f4cd670 1577 nregs = -1;
f054eb3c 1578 if (GET_CODE (reg) == PARALLEL)
1579 ;
1580 else if (partial)
1581 {
1582 gcc_assert (partial % UNITS_PER_WORD == 0);
1583 nregs = partial / UNITS_PER_WORD;
1584 }
5f4cd670 1585 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1586 {
1587 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1588 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1589 }
1590 else
1591 size = GET_MODE_SIZE (args[i].mode);
cde25025 1592
1593 /* Handle calls that pass values in multiple non-contiguous
1594 locations. The Irix 6 ABI has examples of this. */
1595
1596 if (GET_CODE (reg) == PARALLEL)
b600a907 1597 emit_group_move (reg, args[i].parallel_value);
cde25025 1598
1599 /* If simple case, just do move. If normal partial, store_one_arg
1600 has already loaded the register for us. In all other cases,
1601 load the register(s) from memory. */
1602
8e67abab 1603 else if (nregs == -1)
1604 {
1605 emit_move_insn (reg, args[i].value);
5f4cd670 1606#ifdef BLOCK_REG_PADDING
8e67abab 1607 /* Handle the case where we have a value that needs shifting
 1608 up to the msb, e.g. a QImode value that we're padding
 1609 upward on a BYTES_BIG_ENDIAN machine. */
1610 if (size < UNITS_PER_WORD
1611 && (args[i].locate.where_pad
1612 == (BYTES_BIG_ENDIAN ? upward : downward)))
1613 {
8e67abab 1614 rtx x;
1615 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
b2abd798 1616
1617 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1618 report the whole reg as used. Strictly speaking, the
1619 call only uses SIZE bytes at the msb end, but it doesn't
1620 seem worth generating rtl to say that. */
1621 reg = gen_rtx_REG (word_mode, REGNO (reg));
92966f8b 1622 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
7016c612 1623 build_int_cst (NULL_TREE, shift),
7c446c95 1624 reg, 1);
b2abd798 1625 if (x != reg)
1626 emit_move_insn (reg, x);
8e67abab 1627 }
5f4cd670 1628#endif
8e67abab 1629 }
cde25025 1630
1631 /* If we have pre-computed the values to put in the registers in
1632 the case of non-aligned structures, copy them in now. */
1633
1634 else if (args[i].n_aligned_regs != 0)
1635 for (j = 0; j < args[i].n_aligned_regs; j++)
1636 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1637 args[i].aligned_regs[j]);
1638
1639 else if (partial == 0 || args[i].pass_on_stack)
5f4cd670 1640 {
1641 rtx mem = validize_mem (args[i].value);
1642
ff6c0ab2 1643 /* Check for overlap with already clobbered argument area. */
1644 if (is_sibcall
1645 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1646 size))
1647 *sibcall_failure = 1;
1648
5f4cd670 1649 /* Handle a BLKmode that needs shifting. */
8e67abab 1650 if (nregs == 1 && size < UNITS_PER_WORD
2c267f1a 1651#ifdef BLOCK_REG_PADDING
1652 && args[i].locate.where_pad == downward
1653#else
1654 && BYTES_BIG_ENDIAN
1655#endif
1656 )
5f4cd670 1657 {
1658 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1659 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1660 rtx x = gen_reg_rtx (word_mode);
1661 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
92966f8b 1662 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1663 : LSHIFT_EXPR;
5f4cd670 1664
1665 emit_move_insn (x, tem);
92966f8b 1666 x = expand_shift (dir, word_mode, x,
7016c612 1667 build_int_cst (NULL_TREE, shift),
7c446c95 1668 ri, 1);
5f4cd670 1669 if (x != ri)
1670 emit_move_insn (ri, x);
1671 }
1672 else
5f4cd670 1673 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1674 }
cde25025 1675
42b11544 1676 /* When a parameter is a block, and perhaps in other cases, it is
1677 possible that it did a load from an argument slot that was
6a8fa8e2 1678 already clobbered. */
42b11544 1679 if (is_sibcall
1680 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1681 *sibcall_failure = 1;
1682
cde25025 1683 /* Handle calls that pass values in multiple non-contiguous
1684 locations. The Irix 6 ABI has examples of this. */
1685 if (GET_CODE (reg) == PARALLEL)
1686 use_group_regs (call_fusage, reg);
1687 else if (nregs == -1)
1688 use_reg (call_fusage, reg);
c75d013c 1689 else if (nregs > 0)
1690 use_regs (call_fusage, REGNO (reg), nregs);
cde25025 1691 }
1692 }
1693}
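
/* A small sketch (not part of calls.c) of the word-count rounding used
   for BLKmode arguments above: SIZE bytes need this many word-sized
   registers.  E.g. 10 bytes with 4-byte words -> 3 registers.  */

static int
words_for_bytes_sketch (int size, int units_per_word)
{
  return (size + units_per_word - 1) / units_per_word;
}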
1694
92e1ef5b 1695/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1696 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1697 bytes, then we would need to push some additional bytes to pad the
481feae3 1698 arguments. So, we compute an adjustment to the stack pointer for an
1699 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1700 bytes. Then, when the arguments are pushed the stack will be perfectly
1701 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1702 be popped after the call. Returns the adjustment. */
92e1ef5b 1703
481feae3 1704static int
4c9e08a4 1705combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1706 struct args_size *args_size,
38413c80 1707 unsigned int preferred_unit_stack_boundary)
92e1ef5b 1708{
1709 /* The number of bytes to pop so that the stack will be
1710 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1711 HOST_WIDE_INT adjustment;
1712 /* The alignment of the stack after the arguments are pushed, if we
 1713 just pushed the arguments without adjusting the stack here. */
38413c80 1714 unsigned HOST_WIDE_INT unadjusted_alignment;
92e1ef5b 1715
c87678e4 1716 unadjusted_alignment
92e1ef5b 1717 = ((stack_pointer_delta + unadjusted_args_size)
1718 % preferred_unit_stack_boundary);
1719
1720 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1721 as possible -- leaving just enough left to cancel out the
1722 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1723 PENDING_STACK_ADJUST is non-negative, and congruent to
1724 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1725
1726 /* Begin by trying to pop all the bytes. */
c87678e4 1727 unadjusted_alignment
1728 = (unadjusted_alignment
92e1ef5b 1729 - (pending_stack_adjust % preferred_unit_stack_boundary));
1730 adjustment = pending_stack_adjust;
1731 /* Push enough additional bytes that the stack will be aligned
1732 after the arguments are pushed. */
d3ef58ec 1733 if (preferred_unit_stack_boundary > 1)
1734 {
3dc35e62 1735 if (unadjusted_alignment > 0)
c87678e4 1736 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
d3ef58ec 1737 else
c87678e4 1738 adjustment += unadjusted_alignment;
d3ef58ec 1739 }
c87678e4 1740
92e1ef5b 1741 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1742 bytes after the call. The right number is the entire
1743 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1744 by the arguments in the first place. */
c87678e4 1745 args_size->constant
92e1ef5b 1746 = pending_stack_adjust - adjustment + unadjusted_args_size;
1747
481feae3 1748 return adjustment;
92e1ef5b 1749}
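
/* A standalone sketch (not part of calls.c) of the arithmetic above,
   with all quantities as plain byte counts.  Returns how many bytes to
   pop now so that pushing ARGS_SIZE further bytes leaves the stack
   aligned to BOUNDARY.  */

static int
combine_adjustment_sketch (int stack_delta, int args_size,
			   int pending_adjust, int boundary)
{
  int misalign = (stack_delta + args_size) % boundary;
  int adjustment = pending_adjust;

  misalign -= pending_adjust % boundary;
  if (boundary > 1)
    {
      if (misalign > 0)
	adjustment -= boundary - misalign;
      else
	adjustment += misalign;
    }
  /* E.g. stack_delta 12, args_size 8, pending_adjust 10, boundary 16
     yields 4: popping 4 bytes leaves a delta of 8, and pushing the 8
     argument bytes then lands exactly on the 16-byte boundary.  */
  return adjustment;
}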
1750
7ecc63d3 1751/* Scan expression X to see whether it dereferences any argument slots
 1752 already clobbered by tail call arguments (as noted in the stored_args_map
 1753 bitmap).
d10cfa8d 1754 Return nonzero if X dereferences such an argument slot,
7ecc63d3 1755 zero otherwise. */
1756
1757static int
4c9e08a4 1758check_sibcall_argument_overlap_1 (rtx x)
7ecc63d3 1759{
1760 RTX_CODE code;
1761 int i, j;
7ecc63d3 1762 const char *fmt;
1763
1764 if (x == NULL_RTX)
1765 return 0;
1766
1767 code = GET_CODE (x);
1768
1769 if (code == MEM)
ff6c0ab2 1770 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1771 GET_MODE_SIZE (GET_MODE (x)));
7ecc63d3 1772
c87678e4 1773 /* Scan all subexpressions. */
7ecc63d3 1774 fmt = GET_RTX_FORMAT (code);
1775 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1776 {
1777 if (*fmt == 'e')
c87678e4 1778 {
1779 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1780 return 1;
1781 }
7ecc63d3 1782 else if (*fmt == 'E')
c87678e4 1783 {
1784 for (j = 0; j < XVECLEN (x, i); j++)
1785 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1786 return 1;
1787 }
7ecc63d3 1788 }
1789 return 0;
7ecc63d3 1790}
1791
 1792/* Scan the sequence after INSN to see whether it dereferences any argument
 1793 slots already clobbered by tail call arguments (as noted in the
42b11544 1794 stored_args_map bitmap). If MARK_STORED_ARGS_MAP, afterwards add the
 1795 stack slots for ARG to the stored_args_map bitmap (when ARG is a register,
 1796 MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence after
 1797 INSN dereferences such argument slots, zero otherwise. */
7ecc63d3 1798
1799static int
4c9e08a4 1800check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
c87678e4 1801{
7ecc63d3 1802 int low, high;
1803
1804 if (insn == NULL_RTX)
1805 insn = get_insns ();
1806 else
1807 insn = NEXT_INSN (insn);
1808
1809 for (; insn; insn = NEXT_INSN (insn))
c87678e4 1810 if (INSN_P (insn)
1811 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
7ecc63d3 1812 break;
1813
42b11544 1814 if (mark_stored_args_map)
1815 {
db10eec8 1816#ifdef ARGS_GROW_DOWNWARD
241399f6 1817 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
db10eec8 1818#else
241399f6 1819 low = arg->locate.slot_offset.constant;
db10eec8 1820#endif
1821
241399f6 1822 for (high = low + arg->locate.size.constant; low < high; low++)
42b11544 1823 SET_BIT (stored_args_map, low);
1824 }
7ecc63d3 1825 return insn != NULL_RTX;
1826}
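
/* A minimal sketch (not part of calls.c) of the marking step above:
   record bytes [LOW, LOW + SIZE) of the outgoing argument area in a
   plain char array standing in for stored_args_map.  When arguments
   grow downward, the caller first computes LOW as -slot_offset - size.  */

static void
mark_stored_args_sketch (char *stored_map, int low, int size)
{
  int high;

  for (high = low + size; low < high; low++)
    stored_map[low] = 1;
}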
1827
05d18e8b 1828/* Given that a function returns a value of mode MODE at the most
1829 significant end of hard register VALUE, shift VALUE left or right
1830 as specified by LEFT_P. Return true if some action was needed. */
2c8ff1ed 1831
05d18e8b 1832bool
1833shift_return_value (enum machine_mode mode, bool left_p, rtx value)
2c8ff1ed 1834{
05d18e8b 1835 HOST_WIDE_INT shift;
1836
1837 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1838 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1839 if (shift == 0)
1840 return false;
1841
1842 /* Use ashr rather than lshr for right shifts. This is for the benefit
1843 of the MIPS port, which requires SImode values to be sign-extended
1844 when stored in 64-bit registers. */
1845 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1846 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1847 gcc_unreachable ();
1848 return true;
2c8ff1ed 1849}
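
/* Worked example for the shift above (illustrative only): an SImode
   (32-bit) value returned at the most significant end of a 64-bit hard
   register needs shift = 64 - 32 = 32; LEFT_P selects a left shift for
   outgoing values and an arithmetic right shift otherwise, matching the
   MIPS sign-extension note in the code.  */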
1850
90af1361 1851/* If X is a likely-spilled register value, copy it to a pseudo
1852 register and return that register. Return X otherwise. */
1853
1854static rtx
1855avoid_likely_spilled_reg (rtx x)
1856{
1857 rtx new;
1858
1859 if (REG_P (x)
1860 && HARD_REGISTER_P (x)
1861 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
1862 {
1863 /* Make sure that we generate a REG rather than a CONCAT.
1864 Moves into CONCATs can need nontrivial instructions,
1865 and the whole point of this function is to avoid
1866 using the hard register directly in such a situation. */
1867 generating_concat_p = 0;
1868 new = gen_reg_rtx (GET_MODE (x));
1869 generating_concat_p = 1;
1870 emit_move_insn (new, x);
1871 return new;
1872 }
1873 return x;
1874}
1875
c2f47e15 1876/* Generate all the code for a CALL_EXPR exp
66d433c7 1877 and return an rtx for its value.
1878 Store the value in TARGET (specified as an rtx) if convenient.
1879 If the value is stored in TARGET then TARGET is returned.
1880 If IGNORE is nonzero, then we ignore the value of the function call. */
1881
1882rtx
4c9e08a4 1883expand_call (tree exp, rtx target, int ignore)
66d433c7 1884{
60ecc450 1885 /* Nonzero if we are currently expanding a call. */
1886 static int currently_expanding_call = 0;
1887
66d433c7 1888 /* RTX for the function to be called. */
1889 rtx funexp;
60ecc450 1890 /* Sequence of insns to perform a normal "call". */
1891 rtx normal_call_insns = NULL_RTX;
4ee9c684 1892 /* Sequence of insns to perform a tail "call". */
60ecc450 1893 rtx tail_call_insns = NULL_RTX;
66d433c7 1894 /* Data type of the function. */
1895 tree funtype;
915e81b8 1896 tree type_arg_types;
66d433c7 1897 /* Declaration of the function being called,
1898 or 0 if the function is computed (not known by name). */
1899 tree fndecl = 0;
e100aadc 1900 /* The type of the function being called. */
1901 tree fntype;
4ee9c684 1902 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
60ecc450 1903 int pass;
66d433c7 1904
1905 /* Register in which non-BLKmode value will be returned,
1906 or 0 if no value or if value is BLKmode. */
1907 rtx valreg;
1908 /* Address where we should return a BLKmode value;
1909 0 if value not BLKmode. */
1910 rtx structure_value_addr = 0;
1911 /* Nonzero if that address is being passed by treating it as
1912 an extra, implicit first parameter. Otherwise,
1913 it is passed by being copied directly into struct_value_rtx. */
1914 int structure_value_addr_parm = 0;
cd46caee 1915 /* Holds the value of implicit argument for the struct value. */
1916 tree structure_value_addr_value = NULL_TREE;
66d433c7 1917 /* Size of aggregate value wanted, or zero if none wanted
1918 or if we are using the non-reentrant PCC calling convention
1919 or expecting the value in registers. */
997d68fe 1920 HOST_WIDE_INT struct_value_size = 0;
66d433c7 1921 /* Nonzero if called function returns an aggregate in memory PCC style,
1922 by returning the address of where to find it. */
1923 int pcc_struct_value = 0;
45550790 1924 rtx struct_value = 0;
66d433c7 1925
1926 /* Number of actual parameters in this call, including struct value addr. */
1927 int num_actuals;
1928 /* Number of named args. Args after this are anonymous ones
1929 and they must all go on the stack. */
1930 int n_named_args;
cd46caee 1931 /* Number of complex actual arguments that need to be split. */
1932 int num_complex_actuals = 0;
66d433c7 1933
1934 /* Vector of information about each argument.
1935 Arguments are numbered in the order they will be pushed,
1936 not the order they are written. */
1937 struct arg_data *args;
1938
1939 /* Total size in bytes of all the stack-parms scanned so far. */
1940 struct args_size args_size;
0e0be288 1941 struct args_size adjusted_args_size;
66d433c7 1942 /* Size of arguments before any adjustments (such as rounding). */
cc45e5e8 1943 int unadjusted_args_size;
66d433c7 1944 /* Data on reg parms scanned so far. */
1945 CUMULATIVE_ARGS args_so_far;
1946 /* Nonzero if a reg parm has been scanned. */
1947 int reg_parm_seen;
66d433c7 1949
c87678e4 1950 /* Nonzero if we must avoid push-insns in the args for this call.
66d433c7 1951 If stack space is allocated for register parameters, but not by the
1952 caller, then it is preallocated in the fixed part of the stack frame.
1953 So the entire argument block must then be preallocated (i.e., we
1954 ignore PUSH_ROUNDING in that case). */
1955
4448f543 1956 int must_preallocate = !PUSH_ARGS;
66d433c7 1957
eb2f80f3 1958 /* Size of the stack reserved for parameter registers. */
2d7187c2 1959 int reg_parm_stack_space = 0;
1960
66d433c7 1961 /* Address of space preallocated for stack parms
1962 (on machines that lack push insns), or 0 if space not preallocated. */
1963 rtx argblock = 0;
1964
dfe08167 1965 /* Mask of ECF_ flags. */
1966 int flags = 0;
4448f543 1967#ifdef REG_PARM_STACK_SPACE
66d433c7 1968 /* Define the boundary of the register parm stack space that needs to be
6e96b626 1969 saved, if any. */
1970 int low_to_save, high_to_save;
66d433c7 1971 rtx save_area = 0; /* Place that it is saved */
1972#endif
1973
66d433c7 1974 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1975 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 1976 char *stack_usage_map_buf = NULL;
66d433c7 1977
9069face 1978 int old_stack_allocated;
1979
1980 /* State variables to track stack modifications. */
66d433c7 1981 rtx old_stack_level = 0;
9069face 1982 int old_stack_arg_under_construction = 0;
65dccdb1 1983 int old_pending_adj = 0;
66d433c7 1984 int old_inhibit_defer_pop = inhibit_defer_pop;
9069face 1985
1986 /* Some stack pointer alterations we make are performed via
1987 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1988 which we then also need to save/restore along the way. */
31d035ca 1989 int old_stack_pointer_delta = 0;
9069face 1990
60ecc450 1991 rtx call_fusage;
c2f47e15 1992 tree p = CALL_EXPR_FN (exp);
1993 tree addr = CALL_EXPR_FN (exp);
19cb6b50 1994 int i;
92e1ef5b 1995 /* The alignment of the stack, in bits. */
38413c80 1996 unsigned HOST_WIDE_INT preferred_stack_boundary;
92e1ef5b 1997 /* The alignment of the stack, in bytes. */
38413c80 1998 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
4ee9c684 1999 /* The static chain value to use for this call. */
2000 rtx static_chain_value;
dfe08167 2001 /* See if this is "nothrow" function call. */
2002 if (TREE_NOTHROW (exp))
2003 flags |= ECF_NOTHROW;
2004
4ee9c684 2005 /* See if we can find a DECL-node for the actual function, and get the
2006 function attributes (flags) from the function decl or type node. */
97a1590b 2007 fndecl = get_callee_fndecl (exp);
2008 if (fndecl)
66d433c7 2009 {
e100aadc 2010 fntype = TREE_TYPE (fndecl);
97a1590b 2011 flags |= flags_from_decl_or_type (fndecl);
66d433c7 2012 }
97a1590b 2013 else
8a8cdb8d 2014 {
e100aadc 2015 fntype = TREE_TYPE (TREE_TYPE (p));
e100aadc 2016 flags |= flags_from_decl_or_type (fntype);
8a8cdb8d 2017 }
d490e2f2 2018
e100aadc 2019 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
45550790 2020
4a081ddd 2021 /* Warn if this value is an aggregate type,
2022 regardless of which calling convention we are using for it. */
efb9d9ee 2023 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2024 warning (OPT_Waggregate_return, "function call has aggregate value");
4a081ddd 2025
2026 /* If the result of a pure or const function call is ignored (or void),
2027 and none of its arguments are volatile, we can avoid expanding the
2028 call and just evaluate the arguments for side-effects. */
2029 if ((flags & (ECF_CONST | ECF_PURE))
2030 && (ignore || target == const0_rtx
2031 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2032 {
2033 bool volatilep = false;
2034 tree arg;
cd46caee 2035 call_expr_arg_iterator iter;
4a081ddd 2036
cd46caee 2037 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2038 if (TREE_THIS_VOLATILE (arg))
4a081ddd 2039 {
2040 volatilep = true;
2041 break;
2042 }
2043
2044 if (! volatilep)
2045 {
cd46caee 2046 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2047 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
4a081ddd 2048 return const0_rtx;
2049 }
2050 }
2051
2d7187c2 2052#ifdef REG_PARM_STACK_SPACE
2d7187c2 2053 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2054#endif
2d7187c2 2055
63c68695 2056 if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS)
997d68fe 2057 must_preallocate = 1;
997d68fe 2058
66d433c7 2059 /* Set up a place to return a structure. */
2060
2061 /* Cater to broken compilers. */
45550790 2062 if (aggregate_value_p (exp, fndecl))
66d433c7 2063 {
2064 /* This call returns a big structure. */
2a0c81bf 2065 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
66d433c7 2066
2067#ifdef PCC_STATIC_STRUCT_RETURN
f49c64ba 2068 {
2069 pcc_struct_value = 1;
f49c64ba 2070 }
2071#else /* not PCC_STATIC_STRUCT_RETURN */
2072 {
2073 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
66d433c7 2074
ea523851 2075 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
f49c64ba 2076 structure_value_addr = XEXP (target, 0);
2077 else
2078 {
f49c64ba 2079 /* For variable-sized objects, we must be called with a target
2080 specified. If we were to allocate space on the stack here,
2081 we would have no way of knowing when to free it. */
dee3c9dd 2082 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
66d433c7 2083
930f0e87 2084 mark_temp_addr_taken (d);
2085 structure_value_addr = XEXP (d, 0);
f49c64ba 2086 target = 0;
2087 }
2088 }
2089#endif /* not PCC_STATIC_STRUCT_RETURN */
66d433c7 2090 }
2091
0e0be288 2092 /* Figure out the amount to which the stack should be aligned. */
0e0be288 2093 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
28992b23 2094 if (fndecl)
2095 {
2096 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2097 if (i && i->preferred_incoming_stack_boundary)
2098 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2099 }
0e0be288 2100
2101 /* Operand 0 is a pointer-to-function; get the type of the function. */
95672afe 2102 funtype = TREE_TYPE (addr);
231bd014 2103 gcc_assert (POINTER_TYPE_P (funtype));
0e0be288 2104 funtype = TREE_TYPE (funtype);
2105
cd46caee 2106 /* Count whether there are actual complex arguments that need to be split
2107 into their real and imaginary parts. Munge the type_arg_types
2108 appropriately here as well. */
92d40bc4 2109 if (targetm.calls.split_complex_arg)
915e81b8 2110 {
cd46caee 2111 call_expr_arg_iterator iter;
2112 tree arg;
2113 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2114 {
2115 tree type = TREE_TYPE (arg);
2116 if (type && TREE_CODE (type) == COMPLEX_TYPE
2117 && targetm.calls.split_complex_arg (type))
2118 num_complex_actuals++;
2119 }
915e81b8 2120 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
915e81b8 2121 }
2122 else
2123 type_arg_types = TYPE_ARG_TYPES (funtype);
2124
0e0be288 2125 if (flags & ECF_MAY_BE_ALLOCA)
2126 current_function_calls_alloca = 1;
2127
2128 /* If struct_value_rtx is 0, it means pass the address
cd46caee 2129 as if it were an extra parameter. Put the argument expression
2130 in structure_value_addr_value. */
45550790 2131 if (structure_value_addr && struct_value == 0)
0e0be288 2132 {
2133 /* If structure_value_addr is a REG other than
 2134 virtual_outgoing_args_rtx, we can always use it. If it
2135 is not a REG, we must always copy it into a register.
2136 If it is virtual_outgoing_args_rtx, we must copy it to another
2137 register in some cases. */
8ad4c111 2138 rtx temp = (!REG_P (structure_value_addr)
0e0be288 2139 || (ACCUMULATE_OUTGOING_ARGS
2140 && stack_arg_under_construction
2141 && structure_value_addr == virtual_outgoing_args_rtx)
0d568ddf 2142 ? copy_addr_to_reg (convert_memory_address
e100aadc 2143 (Pmode, structure_value_addr))
0e0be288 2144 : structure_value_addr);
2145
cd46caee 2146 structure_value_addr_value =
2147 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
0e0be288 2148 structure_value_addr_parm = 1;
2149 }
2150
2151 /* Count the arguments and set NUM_ACTUALS. */
cd46caee 2152 num_actuals =
2153 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
0e0be288 2154
2155 /* Compute number of named args.
30a10006 2156 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2157
2158 if (type_arg_types != 0)
2159 n_named_args
2160 = (list_length (type_arg_types)
2161 /* Count the struct value address, if it is passed as a parm. */
2162 + structure_value_addr_parm);
2163 else
2164 /* If we know nothing, treat all args as named. */
2165 n_named_args = num_actuals;
2166
2167 /* Start updating where the next arg would go.
2168
2169 On some machines (such as the PA) indirect calls have a different
2170 calling convention than normal calls. The fourth argument in
2171 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2172 or not. */
2173 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2174
2175 /* Now possibly adjust the number of named args.
0e0be288 2176 Normally, don't include the last named arg if anonymous args follow.
8bdddbd1 2177 We do include the last named arg if
2178 targetm.calls.strict_argument_naming() returns nonzero.
0e0be288 2179 (If no anonymous args follow, the result of list_length is actually
2180 one too large. This is harmless.)
2181
a107cd89 2182 If targetm.calls.pretend_outgoing_varargs_named() returns
8bdddbd1 2183 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2184 this machine will be able to place unnamed args that were passed
2185 in registers into the stack. So treat all args as named. This
2186 allows the insns emitting for a specific argument list to be
2187 independent of the function declaration.
a107cd89 2188
2189 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2190 we do not have any reliable way to pass unnamed args in
2191 registers, so we must force them into memory. */
0e0be288 2192
30a10006 2193 if (type_arg_types != 0
2194 && targetm.calls.strict_argument_naming (&args_so_far))
2195 ;
2196 else if (type_arg_types != 0
2197 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2198 /* Don't include the last named arg. */
2199 --n_named_args;
0e0be288 2200 else
30a10006 2201 /* Treat all args as named. */
0e0be288 2202 n_named_args = num_actuals;
2203
0e0be288 2204 /* Make a vector to hold all the information about each arg. */
f0af5a88 2205 args = alloca (num_actuals * sizeof (struct arg_data));
2206 memset (args, 0, num_actuals * sizeof (struct arg_data));
0e0be288 2207
00dddcf2 2208 /* Build up entries in the ARGS array, compute the size of the
2209 arguments into ARGS_SIZE, etc. */
0e0be288 2210 initialize_argument_information (num_actuals, args, &args_size,
cd46caee 2211 n_named_args, exp,
2212 structure_value_addr_value, fndecl,
0e0be288 2213 &args_so_far, reg_parm_stack_space,
2214 &old_stack_level, &old_pending_adj,
eaa112a0 2215 &must_preallocate, &flags,
4ee9c684 2216 &try_tail_call, CALL_FROM_THUNK_P (exp));
0e0be288 2217
2218 if (args_size.var)
2219 {
2220 /* If this function requires a variable-sized argument list, don't
2221 try to make a cse'able block for this call. We may be able to
2222 do this eventually, but it is too complicated to keep track of
1e625a2e 2223 what insns go in the cse'able block and which don't. */
0e0be288 2224
2a0c81bf 2225 flags &= ~ECF_LIBCALL_BLOCK;
0e0be288 2226 must_preallocate = 1;
2227 }
2228
2229 /* Now make final decision about preallocating stack space. */
2230 must_preallocate = finalize_must_preallocate (must_preallocate,
2231 num_actuals, args,
2232 &args_size);
2233
2234 /* If the structure value address will reference the stack pointer, we
2235 must stabilize it. We don't need to do this if we know that we are
2236 not going to adjust the stack pointer in processing this call. */
2237
2238 if (structure_value_addr
2239 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2240 || reg_mentioned_p (virtual_outgoing_args_rtx,
2241 structure_value_addr))
2242 && (args_size.var
2243 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2244 structure_value_addr = copy_to_reg (structure_value_addr);
60ecc450 2245
0d568ddf 2246 /* Tail calls can make things harder to debug, and we've traditionally
4f8af819 2247 pushed these optimizations into -O2. Don't try if we're already
fdf2b689 2248 expanding a call, as that means we're an argument. Don't try if
011e6b51 2249 there are cleanups, as we know there's code to follow the call. */
60ecc450 2250
0e0be288 2251 if (currently_expanding_call++ != 0
2252 || !flag_optimize_sibling_calls
4ee9c684 2253 || args_size.var
3072d30e 2254 || lookup_stmt_eh_region (exp) >= 0
2255 || dbg_cnt (tail_call) == false)
4ee9c684 2256 try_tail_call = 0;
0e0be288 2257
 2258 /* Other reasons for the tail call optimization to fail. */
2259 if (
2260#ifdef HAVE_sibcall_epilogue
2261 !HAVE_sibcall_epilogue
2262#else
2263 1
2264#endif
2265 || !try_tail_call
2266 /* Doing sibling call optimization needs some work, since
2267 structure_value_addr can be allocated on the stack.
2268 It does not seem worth the effort since few optimizable
2269 sibling calls will return a structure. */
2270 || structure_value_addr != NULL_RTX
805e22b2 2271 /* Check whether the target is able to optimize the call
2272 into a sibcall. */
883b2e73 2273 || !targetm.function_ok_for_sibcall (fndecl, exp)
805e22b2 2274 /* Functions that do not return exactly once may not be sibcall
a0c938f0 2275 optimized. */
4fec1d6c 2276 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
95672afe 2277 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
4c4a1039 2278 /* If the called function is nested in the current one, it might access
a0c938f0 2279 some of the caller's arguments, but could clobber them beforehand if
2280 the argument areas are shared. */
4c4a1039 2281 || (fndecl && decl_function_context (fndecl) == current_function_decl)
0e0be288 2282 /* If this function requires more stack slots than the current
99b442ff 2283 function, we cannot change it into a sibling call.
2284 current_function_pretend_args_size is not part of the
2285 stack allocated by our caller. */
2286 || args_size.constant > (current_function_args_size
2287 - current_function_pretend_args_size)
0e0be288 2288 /* If the callee pops its own arguments, then it must pop exactly
2289 the same number of arguments as the current function. */
e2e9c55b 2290 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2291 != RETURN_POPS_ARGS (current_function_decl,
2292 TREE_TYPE (current_function_decl),
2293 current_function_args_size))
dc24ddbd 2294 || !lang_hooks.decls.ok_for_sibcall (fndecl))
8b1cb18e 2295 try_tail_call = 0;
4b066641 2296
d0285dd8 2297 /* Ensure current function's preferred stack boundary is at least
2298 what we need. We don't have to increase alignment for recursive
2299 functions. */
2300 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2301 && fndecl != current_function_decl)
2302 cfun->preferred_stack_boundary = preferred_stack_boundary;
28992b23 2303 if (fndecl == current_function_decl)
2304 cfun->recursive_call_emit = true;
d0285dd8 2305
0e0be288 2306 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4b066641 2307
60ecc450 2308 /* We want to make two insn chains; one for a sibling call, the other
2309 for a normal call. We will select one of the two chains after
2310 initial RTL generation is complete. */
6e96b626 2311 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
60ecc450 2312 {
2313 int sibcall_failure = 0;
35a3065a 2314 /* We want to emit any pending stack adjustments before the tail
60ecc450 2315 recursion "call". That way we know any adjustment after the tail
0d568ddf 2316 recursion call can be ignored if we indeed use the tail
60ecc450 2317 call expansion. */
9a5bbcc2 2318 int save_pending_stack_adjust = 0;
2319 int save_stack_pointer_delta = 0;
60ecc450 2320 rtx insns;
c0e7e9f7 2321 rtx before_call, next_arg_reg, after_args;
1e2b2ab3 2322
60ecc450 2323 if (pass == 0)
2324 {
60ecc450 2325 /* State variables we need to save and restore between
2326 iterations. */
2327 save_pending_stack_adjust = pending_stack_adjust;
91b70175 2328 save_stack_pointer_delta = stack_pointer_delta;
60ecc450 2329 }
dfe08167 2330 if (pass)
2331 flags &= ~ECF_SIBCALL;
2332 else
2333 flags |= ECF_SIBCALL;
66d433c7 2334
60ecc450 2335 /* Other state variables that we must reinitialize each time
dfe08167 2336 through the loop (that are not initialized by the loop itself). */
60ecc450 2337 argblock = 0;
2338 call_fusage = 0;
2f921ec9 2339
c87678e4 2340 /* Start a new sequence for the normal call case.
66d433c7 2341
60ecc450 2342 From this point on, if the sibling call fails, we want to set
2343 sibcall_failure instead of continuing the loop. */
2344 start_sequence ();
412321ce 2345
60ecc450 2346 /* Don't let pending stack adjusts add up to too much.
2347 Also, do all pending adjustments now if there is any chance
2348 this might be a call to alloca or if we are expanding a sibling
ff3ae375 2349 call sequence.
82e95be3 2350 Also do the adjustments before a throwing call, otherwise
2351 exception handling can fail; PR 19225. */
60ecc450 2352 if (pending_stack_adjust >= 32
5edaabad 2353 || (pending_stack_adjust > 0
ff3ae375 2354 && (flags & ECF_MAY_BE_ALLOCA))
82e95be3 2355 || (pending_stack_adjust > 0
2356 && flag_exceptions && !(flags & ECF_NOTHROW))
60ecc450 2357 || pass == 0)
2358 do_pending_stack_adjust ();
66d433c7 2359
544c4b41 2360 /* When calling a const function, we must pop the stack args right away,
2361 so that the pop is deleted or moved with the call. */
2a0c81bf 2362 if (pass && (flags & ECF_LIBCALL_BLOCK))
544c4b41 2363 NO_DEFER_POP;
2364
60ecc450 2365 /* Precompute any arguments as needed. */
02510658 2366 if (pass)
2367 precompute_arguments (flags, num_actuals, args);
66d433c7 2368
60ecc450 2369 /* Now we are about to start emitting insns that can be deleted
2370 if a libcall is deleted. */
2a0c81bf 2371 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
60ecc450 2372 start_sequence ();
66d433c7 2373
71d89928 2374 if (pass == 0 && cfun->stack_protect_guard)
2375 stack_protect_epilogue ();
2376
0e0be288 2377 adjusted_args_size = args_size;
481feae3 2378 /* Compute the actual size of the argument block required. The variable
2379 and constant sizes must be combined, the size may have to be rounded,
2380 and there may be a minimum required size. When generating a sibcall
2381 pattern, do not round up, since we'll be re-using whatever space our
2382 caller provided. */
2383 unadjusted_args_size
c87678e4 2384 = compute_argument_block_size (reg_parm_stack_space,
2385 &adjusted_args_size,
481feae3 2386 (pass == 0 ? 0
2387 : preferred_stack_boundary));
2388
c87678e4 2389 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
481feae3 2390
02510658 2391 /* The argument block when performing a sibling call is the
a0c938f0 2392 incoming argument block. */
02510658 2393 if (pass == 0)
7ecc63d3 2394 {
2395 argblock = virtual_incoming_args_rtx;
bd54bbc6 2396 argblock
2397#ifdef STACK_GROWS_DOWNWARD
2398 = plus_constant (argblock, current_function_pretend_args_size);
2399#else
2400 = plus_constant (argblock, -current_function_pretend_args_size);
2401#endif
7ecc63d3 2402 stored_args_map = sbitmap_alloc (args_size.constant);
2403 sbitmap_zero (stored_args_map);
2404 }
481feae3 2405
60ecc450 2406 /* If we have no actual push instructions, or shouldn't use them,
2407 make space for all args right now. */
0e0be288 2408 else if (adjusted_args_size.var != 0)
66d433c7 2409 {
60ecc450 2410 if (old_stack_level == 0)
2411 {
2412 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9069face 2413 old_stack_pointer_delta = stack_pointer_delta;
60ecc450 2414 old_pending_adj = pending_stack_adjust;
2415 pending_stack_adjust = 0;
60ecc450 2416 /* stack_arg_under_construction says whether a stack arg is
2417 being constructed at the old stack level. Pushing the stack
2418 gets a clean outgoing argument block. */
2419 old_stack_arg_under_construction = stack_arg_under_construction;
2420 stack_arg_under_construction = 0;
60ecc450 2421 }
0e0be288 2422 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
66d433c7 2423 }
60ecc450 2424 else
2425 {
2426 /* Note that we must go through the motions of allocating an argument
2427 block even if the size is zero because we may be storing args
2428 in the area reserved for register arguments, which may be part of
2429 the stack frame. */
7221f864 2430
0e0be288 2431 int needed = adjusted_args_size.constant;
66d433c7 2432
60ecc450 2433 /* Store the maximum argument space used. It will be pushed by
2434 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2435 checking). */
66d433c7 2436
60ecc450 2437 if (needed > current_function_outgoing_args_size)
2438 current_function_outgoing_args_size = needed;
66d433c7 2439
60ecc450 2440 if (must_preallocate)
2441 {
4448f543 2442 if (ACCUMULATE_OUTGOING_ARGS)
2443 {
02510658 2444 /* Since the stack pointer will never be pushed, it is
2445 possible for the evaluation of a parm to clobber
2446 something we have already written to the stack.
2447 Since most function calls on RISC machines do not use
2448 the stack, this is uncommon, but must work correctly.
7221f864 2449
4448f543 2450 Therefore, we save any area of the stack that was already
02510658 2451 written and that we are using. Here we set up to do this
2452 by making a new stack usage map from the old one. The
c87678e4 2453 actual save will be done by store_one_arg.
7221f864 2454
4448f543 2455 Another approach might be to try to reorder the argument
2456 evaluations to avoid this conflicting stack usage. */
7221f864 2457
02510658 2458 /* Since we will be writing into the entire argument area,
2459 the map must be allocated for its entire size, not just
2460 the part that is the responsibility of the caller. */
63c68695 2461 if (!OUTGOING_REG_PARM_STACK_SPACE)
2462 needed += reg_parm_stack_space;
66d433c7 2463
2464#ifdef ARGS_GROW_DOWNWARD
4448f543 2465 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2466 needed + 1);
66d433c7 2467#else
4448f543 2468 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2469 needed);
66d433c7 2470#endif
a331ea1b 2471 if (stack_usage_map_buf)
2472 free (stack_usage_map_buf);
4c36ffe6 2473 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 2474 stack_usage_map = stack_usage_map_buf;
66d433c7 2475
4448f543 2476 if (initial_highest_arg_in_use)
8e547276 2477 memcpy (stack_usage_map, initial_stack_usage_map,
2478 initial_highest_arg_in_use);
d1b03b62 2479
4448f543 2480 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 2481 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 2482 (highest_outgoing_arg_in_use
2483 - initial_highest_arg_in_use));
2484 needed = 0;
d1b03b62 2485
02510658 2486 /* The address of the outgoing argument list must not be
2487 copied to a register here, because argblock would be left
2488 pointing to the wrong place after the call to
c87678e4 2489 allocate_dynamic_stack_space below. */
d1b03b62 2490
4448f543 2491 argblock = virtual_outgoing_args_rtx;
c87678e4 2492 }
4448f543 2493 else
7221f864 2494 {
4448f543 2495 if (inhibit_defer_pop == 0)
60ecc450 2496 {
4448f543 2497 /* Try to reuse some or all of the pending_stack_adjust
481feae3 2498 to get this space. */
2499 needed
c87678e4 2500 = (combine_pending_stack_adjustment_and_call
481feae3 2501 (unadjusted_args_size,
0e0be288 2502 &adjusted_args_size,
481feae3 2503 preferred_unit_stack_boundary));
2504
2505 /* combine_pending_stack_adjustment_and_call computes
2506 an adjustment before the arguments are allocated.
2507 Account for them and see whether or not the stack
2508 needs to go up or down. */
2509 needed = unadjusted_args_size - needed;
2510
2511 if (needed < 0)
4448f543 2512 {
481feae3 2513 /* We're releasing stack space. */
2514 /* ??? We can avoid any adjustment at all if we're
2515 already aligned. FIXME. */
2516 pending_stack_adjust = -needed;
2517 do_pending_stack_adjust ();
4448f543 2518 needed = 0;
2519 }
c87678e4 2520 else
481feae3 2521 /* We need to allocate space. We'll do that in
2522 push_block below. */
2523 pending_stack_adjust = 0;
60ecc450 2524 }
481feae3 2525
2526 /* Special case this because overhead of `push_block' in
2527 this case is non-trivial. */
4448f543 2528 if (needed == 0)
2529 argblock = virtual_outgoing_args_rtx;
60ecc450 2530 else
ad3b56f3 2531 {
2532 argblock = push_block (GEN_INT (needed), 0, 0);
2533#ifdef ARGS_GROW_DOWNWARD
2534 argblock = plus_constant (argblock, needed);
2535#endif
2536 }
4448f543 2537
02510658 2538 /* We only really need to call `copy_to_reg' in the case
2539 where push insns are going to be used to pass ARGBLOCK
2540 to a function call in ARGS. In that case, the stack
2541 pointer changes value from the allocation point to the
2542 call point, and hence the value of
2543 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2544 as well always do it. */
4448f543 2545 argblock = copy_to_reg (argblock);
9069face 2546 }
2547 }
2548 }
60ecc450 2549
9069face 2550 if (ACCUMULATE_OUTGOING_ARGS)
2551 {
2552 /* The save/restore code in store_one_arg handles all
2553 cases except one: a constructor call (including a C
2554 function returning a BLKmode struct) to initialize
2555 an argument. */
2556 if (stack_arg_under_construction)
2557 {
63c68695 2558 rtx push_size
2559 = GEN_INT (adjusted_args_size.constant
2560 + (OUTGOING_REG_PARM_STACK_SPACE ? 0
2561 : reg_parm_stack_space));
9069face 2562 if (old_stack_level == 0)
2563 {
2564 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2565 NULL_RTX);
2566 old_stack_pointer_delta = stack_pointer_delta;
2567 old_pending_adj = pending_stack_adjust;
2568 pending_stack_adjust = 0;
2569 /* stack_arg_under_construction says whether a stack
2570 arg is being constructed at the old stack level.
2571 Pushing the stack gets a clean outgoing argument
2572 block. */
2573 old_stack_arg_under_construction
2574 = stack_arg_under_construction;
2575 stack_arg_under_construction = 0;
2576 /* Make a new map for the new argument list. */
a331ea1b 2577 if (stack_usage_map_buf)
2578 free (stack_usage_map_buf);
43959b95 2579 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 2580 stack_usage_map = stack_usage_map_buf;
9069face 2581 highest_outgoing_arg_in_use = 0;
4448f543 2582 }
9069face 2583 allocate_dynamic_stack_space (push_size, NULL_RTX,
2584 BITS_PER_UNIT);
60ecc450 2585 }
a3585b90 2586
9069face 2587 /* If argument evaluation might modify the stack pointer,
2588 copy the address of the argument list to a register. */
2589 for (i = 0; i < num_actuals; i++)
2590 if (args[i].pass_on_stack)
2591 {
2592 argblock = copy_addr_to_reg (argblock);
2593 break;
2594 }
2595 }
4c9e08a4 2596
60ecc450 2597 compute_argument_addresses (args, argblock, num_actuals);
a3585b90 2598
60ecc450 2599 /* If we push args individually in reverse order, perform stack alignment
2600 before the first push (the last arg). */
4448f543 2601 if (PUSH_ARGS_REVERSED && argblock == 0
0e0be288 2602 && adjusted_args_size.constant != unadjusted_args_size)
ff92623c 2603 {
60ecc450 2604 /* When the stack adjustment is pending, we get better code
2605 by combining the adjustments. */
c87678e4 2606 if (pending_stack_adjust
2a0c81bf 2607 && ! (flags & ECF_LIBCALL_BLOCK)
60ecc450 2608 && ! inhibit_defer_pop)
481feae3 2609 {
2610 pending_stack_adjust
c87678e4 2611 = (combine_pending_stack_adjustment_and_call
481feae3 2612 (unadjusted_args_size,
0e0be288 2613 &adjusted_args_size,
481feae3 2614 preferred_unit_stack_boundary));
2615 do_pending_stack_adjust ();
2616 }
60ecc450 2617 else if (argblock == 0)
0e0be288 2618 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
60ecc450 2619 - unadjusted_args_size));
60ecc450 2620 }
fa4f1f09 2621 /* Now that the stack is properly aligned, pops can't safely
2622 be deferred during the evaluation of the arguments. */
2623 NO_DEFER_POP;
66d433c7 2624
95672afe 2625 funexp = rtx_for_function_call (fndecl, addr);
66d433c7 2626
60ecc450 2627 /* Figure out the register where the value, if any, will come back. */
2628 valreg = 0;
2629 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2630 && ! structure_value_addr)
2631 {
2632 if (pcc_struct_value)
2633 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
46b3ff29 2634 fndecl, NULL, (pass == 0));
60ecc450 2635 else
46b3ff29 2636 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2637 (pass == 0));
2d329930 2638
2639 /* If VALREG is a PARALLEL whose first member has a zero
2640 offset, use that. This is for targets such as m68k that
2641 return the same value in multiple places. */
2642 if (GET_CODE (valreg) == PARALLEL)
2643 {
2644 rtx elem = XVECEXP (valreg, 0, 0);
2645 rtx where = XEXP (elem, 0);
2646 rtx offset = XEXP (elem, 1);
2647 if (offset == const0_rtx
2648 && GET_MODE (where) == GET_MODE (valreg))
2649 valreg = where;
2650 }
60ecc450 2651 }
66d433c7 2652
60ecc450 2653 /* Precompute all register parameters. It isn't safe to compute anything
2654 once we have started filling any specific hard regs. */
2655 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
66d433c7 2656
c2f47e15 2657 if (CALL_EXPR_STATIC_CHAIN (exp))
2658 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4ee9c684 2659 else
2660 static_chain_value = 0;
2661
4448f543 2662#ifdef REG_PARM_STACK_SPACE
60ecc450 2663 /* Save the fixed argument area if it's part of the caller's frame and
2664 is clobbered by argument setup for this call. */
02510658 2665 if (ACCUMULATE_OUTGOING_ARGS && pass)
4448f543 2666 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2667 &low_to_save, &high_to_save);
41332f48 2668#endif
66d433c7 2669
60ecc450 2670 /* Now store (and compute if necessary) all non-register parms.
2671 These come before register parms, since they can require block-moves,
2672 which could clobber the registers used for register parms.
2673 Parms which have partial registers are not stored here,
2674 but we do preallocate space here if they want that. */
66d433c7 2675
60ecc450 2676 for (i = 0; i < num_actuals; i++)
2677 if (args[i].reg == 0 || args[i].pass_on_stack)
7ecc63d3 2678 {
2679 rtx before_arg = get_last_insn ();
2680
57679d39 2681 if (store_one_arg (&args[i], argblock, flags,
2682 adjusted_args_size.var != 0,
2683 reg_parm_stack_space)
2684 || (pass == 0
2685 && check_sibcall_argument_overlap (before_arg,
42b11544 2686 &args[i], 1)))
7ecc63d3 2687 sibcall_failure = 1;
8697e217 2688
2689 if (flags & ECF_CONST
2690 && args[i].stack
2691 && args[i].value == args[i].stack)
2692 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2693 gen_rtx_USE (VOIDmode,
2694 args[i].value),
2695 call_fusage);
7ecc63d3 2696 }
60ecc450 2697
2698 /* If we have a parm that is passed in registers but not in memory
2699 and whose alignment does not permit a direct copy into registers,
2700 make a group of pseudos that correspond to each register that we
2701 will later fill. */
2702 if (STRICT_ALIGNMENT)
2703 store_unaligned_arguments_into_pseudos (args, num_actuals);
2704
2705 /* Now store any partially-in-registers parm.
2706 This is the last place a block-move can happen. */
2707 if (reg_parm_seen)
2708 for (i = 0; i < num_actuals; i++)
2709 if (args[i].partial != 0 && ! args[i].pass_on_stack)
7ecc63d3 2710 {
2711 rtx before_arg = get_last_insn ();
2712
57679d39 2713 if (store_one_arg (&args[i], argblock, flags,
2714 adjusted_args_size.var != 0,
2715 reg_parm_stack_space)
2716 || (pass == 0
2717 && check_sibcall_argument_overlap (before_arg,
42b11544 2718 &args[i], 1)))
7ecc63d3 2719 sibcall_failure = 1;
2720 }
66d433c7 2721
60ecc450 2722 /* If we pushed args in forward order, perform stack alignment
2723 after pushing the last arg. */
4448f543 2724 if (!PUSH_ARGS_REVERSED && argblock == 0)
0e0be288 2725 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
60ecc450 2726 - unadjusted_args_size));
66d433c7 2727
60ecc450 2728 /* If register arguments require space on the stack and stack space
2729 was not preallocated, allocate stack space here for arguments
2730 passed in registers. */
63c68695 2731 if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS
c87678e4 2732 && must_preallocate == 0 && reg_parm_stack_space > 0)
60ecc450 2733 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
985adbca 2734
60ecc450 2735 /* Pass the function the address in which to return a
2736 structure value. */
2737 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2738 {
0d568ddf 2739 structure_value_addr
85d654dd 2740 = convert_memory_address (Pmode, structure_value_addr);
45550790 2741 emit_move_insn (struct_value,
60ecc450 2742 force_reg (Pmode,
2743 force_operand (structure_value_addr,
2744 NULL_RTX)));
2745
8ad4c111 2746 if (REG_P (struct_value))
45550790 2747 use_reg (&call_fusage, struct_value);
60ecc450 2748 }
02c736f4 2749
c0e7e9f7 2750 after_args = get_last_insn ();
4ee9c684 2751 funexp = prepare_call_address (funexp, static_chain_value,
2752 &call_fusage, reg_parm_seen, pass == 0);
66d433c7 2753
42b11544 2754 load_register_parameters (args, num_actuals, &call_fusage, flags,
2755 pass == 0, &sibcall_failure);
c87678e4 2756
60ecc450 2757 /* Save a pointer to the last insn before the call, so that we can
2758 later safely search backwards to find the CALL_INSN. */
2759 before_call = get_last_insn ();
66d433c7 2760
7a8d641b 2761 /* Set up next argument register. For sibling calls on machines
2762 with register windows this should be the incoming register. */
2763#ifdef FUNCTION_INCOMING_ARG
2764 if (pass == 0)
2765 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2766 void_type_node, 1);
2767 else
2768#endif
2769 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2770 void_type_node, 1);
2771
60ecc450 2772 /* All arguments and registers used for the call must be set up by
2773 now! */
2774
481feae3 2775 /* Stack must be properly aligned now. */
231bd014 2776 gcc_assert (!pass
2777 || !(stack_pointer_delta % preferred_unit_stack_boundary));
fa4f1f09 2778
60ecc450 2779 /* Generate the actual call instruction. */
4ee9c684 2780 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
0e0be288 2781 adjusted_args_size.constant, struct_value_size,
7a8d641b 2782 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
87e19636 2783 flags, & args_so_far);
60ecc450 2784
c0e7e9f7 2785 /* If the call setup or the call itself overlaps with any part
 2786 of the argument setup, we probably clobbered our call address.
2787 In that case we can't do sibcalls. */
2788 if (pass == 0
2789 && check_sibcall_argument_overlap (after_args, 0, 0))
2790 sibcall_failure = 1;
2791
05d18e8b 2792 /* If a non-BLKmode value is returned at the most significant end
2793 of a register, shift the register right by the appropriate amount
2794 and update VALREG accordingly. BLKmode values are handled by the
2795 group load/store machinery below. */
2796 if (!structure_value_addr
2797 && !pcc_struct_value
2798 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2799 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2800 {
2801 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2802 sibcall_failure = 1;
2803 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2804 }
2805
60ecc450 2806 /* If call is cse'able, make appropriate pair of reg-notes around it.
2807 Test valreg so we don't crash; may safely ignore `const'
2808 if return type is void. Disable for PARALLEL return values, because
2809 we have no way to move such values into a pseudo register. */
2a0c81bf 2810 if (pass && (flags & ECF_LIBCALL_BLOCK))
ea0cb7ae 2811 {
60ecc450 2812 rtx insns;
f9c6a9c3 2813 rtx insn;
2814 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
ea0cb7ae 2815
a0c938f0 2816 insns = get_insns ();
f9c6a9c3 2817
2818 /* Expansion of block moves possibly introduced a loop that may
2819 not appear inside libcall block. */
2820 for (insn = insns; insn; insn = NEXT_INSN (insn))
6d7dc5b9 2821 if (JUMP_P (insn))
f9c6a9c3 2822 failed = true;
2823
2824 if (failed)
6e17d606 2825 {
6e17d606 2826 end_sequence ();
31d3e01c 2827 emit_insn (insns);
6e17d606 2828 }
2829 else
2830 {
2831 rtx note = 0;
2832 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2833
2834 /* Mark the return value as a pointer if needed. */
2835 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2836 mark_reg_pointer (temp,
2837 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2838
6e17d606 2839 end_sequence ();
9a82df26 2840 if (flag_unsafe_math_optimizations
2841 && fndecl
a8073039 2842 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9a82df26 2843 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2844 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2845 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
0d568ddf 2846 note = gen_rtx_fmt_e (SQRT,
2847 GET_MODE (temp),
9a82df26 2848 args[0].initial_value);
2849 else
2850 {
2851 /* Construct an "equal form" for the value which
2852 mentions all the arguments in order as well as
2853 the function name. */
2854 for (i = 0; i < num_actuals; i++)
2855 note = gen_rtx_EXPR_LIST (VOIDmode,
2856 args[i].initial_value, note);
2857 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
0d568ddf 2858
9a82df26 2859 if (flags & ECF_PURE)
2860 note = gen_rtx_EXPR_LIST (VOIDmode,
6e17d606 2861 gen_rtx_USE (VOIDmode,
2862 gen_rtx_MEM (BLKmode,
2863 gen_rtx_SCRATCH (VOIDmode))),
2864 note);
9a82df26 2865 }
6e17d606 2866 emit_libcall_block (insns, temp, valreg, note);
2867
2868 valreg = temp;
2869 }
60ecc450 2870 }
2a0c81bf 2871 else if (pass && (flags & ECF_MALLOC))
60ecc450 2872 {
2873 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2874 rtx last, insns;
2875
c87678e4 2876 /* The return value from a malloc-like function is a pointer. */
60ecc450 2877 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
80909c64 2878 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
60ecc450 2879
2880 emit_move_insn (temp, valreg);
2881
 2882 /* The return value from a malloc-like function cannot alias
2883 anything else. */
2884 last = get_last_insn ();
c87678e4 2885 REG_NOTES (last) =
60ecc450 2886 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2887
2888 /* Write out the sequence. */
2889 insns = get_insns ();
2890 end_sequence ();
31d3e01c 2891 emit_insn (insns);
60ecc450 2892 valreg = temp;
2893 }
66d433c7 2894
3072d30e 2895 /* For calls to `setjmp', etc., inform
2896 function.c:setjmp_warnings that it should complain if
2897 nonvolatile values are live. For functions that cannot
2898 return, inform flow that control does not fall through. */
66d433c7 2899
4fec1d6c 2900 if ((flags & ECF_NORETURN) || pass == 0)
02c736f4 2901 {
9239aee6 2902 /* The barrier must be emitted
60ecc450 2903 immediately after the CALL_INSN. Some ports emit more
2904 than just a CALL_INSN above, so we must search for it here. */
66d433c7 2905
60ecc450 2906 rtx last = get_last_insn ();
6d7dc5b9 2907 while (!CALL_P (last))
60ecc450 2908 {
2909 last = PREV_INSN (last);
2910 /* There was no CALL_INSN? */
231bd014 2911 gcc_assert (last != before_call);
60ecc450 2912 }
66d433c7 2913
9239aee6 2914 emit_barrier_after (last);
20f5f6d0 2915
b494d193 2916 /* Stack adjustments after a noreturn call are dead code.
 2917 However, when NO_DEFER_POP is in effect, we must preserve
2918 stack_pointer_delta. */
2919 if (inhibit_defer_pop == 0)
2920 {
2921 stack_pointer_delta = old_stack_allocated;
2922 pending_stack_adjust = 0;
2923 }
60ecc450 2924 }
66d433c7 2925
60ecc450 2926 /* If value type not void, return an rtx for the value. */
66d433c7 2927
60ecc450 2928 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2929 || ignore)
5edaabad 2930 target = const0_rtx;
60ecc450 2931 else if (structure_value_addr)
2932 {
e16ceb8e 2933 if (target == 0 || !MEM_P (target))
60ecc450 2934 {
f7c44134 2935 target
2936 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2937 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2938 structure_value_addr));
2939 set_mem_attributes (target, exp, 1);
60ecc450 2940 }
2941 }
2942 else if (pcc_struct_value)
566d850a 2943 {
60ecc450 2944 /* This is the special C++ case where we need to
2945 know what the true target was. We take care to
2946 never use this value more than once in one expression. */
2947 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2948 copy_to_reg (valreg));
f7c44134 2949 set_mem_attributes (target, exp, 1);
566d850a 2950 }
60ecc450 2951 /* Handle calls that return values in multiple non-contiguous locations.
2952 The Irix 6 ABI has examples of this. */
2953 else if (GET_CODE (valreg) == PARALLEL)
2954 {
4ee9c684 2955 if (target == 0)
60ecc450 2956 {
387bc205 2957 /* This will only be assigned once, so it can be readonly. */
2958 tree nt = build_qualified_type (TREE_TYPE (exp),
2959 (TYPE_QUALS (TREE_TYPE (exp))
2960 | TYPE_QUAL_CONST));
2961
2962 target = assign_temp (nt, 0, 1, 1);
60ecc450 2963 }
2964
2965 if (! rtx_equal_p (target, valreg))
5f4cd670 2966 emit_group_store (target, valreg, TREE_TYPE (exp),
2c269e73 2967 int_size_in_bytes (TREE_TYPE (exp)));
325d1c45 2968
60ecc450 2969	  /* We cannot support sibling calls for this case.  */
2970 sibcall_failure = 1;
2971 }
2972 else if (target
2973 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2974 && GET_MODE (target) == GET_MODE (valreg))
2975 {
aadbaa40 2976 bool may_overlap = false;
2977
360738f1 2978 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2979 reg to a plain register. */
90af1361 2980 if (!REG_P (target) || HARD_REGISTER_P (target))
2981 valreg = avoid_likely_spilled_reg (valreg);
360738f1 2982
aadbaa40 2983 /* If TARGET is a MEM in the argument area, and we have
2984 saved part of the argument area, then we can't store
2985 directly into TARGET as it may get overwritten when we
2986 restore the argument save area below. Don't work too
2987 hard though and simply force TARGET to a register if it
2988 is a MEM; the optimizer is quite likely to sort it out. */
2989 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2990 for (i = 0; i < num_actuals; i++)
2991 if (args[i].save_area)
2992 {
2993 may_overlap = true;
2994 break;
2995 }
dbe1f550 2996
aadbaa40 2997 if (may_overlap)
2998 target = copy_to_reg (valreg);
2999 else
3000 {
3001 /* TARGET and VALREG cannot be equal at this point
3002 because the latter would not have
3003 REG_FUNCTION_VALUE_P true, while the former would if
3004 it were referring to the same register.
3005
3006 If they refer to the same register, this move will be
3007 a no-op, except when function inlining is being
3008 done. */
3009 emit_move_insn (target, valreg);
3010
3011 /* If we are setting a MEM, this code must be executed.
3012 Since it is emitted after the call insn, sibcall
3013 optimization cannot be performed in that case. */
3014 if (MEM_P (target))
3015 sibcall_failure = 1;
3016 }
60ecc450 3017 }
3018 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
044aa5ed 3019 {
3020 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3021
 3022	  /* We cannot support sibling calls for this case.  */
3023 sibcall_failure = 1;
3024 }
60ecc450 3025 else
90af1361 3026 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
66d433c7 3027
45550790 3028 if (targetm.calls.promote_function_return(funtype))
3029 {
231bd014 3030 /* If we promoted this return value, make the proper SUBREG.
3031 TARGET might be const0_rtx here, so be careful. */
3032 if (REG_P (target)
3033 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3034 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3035 {
3036 tree type = TREE_TYPE (exp);
3037 int unsignedp = TYPE_UNSIGNED (type);
3038 int offset = 0;
3039 enum machine_mode pmode;
a0c938f0 3040
231bd014 3041 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3042 /* If we don't promote as expected, something is wrong. */
3043 gcc_assert (GET_MODE (target) == pmode);
a0c938f0 3044
231bd014 3045 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3046 && (GET_MODE_SIZE (GET_MODE (target))
3047 > GET_MODE_SIZE (TYPE_MODE (type))))
3048 {
3049 offset = GET_MODE_SIZE (GET_MODE (target))
3050 - GET_MODE_SIZE (TYPE_MODE (type));
3051 if (! BYTES_BIG_ENDIAN)
3052 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3053 else if (! WORDS_BIG_ENDIAN)
3054 offset %= UNITS_PER_WORD;
3055 }
3056 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3057 SUBREG_PROMOTED_VAR_P (target) = 1;
3058 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3059 }
45550790 3060 }
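  /* Example, assuming a 64-bit big-endian target that promotes SImode
     return values to DImode: GET_MODE (target) is 8 bytes and the type
     is 4, so OFFSET becomes 4 and the SUBREG selects the low-order
     word, which lies at the high-address end of the register.  */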
23eb5fa6 3061
60ecc450 3062 /* If size of args is variable or this was a constructor call for a stack
3063 argument, restore saved stack-pointer value. */
66d433c7 3064
ff3ae375 3065 if (old_stack_level)
60ecc450 3066 {
3067 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9069face 3068 stack_pointer_delta = old_stack_pointer_delta;
60ecc450 3069 pending_stack_adjust = old_pending_adj;
80f06481 3070 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
60ecc450 3071 stack_arg_under_construction = old_stack_arg_under_construction;
3072 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3073 stack_usage_map = initial_stack_usage_map;
60ecc450 3074 sibcall_failure = 1;
3075 }
02510658 3076 else if (ACCUMULATE_OUTGOING_ARGS && pass)
60ecc450 3077 {
66d433c7 3078#ifdef REG_PARM_STACK_SPACE
60ecc450 3079 if (save_area)
6e96b626 3080 restore_fixed_argument_area (save_area, argblock,
3081 high_to_save, low_to_save);
41332f48 3082#endif
66d433c7 3083
60ecc450 3084 /* If we saved any argument areas, restore them. */
3085 for (i = 0; i < num_actuals; i++)
3086 if (args[i].save_area)
3087 {
3088 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3089 rtx stack_area
3090 = gen_rtx_MEM (save_mode,
3091 memory_address (save_mode,
3092 XEXP (args[i].stack_slot, 0)));
3093
3094 if (save_mode != BLKmode)
3095 emit_move_insn (stack_area, args[i].save_area);
3096 else
0378dbdc 3097 emit_block_move (stack_area, args[i].save_area,
241399f6 3098 GEN_INT (args[i].locate.size.constant),
0378dbdc 3099 BLOCK_OP_CALL_PARM);
60ecc450 3100 }
66d433c7 3101
60ecc450 3102 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3103 stack_usage_map = initial_stack_usage_map;
3104 }
66d433c7 3105
c87678e4 3106 /* If this was alloca, record the new stack level for nonlocal gotos.
60ecc450 3107 Check for the handler slots since we might not have a save area
 3108	     for nonlocal gotos.  */
dbd6697a 3109
4ee9c684 3110 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3111 update_nonlocal_goto_save_area ();
66d433c7 3112
60ecc450 3113 /* Free up storage we no longer need. */
3114 for (i = 0; i < num_actuals; ++i)
3115 if (args[i].aligned_regs)
3116 free (args[i].aligned_regs);
3117
3118 insns = get_insns ();
3119 end_sequence ();
3120
3121 if (pass == 0)
3122 {
3123 tail_call_insns = insns;
3124
60ecc450 3125 /* Restore the pending stack adjustment now that we have
3126 finished generating the sibling call sequence. */
91b70175 3127
60ecc450 3128 pending_stack_adjust = save_pending_stack_adjust;
91b70175 3129 stack_pointer_delta = save_stack_pointer_delta;
0e0be288 3130
3131 /* Prepare arg structure for next iteration. */
c87678e4 3132 for (i = 0; i < num_actuals; i++)
0e0be288 3133 {
3134 args[i].value = 0;
3135 args[i].aligned_regs = 0;
3136 args[i].stack = 0;
3137 }
7ecc63d3 3138
3139 sbitmap_free (stored_args_map);
60ecc450 3140 }
3141 else
9069face 3142 {
3143 normal_call_insns = insns;
3144
3145 /* Verify that we've deallocated all the stack we used. */
4fec1d6c 3146 gcc_assert ((flags & ECF_NORETURN)
231bd014 3147 || (old_stack_allocated
3148 == stack_pointer_delta - pending_stack_adjust));
9069face 3149 }
ae8d6151 3150
3151 /* If something prevents making this a sibling call,
3152 zero out the sequence. */
3153 if (sibcall_failure)
3154 tail_call_insns = NULL_RTX;
4ee9c684 3155 else
3156 break;
60ecc450 3157 }
3158
365db11e 3159 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4ee9c684 3160	     arguments too, as the argument area is now clobbered by the call.  */
3161 if (tail_call_insns)
60ecc450 3162 {
4ee9c684 3163 emit_insn (tail_call_insns);
3164 cfun->tail_call_emit = true;
60ecc450 3165 }
3166 else
31d3e01c 3167 emit_insn (normal_call_insns);
66d433c7 3168
60ecc450 3169 currently_expanding_call--;
6d801f27 3170
a331ea1b 3171 if (stack_usage_map_buf)
3172 free (stack_usage_map_buf);
3173
66d433c7 3174 return target;
3175}
915e81b8 3176
4ee9c684 3177/* A sibling call sequence invalidates any REG_EQUIV notes made for
3178 this function's incoming arguments.
3179
3180 At the start of RTL generation we know the only REG_EQUIV notes
0a227ed5 3181 in the rtl chain are those for incoming arguments, so we can look
3182 for REG_EQUIV notes between the start of the function and the
3183 NOTE_INSN_FUNCTION_BEG.
4ee9c684 3184
3185 This is (slight) overkill. We could keep track of the highest
3186 argument we clobber and be more selective in removing notes, but it
3187 does not seem to be worth the effort. */
0a227ed5 3188
4ee9c684 3189void
3190fixup_tail_calls (void)
3191{
0a227ed5 3192 rtx insn;
3193
3194 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3195 {
750a330e 3196 rtx note;
3197
0a227ed5 3198 /* There are never REG_EQUIV notes for the incoming arguments
3199 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3200 if (NOTE_P (insn)
ad4583d9 3201 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
0a227ed5 3202 break;
3203
750a330e 3204 note = find_reg_note (insn, REG_EQUIV, 0);
3205 if (note)
3206 remove_note (insn, note);
3207 note = find_reg_note (insn, REG_EQUIV, 0);
3208 gcc_assert (!note);
0a227ed5 3209 }
4ee9c684 3210}
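
/* A rough illustration: an incoming argument loaded as
     (insn ... (set (reg 58) (mem (plus arg_ptr (const_int 8))))
	(expr_list:REG_EQUIV (mem (plus arg_ptr (const_int 8))) ...))
   loses its note here, since a sibling call may have stored a new
   value into that stack slot.  */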
3211
915e81b8 3212/* Traverse a list of TYPES and expand all complex types into their
3213 components. */
5ab29745 3214static tree
915e81b8 3215split_complex_types (tree types)
3216{
3217 tree p;
3218
92d40bc4 3219	  /* Before allocating memory, check for the common case of no complex
 3220	     types.  */
3220 for (p = types; p; p = TREE_CHAIN (p))
3221 {
3222 tree type = TREE_VALUE (p);
3223 if (TREE_CODE (type) == COMPLEX_TYPE
3224 && targetm.calls.split_complex_arg (type))
a0c938f0 3225 goto found;
92d40bc4 3226 }
3227 return types;
3228
3229 found:
915e81b8 3230 types = copy_list (types);
3231
3232 for (p = types; p; p = TREE_CHAIN (p))
3233 {
3234 tree complex_type = TREE_VALUE (p);
3235
92d40bc4 3236 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3237 && targetm.calls.split_complex_arg (complex_type))
915e81b8 3238 {
3239 tree next, imag;
3240
3241 /* Rewrite complex type with component type. */
3242 TREE_VALUE (p) = TREE_TYPE (complex_type);
3243 next = TREE_CHAIN (p);
3244
3245 /* Add another component type for the imaginary part. */
3246 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3247 TREE_CHAIN (p) = imag;
3248 TREE_CHAIN (imag) = next;
3249
3250 /* Skip the newly created node. */
3251 p = TREE_CHAIN (p);
3252 }
3253 }
3254
3255 return types;
3256}
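
/* For example, on a target whose split_complex_arg hook returns true for
   complex double, the list (complex double, int) becomes
   (double, double, int): the real part replaces the complex entry and
   the imaginary part is spliced in immediately after it.  */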
66d433c7 3257\f
20f7032f 3258/* Output a library call to function FUN (a SYMBOL_REF rtx).
c87678e4 3259	   The RETVAL parameter specifies whether the return value needs to be
ebf77775 3260	   saved; the other parameters are documented in the emit_library_call
 3261	   function below.  */
2a631e19 3261
20f7032f 3262static rtx
4c9e08a4 3263emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3264 enum libcall_type fn_type,
3265 enum machine_mode outmode, int nargs, va_list p)
b39693dd 3266{
9bdaf1ba 3267 /* Total size in bytes of all the stack-parms scanned so far. */
3268 struct args_size args_size;
3269 /* Size of arguments before any adjustments (such as rounding). */
3270 struct args_size original_args_size;
19cb6b50 3271 int argnum;
9bdaf1ba 3272 rtx fun;
3273 int inc;
3274 int count;
9bdaf1ba 3275 rtx argblock = 0;
3276 CUMULATIVE_ARGS args_so_far;
c87678e4 3277 struct arg
3278 {
3279 rtx value;
3280 enum machine_mode mode;
3281 rtx reg;
3282 int partial;
241399f6 3283 struct locate_and_pad_arg_data locate;
c87678e4 3284 rtx save_area;
3285 };
9bdaf1ba 3286 struct arg *argvec;
3287 int old_inhibit_defer_pop = inhibit_defer_pop;
3288 rtx call_fusage = 0;
3289 rtx mem_value = 0;
16204096 3290 rtx valreg;
9bdaf1ba 3291 int pcc_struct_value = 0;
3292 int struct_value_size = 0;
df4b504c 3293 int flags;
9bdaf1ba 3294 int reg_parm_stack_space = 0;
9bdaf1ba 3295 int needed;
644c283b 3296 rtx before_call;
771d21fa 3297 tree tfom; /* type_for_mode (outmode, 0) */
9bdaf1ba 3298
4448f543 3299#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3300 /* Define the boundary of the register parm stack space that needs to be
 3301	     saved, if any.  */
6e96b626 3302 int low_to_save, high_to_save;
c87678e4 3303 rtx save_area = 0; /* Place that it is saved. */
9bdaf1ba 3304#endif
3305
9bdaf1ba 3306 /* Size of the stack reserved for parameter registers. */
3307 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3308 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 3309 char *stack_usage_map_buf = NULL;
9bdaf1ba 3310
45550790 3311 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3312
9bdaf1ba 3313#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3314 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
9bdaf1ba 3315#endif
3316
ab7ccfa2 3317	  /* By default, library functions cannot throw.  */
df4b504c 3318 flags = ECF_NOTHROW;
3319
ab7ccfa2 3320 switch (fn_type)
3321 {
3322 case LCT_NORMAL:
2a0c81bf 3323 break;
ab7ccfa2 3324 case LCT_CONST:
2a0c81bf 3325 flags |= ECF_CONST;
3326 break;
ab7ccfa2 3327 case LCT_PURE:
2a0c81bf 3328 flags |= ECF_PURE;
ab7ccfa2 3329 break;
3330 case LCT_CONST_MAKE_BLOCK:
2a0c81bf 3331 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
ab7ccfa2 3332 break;
3333 case LCT_PURE_MAKE_BLOCK:
2a0c81bf 3334 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
ab7ccfa2 3335 break;
3336 case LCT_NORETURN:
3337 flags |= ECF_NORETURN;
3338 break;
3339 case LCT_THROW:
3340 flags = ECF_NORETURN;
3341 break;
0ff18307 3342 case LCT_RETURNS_TWICE:
3343 flags = ECF_RETURNS_TWICE;
3344 break;
ab7ccfa2 3345 }
9bdaf1ba 3346 fun = orgfun;
3347
9bdaf1ba 3348 /* Ensure current function's preferred stack boundary is at least
3349 what we need. */
3350 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3351 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
9bdaf1ba 3352
3353 /* If this kind of value comes back in memory,
3354 decide where in memory it should come back. */
771d21fa 3355 if (outmode != VOIDmode)
9bdaf1ba 3356 {
dc24ddbd 3357 tfom = lang_hooks.types.type_for_mode (outmode, 0);
45550790 3358 if (aggregate_value_p (tfom, 0))
771d21fa 3359 {
9bdaf1ba 3360#ifdef PCC_STATIC_STRUCT_RETURN
771d21fa 3361 rtx pointer_reg
46b3ff29 3362 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
771d21fa 3363 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3364 pcc_struct_value = 1;
3365 if (value == 0)
3366 value = gen_reg_rtx (outmode);
9bdaf1ba 3367#else /* not PCC_STATIC_STRUCT_RETURN */
771d21fa 3368 struct_value_size = GET_MODE_SIZE (outmode);
e16ceb8e 3369 if (value != 0 && MEM_P (value))
771d21fa 3370 mem_value = value;
3371 else
3372 mem_value = assign_temp (tfom, 0, 1, 1);
9bdaf1ba 3373#endif
771d21fa 3374 /* This call returns a big structure. */
3375 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3376 }
9bdaf1ba 3377 }
771d21fa 3378 else
3379 tfom = void_type_node;
9bdaf1ba 3380
3381 /* ??? Unfinished: must pass the memory address as an argument. */
3382
3383 /* Copy all the libcall-arguments out of the varargs data
3384 and into a vector ARGVEC.
3385
3386 Compute how to pass each argument. We only support a very small subset
3387 of the full argument passing conventions to limit complexity here since
3388 library functions shouldn't have many args. */
3389
f0af5a88 3390 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3391 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
9bdaf1ba 3392
e1efd914 3393#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3394 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3395#else
30c70355 3396 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
e1efd914 3397#endif
9bdaf1ba 3398
3399 args_size.constant = 0;
3400 args_size.var = 0;
3401
3402 count = 0;
3403
2c5d421b 3404 /* Now we are about to start emitting insns that can be deleted
3405 if a libcall is deleted. */
2a0c81bf 3406 if (flags & ECF_LIBCALL_BLOCK)
2c5d421b 3407 start_sequence ();
3408
9bdaf1ba 3409 push_temp_slots ();
3410
3411 /* If there's a structure value address to be passed,
3412 either pass it in the special place, or pass it as an extra argument. */
45550790 3413 if (mem_value && struct_value == 0 && ! pcc_struct_value)
9bdaf1ba 3414 {
3415 rtx addr = XEXP (mem_value, 0);
a0c938f0 3416
9bdaf1ba 3417 nargs++;
3418
3419 /* Make sure it is a reasonable operand for a move or push insn. */
e16ceb8e 3420 if (!REG_P (addr) && !MEM_P (addr)
9bdaf1ba 3421 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3422 addr = force_operand (addr, NULL_RTX);
3423
3424 argvec[count].value = addr;
3425 argvec[count].mode = Pmode;
3426 argvec[count].partial = 0;
3427
3428 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
f054eb3c 3429 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3430 NULL_TREE, 1) == 0);
9bdaf1ba 3431
3432 locate_and_pad_parm (Pmode, NULL_TREE,
2e735c0d 3433#ifdef STACK_PARMS_IN_REG_PARM_AREA
a0c938f0 3434 1,
2e735c0d 3435#else
3436 argvec[count].reg != 0,
3437#endif
241399f6 3438 0, NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 3439
9bdaf1ba 3440 if (argvec[count].reg == 0 || argvec[count].partial != 0
3441 || reg_parm_stack_space > 0)
241399f6 3442 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 3443
3444 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3445
3446 count++;
3447 }
3448
3449 for (; count < nargs; count++)
3450 {
3451 rtx val = va_arg (p, rtx);
3452 enum machine_mode mode = va_arg (p, enum machine_mode);
3453
3454 /* We cannot convert the arg value to the mode the library wants here;
 3455	 we must do it earlier, where we know the signedness of the arg.  */
231bd014 3456 gcc_assert (mode != BLKmode
3457 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
9bdaf1ba 3458
9bdaf1ba 3459 /* Make sure it is a reasonable operand for a move or push insn. */
e16ceb8e 3460 if (!REG_P (val) && !MEM_P (val)
9bdaf1ba 3461 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3462 val = force_operand (val, NULL_RTX);
3463
cc9b8628 3464 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
9bdaf1ba 3465 {
ddaf7ad3 3466 rtx slot;
13f08ee7 3467 int must_copy
3468 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
ddaf7ad3 3469
aeeed45c 3470 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3471 functions, so we have to pretend this isn't such a function. */
3472 if (flags & ECF_LIBCALL_BLOCK)
3473 {
3474 rtx insns = get_insns ();
3475 end_sequence ();
3476 emit_insn (insns);
3477 }
3478 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3479
5096b8b0 3480 /* If this was a CONST function, it is now PURE since
3481 it now reads memory. */
3482 if (flags & ECF_CONST)
3483 {
3484 flags &= ~ECF_CONST;
3485 flags |= ECF_PURE;
3486 }
3487
41dc12b4 3488	      if (MEM_P (val) && !must_copy)
ddaf7ad3 3489 slot = val;
41dc12b4 3490 else
ddaf7ad3 3491 {
dc24ddbd 3492 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
771d21fa 3493 0, 1, 1);
ddaf7ad3 3494 emit_move_insn (slot, val);
3495 }
387bc205 3496
a683e787 3497 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3498 gen_rtx_USE (VOIDmode, slot),
3499 call_fusage);
ddaf7ad3 3500 if (must_copy)
3501 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3502 gen_rtx_CLOBBER (VOIDmode,
3503 slot),
3504 call_fusage);
3505
9bdaf1ba 3506 mode = Pmode;
ddaf7ad3 3507 val = force_operand (XEXP (slot, 0), NULL_RTX);
9bdaf1ba 3508 }
9bdaf1ba 3509
3510 argvec[count].value = val;
3511 argvec[count].mode = mode;
3512
3513 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3514
9bdaf1ba 3515 argvec[count].partial
f054eb3c 3516 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
9bdaf1ba 3517
3518 locate_and_pad_parm (mode, NULL_TREE,
2e735c0d 3519#ifdef STACK_PARMS_IN_REG_PARM_AREA
c87678e4 3520 1,
2e735c0d 3521#else
3522 argvec[count].reg != 0,
3523#endif
241399f6 3524 argvec[count].partial,
3525 NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 3526
231bd014 3527 gcc_assert (!argvec[count].locate.size.var);
9bdaf1ba 3528
9bdaf1ba 3529 if (argvec[count].reg == 0 || argvec[count].partial != 0
3530 || reg_parm_stack_space > 0)
241399f6 3531 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 3532
3533 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3534 }
9bdaf1ba 3535
9bdaf1ba 3536 /* If this machine requires an external definition for library
3537 functions, write one out. */
3538 assemble_external_libcall (fun);
3539
3540 original_args_size = args_size;
91b70175 3541 args_size.constant = (((args_size.constant
3542 + stack_pointer_delta
3543 + STACK_BYTES - 1)
3544 / STACK_BYTES
3545 * STACK_BYTES)
3546 - stack_pointer_delta);
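  /* For instance, with STACK_BYTES == 16, stack_pointer_delta == 4 and
     9 bytes of arguments, this yields (9 + 4 + 15) / 16 * 16 - 4 == 12,
     so allocating 12 more bytes leaves the stack 16-byte aligned.  */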
9bdaf1ba 3547
3548 args_size.constant = MAX (args_size.constant,
3549 reg_parm_stack_space);
3550
63c68695 3551 if (!OUTGOING_REG_PARM_STACK_SPACE)
3552 args_size.constant -= reg_parm_stack_space;
9bdaf1ba 3553
3554 if (args_size.constant > current_function_outgoing_args_size)
3555 current_function_outgoing_args_size = args_size.constant;
3556
4448f543 3557 if (ACCUMULATE_OUTGOING_ARGS)
3558 {
3559 /* Since the stack pointer will never be pushed, it is possible for
3560 the evaluation of a parm to clobber something we have already
3561 written to the stack. Since most function calls on RISC machines
3562 do not use the stack, this is uncommon, but must work correctly.
9bdaf1ba 3563
4448f543 3564 Therefore, we save any area of the stack that was already written
3565 and that we are using. Here we set up to do this by making a new
3566 stack usage map from the old one.
9bdaf1ba 3567
4448f543 3568 Another approach might be to try to reorder the argument
3569 evaluations to avoid this conflicting stack usage. */
9bdaf1ba 3570
4448f543 3571 needed = args_size.constant;
9bdaf1ba 3572
4448f543 3573 /* Since we will be writing into the entire argument area, the
3574 map must be allocated for its entire size, not just the part that
3575 is the responsibility of the caller. */
63c68695 3576 if (!OUTGOING_REG_PARM_STACK_SPACE)
3577 needed += reg_parm_stack_space;
9bdaf1ba 3578
3579#ifdef ARGS_GROW_DOWNWARD
4448f543 3580 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3581 needed + 1);
9bdaf1ba 3582#else
4448f543 3583 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3584 needed);
9bdaf1ba 3585#endif
4c36ffe6 3586 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3587 stack_usage_map = stack_usage_map_buf;
9bdaf1ba 3588
4448f543 3589 if (initial_highest_arg_in_use)
8e547276 3590 memcpy (stack_usage_map, initial_stack_usage_map,
3591 initial_highest_arg_in_use);
9bdaf1ba 3592
4448f543 3593 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 3594 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 3595 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3596 needed = 0;
9bdaf1ba 3597
9c0a756f 3598 /* We must be careful to use virtual regs before they're instantiated,
a0c938f0 3599 and real regs afterwards. Loop optimization, for example, can create
9c0a756f 3600 new libcalls after we've instantiated the virtual regs, and if we
3601 use virtuals anyway, they won't match the rtl patterns. */
9bdaf1ba 3602
9c0a756f 3603 if (virtuals_instantiated)
3604 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3605 else
3606 argblock = virtual_outgoing_args_rtx;
4448f543 3607 }
3608 else
3609 {
3610 if (!PUSH_ARGS)
3611 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3612 }
9bdaf1ba 3613
9bdaf1ba 3614 /* If we push args individually in reverse order, perform stack alignment
3615 before the first push (the last arg). */
4448f543 3616 if (argblock == 0 && PUSH_ARGS_REVERSED)
9bdaf1ba 3617 anti_adjust_stack (GEN_INT (args_size.constant
3618 - original_args_size.constant));
9bdaf1ba 3619
4448f543 3620 if (PUSH_ARGS_REVERSED)
3621 {
3622 inc = -1;
3623 argnum = nargs - 1;
3624 }
3625 else
3626 {
3627 inc = 1;
3628 argnum = 0;
3629 }
9bdaf1ba 3630
4448f543 3631#ifdef REG_PARM_STACK_SPACE
3632 if (ACCUMULATE_OUTGOING_ARGS)
3633 {
3634 /* The argument list is the property of the called routine and it
3635 may clobber it. If the fixed area has been used for previous
6e96b626 3636 parameters, we must save and restore it. */
3637 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3638 &low_to_save, &high_to_save);
9bdaf1ba 3639 }
3640#endif
c87678e4 3641
9bdaf1ba 3642 /* Push the args that need to be pushed. */
3643
3644 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3645 are to be pushed. */
3646 for (count = 0; count < nargs; count++, argnum += inc)
3647 {
19cb6b50 3648 enum machine_mode mode = argvec[argnum].mode;
3649 rtx val = argvec[argnum].value;
9bdaf1ba 3650 rtx reg = argvec[argnum].reg;
3651 int partial = argvec[argnum].partial;
4448f543 3652 int lower_bound = 0, upper_bound = 0, i;
9bdaf1ba 3653
3654 if (! (reg != 0 && partial == 0))
3655 {
4448f543 3656 if (ACCUMULATE_OUTGOING_ARGS)
3657 {
02510658 3658 /* If this is being stored into a pre-allocated, fixed-size,
3659 stack area, save any previous data at that location. */
9bdaf1ba 3660
3661#ifdef ARGS_GROW_DOWNWARD
4448f543 3662 /* stack_slot is negative, but we want to index stack_usage_map
3663 with positive values. */
241399f6 3664 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3665 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
9bdaf1ba 3666#else
241399f6 3667 lower_bound = argvec[argnum].locate.offset.constant;
3668 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
9bdaf1ba 3669#endif
3670
fd2c0c1d 3671 i = lower_bound;
3672 /* Don't worry about things in the fixed argument area;
3673 it has already been saved. */
3674 if (i < reg_parm_stack_space)
3675 i = reg_parm_stack_space;
3676 while (i < upper_bound && stack_usage_map[i] == 0)
3677 i++;
9bdaf1ba 3678
fd2c0c1d 3679 if (i < upper_bound)
4448f543 3680 {
241399f6 3681 /* We need to make a save area. */
3682 unsigned int size
3683 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4448f543 3684 enum machine_mode save_mode
241399f6 3685 = mode_for_size (size, MODE_INT, 1);
3686 rtx adr
3687 = plus_constant (argblock,
3688 argvec[argnum].locate.offset.constant);
4448f543 3689 rtx stack_area
241399f6 3690 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
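		      /* E.g. a 4-byte slot yields SImode on most targets,
			 so the old contents can live in one pseudo; when no
			 integer mode fits, mode_for_size returns BLKmode
			 and we fall back to a stack temporary below.  */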
4448f543 3691
f9c6a9c3 3692 if (save_mode == BLKmode)
3693 {
3694 argvec[argnum].save_area
3695 = assign_stack_temp (BLKmode,
a0c938f0 3696 argvec[argnum].locate.size.constant,
f9c6a9c3 3697 0);
3698
3699 emit_block_move (validize_mem (argvec[argnum].save_area),
a0c938f0 3700 stack_area,
f9c6a9c3 3701 GEN_INT (argvec[argnum].locate.size.constant),
3702 BLOCK_OP_CALL_PARM);
3703 }
3704 else
3705 {
3706 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3707
3708 emit_move_insn (argvec[argnum].save_area, stack_area);
3709 }
4448f543 3710 }
9bdaf1ba 3711 }
325d1c45 3712
0378dbdc 3713 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3714 partial, reg, 0, argblock,
241399f6 3715 GEN_INT (argvec[argnum].locate.offset.constant),
3716 reg_parm_stack_space,
3717 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
9bdaf1ba 3718
9bdaf1ba 3719 /* Now mark the segment we just used. */
4448f543 3720 if (ACCUMULATE_OUTGOING_ARGS)
3721 for (i = lower_bound; i < upper_bound; i++)
3722 stack_usage_map[i] = 1;
9bdaf1ba 3723
3724 NO_DEFER_POP;
2eb9302a 3725
3726 if (flags & ECF_CONST)
3727 {
3728 rtx use;
3729
3730 /* Indicate argument access so that alias.c knows that these
3731 values are live. */
3732 if (argblock)
3733 use = plus_constant (argblock,
3734 argvec[argnum].locate.offset.constant);
3735 else
23943319 3736 /* When arguments are pushed, trying to tell alias.c where
2eb9302a 3737 exactly this argument is won't work, because the
3738 auto-increment causes confusion. So we merely indicate
3739 that we access something with a known mode somewhere on
3740 the stack. */
a0c938f0 3741 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2eb9302a 3742 gen_rtx_SCRATCH (Pmode));
3743 use = gen_rtx_MEM (argvec[argnum].mode, use);
3744 use = gen_rtx_USE (VOIDmode, use);
3745 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3746 }
9bdaf1ba 3747 }
3748 }
3749
9bdaf1ba 3750 /* If we pushed args in forward order, perform stack alignment
3751 after pushing the last arg. */
4448f543 3752 if (argblock == 0 && !PUSH_ARGS_REVERSED)
9bdaf1ba 3753 anti_adjust_stack (GEN_INT (args_size.constant
3754 - original_args_size.constant));
9bdaf1ba 3755
4448f543 3756 if (PUSH_ARGS_REVERSED)
3757 argnum = nargs - 1;
3758 else
3759 argnum = 0;
9bdaf1ba 3760
4ee9c684 3761 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
9bdaf1ba 3762
3763 /* Now load any reg parms into their regs. */
3764
3765 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3766 are to be pushed. */
3767 for (count = 0; count < nargs; count++, argnum += inc)
3768 {
bec917cc 3769 enum machine_mode mode = argvec[argnum].mode;
19cb6b50 3770 rtx val = argvec[argnum].value;
9bdaf1ba 3771 rtx reg = argvec[argnum].reg;
3772 int partial = argvec[argnum].partial;
3773
3774 /* Handle calls that pass values in multiple non-contiguous
3775 locations. The PA64 has examples of this for library calls. */
3776 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bec917cc 3777 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
9bdaf1ba 3778 else if (reg != 0 && partial == 0)
3779 emit_move_insn (reg, val);
3780
3781 NO_DEFER_POP;
3782 }
3783
9bdaf1ba 3784 /* Any regs containing parms remain in use through the call. */
3785 for (count = 0; count < nargs; count++)
3786 {
3787 rtx reg = argvec[count].reg;
3788 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3789 use_group_regs (&call_fusage, reg);
3790 else if (reg != 0)
6c6f16e5 3791 {
3792 int partial = argvec[count].partial;
3793 if (partial)
3794 {
3795 int nregs;
3796 gcc_assert (partial % UNITS_PER_WORD == 0);
3797 nregs = partial / UNITS_PER_WORD;
3798 use_regs (&call_fusage, REGNO (reg), nregs);
3799 }
3800 else
3801 use_reg (&call_fusage, reg);
3802 }
9bdaf1ba 3803 }
3804
3805 /* Pass the function the address in which to return a structure value. */
45550790 3806 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
9bdaf1ba 3807 {
45550790 3808 emit_move_insn (struct_value,
9bdaf1ba 3809 force_reg (Pmode,
3810 force_operand (XEXP (mem_value, 0),
3811 NULL_RTX)));
8ad4c111 3812 if (REG_P (struct_value))
45550790 3813 use_reg (&call_fusage, struct_value);
9bdaf1ba 3814 }
3815
3816 /* Don't allow popping to be deferred, since then
3817 cse'ing of library calls could delete a call and leave the pop. */
3818 NO_DEFER_POP;
16204096 3819 valreg = (mem_value == 0 && outmode != VOIDmode
3820 ? hard_libcall_value (outmode) : NULL_RTX);
9bdaf1ba 3821
481feae3 3822 /* Stack must be properly aligned now. */
231bd014 3823 gcc_assert (!(stack_pointer_delta
3824 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
fa4f1f09 3825
644c283b 3826 before_call = get_last_insn ();
3827
9bdaf1ba 3828 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3829 will set inhibit_defer_pop to that value. */
20f7032f 3830 /* The return type is needed to decide how many bytes the function pops.
3831 Signedness plays no role in that, so for simplicity, we pretend it's
3832 always signed. We also assume that the list of arguments passed has
3833 no impact, so we pretend it is unknown. */
9bdaf1ba 3834
4ee9c684 3835 emit_call_1 (fun, NULL,
c87678e4 3836 get_identifier (XSTR (orgfun, 0)),
771d21fa 3837 build_function_type (tfom, NULL_TREE),
c87678e4 3838 original_args_size.constant, args_size.constant,
9bdaf1ba 3839 struct_value_size,
3840 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
16204096 3841 valreg,
87e19636 3842 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
9bdaf1ba 3843
3072d30e 3844 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3845 that it should complain if nonvolatile values are live. For
3846 functions that cannot return, inform flow that control does not
3847 fall through. */
644c283b 3848
4fec1d6c 3849 if (flags & ECF_NORETURN)
644c283b 3850 {
9239aee6 3851	      /* The barrier must be emitted
644c283b 3852 immediately after the CALL_INSN. Some ports emit more than
3853 just a CALL_INSN above, so we must search for it here. */
3854
3855 rtx last = get_last_insn ();
6d7dc5b9 3856 while (!CALL_P (last))
644c283b 3857 {
3858 last = PREV_INSN (last);
3859 /* There was no CALL_INSN? */
231bd014 3860 gcc_assert (last != before_call);
644c283b 3861 }
3862
9239aee6 3863 emit_barrier_after (last);
644c283b 3864 }
3865
9bdaf1ba 3866 /* Now restore inhibit_defer_pop to its actual original value. */
3867 OK_DEFER_POP;
3868
2c5d421b 3869	  /* If the call is cse'able, make the appropriate pair of reg-notes
 3870	     around it.  Test valreg so we don't crash; we may safely ignore
 3871	     `const' if the return type is void.  A PARALLEL return value is
 3872	     first copied into a pseudo via emit_group_store, as the notes need
 3873	     a single register.  */
2a0c81bf 3873 if (flags & ECF_LIBCALL_BLOCK)
2c5d421b 3874 {
2c5d421b 3875 rtx insns;
2c5d421b 3876
40651bac 3877 if (valreg == 0)
6e17d606 3878 {
3879 insns = get_insns ();
3880 end_sequence ();
31d3e01c 3881 emit_insn (insns);
6e17d606 3882 }
3883 else
3884 {
3885 rtx note = 0;
40651bac 3886 rtx temp;
6e17d606 3887 int i;
2c5d421b 3888
40651bac 3889 if (GET_CODE (valreg) == PARALLEL)
3890 {
3891 temp = gen_reg_rtx (outmode);
0d568ddf 3892 emit_group_store (temp, valreg, NULL_TREE,
4c3a0ea5 3893 GET_MODE_SIZE (outmode));
40651bac 3894 valreg = temp;
3895 }
3896
3897 temp = gen_reg_rtx (GET_MODE (valreg));
3898
6e17d606 3899 /* Construct an "equal form" for the value which mentions all the
3900 arguments in order as well as the function name. */
3901 for (i = 0; i < nargs; i++)
3902 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3903 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
2c5d421b 3904
6e17d606 3905 insns = get_insns ();
3906 end_sequence ();
2c5d421b 3907
6e17d606 3908 if (flags & ECF_PURE)
3909 note = gen_rtx_EXPR_LIST (VOIDmode,
3910 gen_rtx_USE (VOIDmode,
3911 gen_rtx_MEM (BLKmode,
3912 gen_rtx_SCRATCH (VOIDmode))),
3913 note);
3914
3915 emit_libcall_block (insns, temp, valreg, note);
2c5d421b 3916
6e17d606 3917 valreg = temp;
3918 }
2c5d421b 3919 }
9bdaf1ba 3920 pop_temp_slots ();
3921
3922 /* Copy the value to the right place. */
20f7032f 3923 if (outmode != VOIDmode && retval)
9bdaf1ba 3924 {
3925 if (mem_value)
3926 {
3927 if (value == 0)
3928 value = mem_value;
3929 if (value != mem_value)
3930 emit_move_insn (value, mem_value);
3931 }
40651bac 3932 else if (GET_CODE (valreg) == PARALLEL)
3933 {
3934 if (value == 0)
3935 value = gen_reg_rtx (outmode);
4c3a0ea5 3936 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
40651bac 3937 }
9bdaf1ba 3938 else
4e1a3169 3939 {
3940 /* Convert to the proper mode if PROMOTE_MODE has been active. */
3941 if (GET_MODE (valreg) != outmode)
3942 {
3943 int unsignedp = TYPE_UNSIGNED (tfom);
3944
3945 gcc_assert (targetm.calls.promote_function_return (tfom));
3946 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
3947 == GET_MODE (valreg));
3948
3949 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3950 }
3951
3952 if (value != 0)
3953 emit_move_insn (value, valreg);
3954 else
3955 value = valreg;
3956 }
9bdaf1ba 3957 }
3958
4448f543 3959 if (ACCUMULATE_OUTGOING_ARGS)
9bdaf1ba 3960 {
4448f543 3961#ifdef REG_PARM_STACK_SPACE
3962 if (save_area)
6e96b626 3963 restore_fixed_argument_area (save_area, argblock,
3964 high_to_save, low_to_save);
9bdaf1ba 3965#endif
c87678e4 3966
4448f543 3967 /* If we saved any argument areas, restore them. */
3968 for (count = 0; count < nargs; count++)
3969 if (argvec[count].save_area)
3970 {
3971 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
241399f6 3972 rtx adr = plus_constant (argblock,
3973 argvec[count].locate.offset.constant);
3974 rtx stack_area = gen_rtx_MEM (save_mode,
3975 memory_address (save_mode, adr));
4448f543 3976
f9c6a9c3 3977 if (save_mode == BLKmode)
3978 emit_block_move (stack_area,
a0c938f0 3979 validize_mem (argvec[count].save_area),
f9c6a9c3 3980 GEN_INT (argvec[count].locate.size.constant),
3981 BLOCK_OP_CALL_PARM);
3982 else
3983 emit_move_insn (stack_area, argvec[count].save_area);
4448f543 3984 }
9bdaf1ba 3985
4448f543 3986 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3987 stack_usage_map = initial_stack_usage_map;
3988 }
b39693dd 3989
a331ea1b 3990 if (stack_usage_map_buf)
3991 free (stack_usage_map_buf);
3992
20f7032f 3993 return value;
3994
3995}
3996\f
3997/* Output a library call to function FUN (a SYMBOL_REF rtx)
3999 for a value of mode OUTMODE,
4000 with NARGS different arguments, passed as alternating rtx values
4001 and machine_modes to convert them to.
20f7032f 4002
0ba5f96c 4003 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4004 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4005 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
 4006	   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
 4007	   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
 4008	   or another LCT_ value for other types of library calls.  */
20f7032f 4009
4010void
ee582a61 4011emit_library_call (rtx orgfun, enum libcall_type fn_type,
4012 enum machine_mode outmode, int nargs, ...)
20f7032f 4013{
ee582a61 4014 va_list p;
4c9e08a4 4015
ee582a61 4016 va_start (p, nargs);
26dfc457 4017 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
ee582a61 4018 va_end (p);
20f7032f 4019}
4020\f
4021/* Like emit_library_call except that an extra argument, VALUE,
4022 comes second and says where to store the result.
4023 (If VALUE is zero, this function chooses a convenient way
4024 to return the value.
4025
4026 This function returns an rtx for where the value is to be found.
4027 If VALUE is nonzero, VALUE is returned. */
4028
4029rtx
ee582a61 4030emit_library_call_value (rtx orgfun, rtx value,
4031 enum libcall_type fn_type,
4032 enum machine_mode outmode, int nargs, ...)
20f7032f 4033{
7ad77798 4034 rtx result;
ee582a61 4035 va_list p;
4c9e08a4 4036
ee582a61 4037 va_start (p, nargs);
7ad77798 4038 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4039 nargs, p);
ee582a61 4040 va_end (p);
20f7032f 4041
7ad77798 4042 return result;
8ddf1c7e 4043}
4044\f
66d433c7 4045/* Store a single argument for a function call
4046 into the register or memory area where it must be passed.
4047 *ARG describes the argument value and where to pass it.
4048
4049 ARGBLOCK is the address of the stack-block for all the arguments,
f9e15121 4050 or 0 on a machine where arguments are pushed individually.
66d433c7 4051
 4052	   FLAGS is the ECF_* flag mask for this call; ECF_MAY_BE_ALLOCA in it
c87678e4 4053	   says this could be a call to `alloca', so we must be careful about
 4054	   how the stack is used.
66d433c7 4055
 4056	   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 4057	   argument stack.  This is used with ACCUMULATE_OUTGOING_ARGS to indicate
 4058	   that we need not worry about saving and restoring the stack.
c87678e4 4060
d10cfa8d 4061 Return nonzero if this arg should cause sibcall failure,
57679d39 4062 zero otherwise. */
66d433c7 4063
57679d39 4064static int
4c9e08a4 4065store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4066 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
66d433c7 4067{
19cb6b50 4068 tree pval = arg->tree_value;
66d433c7 4069 rtx reg = 0;
4070 int partial = 0;
4071 int used = 0;
df9f2bb6 4072 int i, lower_bound = 0, upper_bound = 0;
57679d39 4073 int sibcall_failure = 0;
66d433c7 4074
4075 if (TREE_CODE (pval) == ERROR_MARK)
57679d39 4076 return 1;
66d433c7 4077
1b117c60 4078 /* Push a new temporary level for any temporaries we make for
4079 this argument. */
4080 push_temp_slots ();
4081
02510658 4082 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
66d433c7 4083 {
4448f543 4084 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4085 save any previous data at that location. */
4086 if (argblock && ! variable_size && arg->stack)
4087 {
66d433c7 4088#ifdef ARGS_GROW_DOWNWARD
4448f543 4089 /* stack_slot is negative, but we want to index stack_usage_map
4090 with positive values. */
4091 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4092 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4093 else
4094 upper_bound = 0;
66d433c7 4095
241399f6 4096 lower_bound = upper_bound - arg->locate.size.constant;
66d433c7 4097#else
4448f543 4098 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4099 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4100 else
4101 lower_bound = 0;
66d433c7 4102
241399f6 4103 upper_bound = lower_bound + arg->locate.size.constant;
66d433c7 4104#endif
4105
fd2c0c1d 4106 i = lower_bound;
4107 /* Don't worry about things in the fixed argument area;
4108 it has already been saved. */
4109 if (i < reg_parm_stack_space)
4110 i = reg_parm_stack_space;
4111 while (i < upper_bound && stack_usage_map[i] == 0)
4112 i++;
66d433c7 4113
fd2c0c1d 4114 if (i < upper_bound)
66d433c7 4115 {
241399f6 4116 /* We need to make a save area. */
4117 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4118 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4119 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4120 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4448f543 4121
4122 if (save_mode == BLKmode)
4123 {
387bc205 4124 tree ot = TREE_TYPE (arg->tree_value);
4125 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4126 | TYPE_QUAL_CONST));
4127
4128 arg->save_area = assign_temp (nt, 0, 1, 1);
4448f543 4129 preserve_temp_slots (arg->save_area);
4130 emit_block_move (validize_mem (arg->save_area), stack_area,
c2ca1bab 4131 GEN_INT (arg->locate.size.constant),
0378dbdc 4132 BLOCK_OP_CALL_PARM);
4448f543 4133 }
4134 else
4135 {
4136 arg->save_area = gen_reg_rtx (save_mode);
4137 emit_move_insn (arg->save_area, stack_area);
4138 }
66d433c7 4139 }
4140 }
4141 }
b3caaea3 4142
66d433c7 4143 /* If this isn't going to be placed on both the stack and in registers,
4144 set up the register and number of words. */
4145 if (! arg->pass_on_stack)
04d6fcf8 4146 {
4147 if (flags & ECF_SIBCALL)
4148 reg = arg->tail_call_reg;
4149 else
4150 reg = arg->reg;
4151 partial = arg->partial;
4152 }
66d433c7 4153
231bd014 4154 /* Being passed entirely in a register. We shouldn't be called in
4155 this case. */
4156 gcc_assert (reg == 0 || partial != 0);
a0c938f0 4157
f28c7a75 4158 /* If this arg needs special alignment, don't load the registers
4159 here. */
4160 if (arg->n_aligned_regs != 0)
4161 reg = 0;
c87678e4 4162
f28c7a75 4163 /* If this is being passed partially in a register, we can't evaluate
66d433c7 4164 it directly into its stack slot. Otherwise, we can. */
4165 if (arg->value == 0)
f848041f 4166 {
f848041f 4167 /* stack_arg_under_construction is nonzero if a function argument is
4168 being evaluated directly into the outgoing argument list and
4169 expand_call must take special action to preserve the argument list
4170 if it is called recursively.
4171
4172 For scalar function arguments stack_usage_map is sufficient to
4173 determine which stack slots must be saved and restored. Scalar
4174 arguments in general have pass_on_stack == 0.
4175
4176 If this argument is initialized by a function which takes the
4177 address of the argument (a C++ constructor or a C function
4178 returning a BLKmode structure), then stack_usage_map is
4179 insufficient and expand_call must push the stack around the
4180 function call. Such arguments have pass_on_stack == 1.
4181
4182 Note that it is always safe to set stack_arg_under_construction,
4183 but this generates suboptimal code if set when not needed. */
4184
4185 if (arg->pass_on_stack)
4186 stack_arg_under_construction++;
4448f543 4187
7dbf1af4 4188 arg->value = expand_expr (pval,
4189 (partial
4190 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4191 ? NULL_RTX : arg->stack,
a35a63ff 4192 VOIDmode, EXPAND_STACK_PARM);
1c0c37a5 4193
 4194	      /* If the mode doesn't agree (because we promoted the object, or
 4195		 for any other reason), convert it.  */
4196
1560ef8f 4197 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4198 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4199 arg->value, arg->unsignedp);
1c0c37a5 4200
f848041f 4201 if (arg->pass_on_stack)
4202 stack_arg_under_construction--;
f848041f 4203 }
66d433c7 4204
63864e1c 4205 /* Check for overlap with already clobbered argument area. */
ff6c0ab2 4206 if ((flags & ECF_SIBCALL)
4207 && MEM_P (arg->value)
4208 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4209 arg->locate.size.constant))
4210 sibcall_failure = 1;
63864e1c 4211
66d433c7 4212 /* Don't allow anything left on stack from computation
4213 of argument to alloca. */
02510658 4214 if (flags & ECF_MAY_BE_ALLOCA)
66d433c7 4215 do_pending_stack_adjust ();
4216
4217 if (arg->value == arg->stack)
8a06f2d4 4218 /* If the value is already in the stack slot, we are done. */
4219 ;
1c0c37a5 4220 else if (arg->mode != BLKmode)
66d433c7 4221 {
19cb6b50 4222 int size;
851fc2b3 4223 unsigned int parm_align;
66d433c7 4224
4225 /* Argument is a scalar, not entirely passed in registers.
4226 (If part is passed in registers, arg->partial says how much
4227 and emit_push_insn will take care of putting it there.)
c87678e4 4228
66d433c7 4229 Push it, and if its size is less than the
4230 amount of space allocated to it,
4231 also bump stack pointer by the additional space.
4232 Note that in C the default argument promotions
4233 will prevent such mismatches. */
4234
1c0c37a5 4235 size = GET_MODE_SIZE (arg->mode);
66d433c7 4236 /* Compute how much space the push instruction will push.
4237 On many machines, pushing a byte will advance the stack
4238 pointer by a halfword. */
4239#ifdef PUSH_ROUNDING
4240 size = PUSH_ROUNDING (size);
4241#endif
4242 used = size;
4243
4244 /* Compute how much space the argument should get:
4245 round up to a multiple of the alignment for arguments. */
1c0c37a5 4246 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
66d433c7 4247 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4248 / (PARM_BOUNDARY / BITS_PER_UNIT))
4249 * (PARM_BOUNDARY / BITS_PER_UNIT));
4250
851fc2b3 4251 /* Compute the alignment of the pushed argument. */
4252 parm_align = arg->locate.boundary;
4253 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4254 {
4255 int pad = used - size;
4256 if (pad)
4257 {
4258 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4259 parm_align = MIN (parm_align, pad_align);
4260 }
4261 }
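      /* pad & -pad isolates the lowest set bit of the padding: e.g. a
	 4-byte value padded to 16 bytes gives pad == 12, whose lowest set
	 bit is 4, so the value itself is only known to be 32-bit
	 aligned.  */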
4262
66d433c7 4263 /* This isn't already where we want it on the stack, so put it there.
4264 This can either be done with push or copy insns. */
4c9e08a4 4265 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
851fc2b3 4266 parm_align, partial, reg, used - size, argblock,
241399f6 4267 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4268 ARGS_SIZE_RTX (arg->locate.alignment_pad));
d5c9a99f 4269
4270 /* Unless this is a partially-in-register argument, the argument is now
4271 in the stack. */
4272 if (partial == 0)
4273 arg->value = arg->stack;
66d433c7 4274 }
4275 else
4276 {
4277 /* BLKmode, at least partly to be pushed. */
4278
cf78c9ff 4279 unsigned int parm_align;
19cb6b50 4280 int excess;
66d433c7 4281 rtx size_rtx;
4282
4283 /* Pushing a nonscalar.
4284 If part is passed in registers, PARTIAL says how much
4285 and emit_push_insn will take care of putting it there. */
4286
4287 /* Round its size up to a multiple
4288 of the allocation unit for arguments. */
4289
241399f6 4290 if (arg->locate.size.var != 0)
66d433c7 4291 {
4292 excess = 0;
241399f6 4293 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
66d433c7 4294 }
4295 else
4296 {
f054eb3c 4297 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4298 for BLKmode is careful to avoid it. */
4299 excess = (arg->locate.size.constant
4300 - int_size_in_bytes (TREE_TYPE (pval))
4301 + partial);
623282b0 4302 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4303 NULL_RTX, TYPE_MODE (sizetype), 0);
66d433c7 4304 }
4305
c5dc0c32 4306 parm_align = arg->locate.boundary;
cf78c9ff 4307
4308 /* When an argument is padded down, the block is aligned to
4309 PARM_BOUNDARY, but the actual argument isn't. */
4310 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4311 {
241399f6 4312 if (arg->locate.size.var)
cf78c9ff 4313 parm_align = BITS_PER_UNIT;
4314 else if (excess)
4315 {
28397255 4316 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
cf78c9ff 4317 parm_align = MIN (parm_align, excess_align);
4318 }
4319 }
4320
e16ceb8e 4321 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
57679d39 4322 {
4323 /* emit_push_insn might not work properly if arg->value and
241399f6 4324 argblock + arg->locate.offset areas overlap. */
57679d39 4325 rtx x = arg->value;
4326 int i = 0;
4327
4328 if (XEXP (x, 0) == current_function_internal_arg_pointer
4329 || (GET_CODE (XEXP (x, 0)) == PLUS
4330 && XEXP (XEXP (x, 0), 0) ==
4331 current_function_internal_arg_pointer
4332 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4333 {
4334 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4335 i = INTVAL (XEXP (XEXP (x, 0), 1));
4336
21dda4ee 4337 /* expand_call should ensure this. */
231bd014 4338 gcc_assert (!arg->locate.offset.var
2ad152f7 4339 && arg->locate.size.var == 0
231bd014 4340 && GET_CODE (size_rtx) == CONST_INT);
57679d39 4341
241399f6 4342 if (arg->locate.offset.constant > i)
57679d39 4343 {
241399f6 4344 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
57679d39 4345 sibcall_failure = 1;
4346 }
241399f6 4347 else if (arg->locate.offset.constant < i)
57679d39 4348 {
2ad152f7 4349 /* Use arg->locate.size.constant instead of size_rtx
4350 because we only care about the part of the argument
4351 on the stack. */
4352 if (i < (arg->locate.offset.constant
4353 + arg->locate.size.constant))
4354 sibcall_failure = 1;
4355 }
4356 else
4357 {
4358 /* Even though they appear to be at the same location,
4359 if part of the outgoing argument is in registers,
4360 they aren't really at the same location. Check for
4361 this by making sure that the incoming size is the
4362 same as the outgoing size. */
4363 if (arg->locate.size.constant != INTVAL (size_rtx))
57679d39 4364 sibcall_failure = 1;
4365 }
4366 }
4367 }
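	  /* Sketch of the overlap test: if the incoming value sits at
	     arg_pointer + 4 (i == 4) and is to be pushed at offset 0 with
	     size 8, the source would be clobbered partway through the
	     copy, so the sibcall is abandoned.  */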
4368
1c0c37a5 4369 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
cf78c9ff 4370 parm_align, partial, reg, excess, argblock,
241399f6 4371 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4372 ARGS_SIZE_RTX (arg->locate.alignment_pad));
66d433c7 4373
d5c9a99f 4374 /* Unless this is a partially-in-register argument, the argument is now
4375 in the stack.
66d433c7 4376
d5c9a99f 4377 ??? Unlike the case above, in which we want the actual
4378 address of the data, so that we can load it directly into a
4379 register, here we want the address of the stack slot, so that
4380 it's properly aligned for word-by-word copying or something
4381 like that. It's not clear that this is always correct. */
4382 if (partial == 0)
4383 arg->value = arg->stack_slot;
4384 }
b600a907 4385
4386 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4387 {
4388 tree type = TREE_TYPE (arg->tree_value);
4389 arg->parallel_value
4390 = emit_group_load_into_temps (arg->reg, arg->value, type,
4391 int_size_in_bytes (type));
4392 }
66d433c7 4393
a35a63ff 4394 /* Mark all slots this store used. */
4395 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4396 && argblock && ! variable_size && arg->stack)
4397 for (i = lower_bound; i < upper_bound; i++)
4398 stack_usage_map[i] = 1;
4399
66d433c7 4400 /* Once we have pushed something, pops can't safely
4401 be deferred during the rest of the arguments. */
4402 NO_DEFER_POP;
4403
148b08de 4404 /* Free any temporary slots made in processing this argument. Show
4405 that we might have taken the address of something and pushed that
4406 as an operand. */
4407 preserve_temp_slots (NULL_RTX);
66d433c7 4408 free_temp_slots ();
1b117c60 4409 pop_temp_slots ();
57679d39 4410
4411 return sibcall_failure;
66d433c7 4412}
890f0c17 4413
0336f0f0 4414/* Nonzero if we do not know how to pass TYPE solely in registers. */
890f0c17 4415
0336f0f0 4416bool
4417must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 4418 const_tree type)
0336f0f0 4419{
4420 if (!type)
4421 return false;
4422
4423 /* If the type has variable size... */
4424 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4425 return true;
890f0c17 4426
0336f0f0 4427 /* If the type is marked as addressable (it is required
4428 to be constructed into the stack)... */
4429 if (TREE_ADDRESSABLE (type))
4430 return true;
4431
4432 return false;
4433}
890f0c17 4434
0d568ddf 4435/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
0336f0f0 4436 takes trailing padding of a structure into account. */
4437/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
890f0c17 4438
4439bool
fb80456a 4440must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
890f0c17 4441{
4442 if (!type)
dceaa0b1 4443 return false;
890f0c17 4444
4445 /* If the type has variable size... */
4446 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4447 return true;
4448
4449 /* If the type is marked as addressable (it is required
4450 to be constructed into the stack)... */
4451 if (TREE_ADDRESSABLE (type))
4452 return true;
4453
4454 /* If the padding and mode of the type is such that a copy into
4455 a register would put it into the wrong part of the register. */
4456 if (mode == BLKmode
4457 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4458 && (FUNCTION_ARG_PADDING (mode, type)
4459 == (BYTES_BIG_ENDIAN ? upward : downward)))
4460 return true;
4461
4462 return false;
4463}
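
/* Example of the padding test above: on a big-endian 32-bit target, a
   6-byte BLKmode struct that is padded upward would have its bytes end
   up in the wrong part of a register if copied into one, so it must be
   passed on the stack.  */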