/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "debug.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
#include "tree-flow.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
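
/* Illustration (added; values hypothetical): for a 16-byte BLKmode
   struct with REG != 0 and PARTIAL == 8, the first 8 bytes travel in
   registers while the remaining 8 bytes go to the stack slot described
   by LOCATE; PARTIAL == 0 with REG != 0 would mean the whole argument
   is passed in registers.  */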

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location has already had a tail call argument
   stored into it.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its caller's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
               && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      rtx chain;

      gcc_assert (fndecl);
      chain = targetm.calls.static_chain (fndecl, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        use_reg (call_fusage, chain);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif
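
  /* Clarifying note (added): N_POPPED is the number of bytes the callee
     itself removes from the stack on return.  For example, an i386
     stdcall function pops its own arguments and so yields a nonzero
     n_popped here, while a cdecl function leaves the cleanup to the
     caller.  */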

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     GEN_INT (struct_value_size)));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a const or pure call that may loop, then set the insn's
     bit for that.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Record debug information for virtual calls.  */
  if (flag_enable_icf_debug && fndecl == NULL)
    (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
                                        INSN_UID (call_insn));

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similarly to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
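
/* Note added for exposition: the pending_stack_adjust path above is what
   makes -fdefer-pop useful; several consecutive calls can share a single
   stack-pointer correction, e.g. two calls that each pushed 16 bytes of
   arguments can be cleaned up by one 32-byte adjustment.  */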

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
        {
          if (name[1] == '_'
              && name[2] == 'b'
              && !strncmp (name + 3, "uiltin_", 7))
            tname += 10;
          else if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }
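      /* Illustration (added; names hypothetical): "__builtin_setjmp" and
         "__setjmp" both reduce to "setjmp" at this point, and "_longjmp"
         reduces to "longjmp", so the comparisons below see the bare
         name.  */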

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const_gimple stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}
9a7ecb49 572/* Return true when exp contains alloca call. */
75a70cf9 573
9a7ecb49 574bool
5d1b319b 575alloca_call_p (const_tree exp)
9a7ecb49 576{
577 if (TREE_CODE (exp) == CALL_EXPR
c2f47e15 578 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
579 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
580 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
581 & ECF_MAY_BE_ALLOCA))
9a7ecb49 582 return true;
583 return false;
584}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  return flags;
}
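
/* Illustrative note (added): for a declaration such as

     extern int isqrt (int) __attribute__ ((const, nothrow));

   the routine above returns ECF_CONST | ECF_NOTHROW, since the `const'
   attribute sets TREE_READONLY and `nothrow' sets TREE_NOTHROW on the
   decl.  */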

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
                    > COSTS_N_INSNS (1)
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

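/* Background note (added): REG_PARM_STACK_SPACE is the number of bytes of
   stack a target reserves for arguments that are nominally passed in
   registers; the 32-byte register "home area" of the 64-bit Windows ABI
   is one well-known instance of such a convention.  The helpers below
   save and restore that fixed area around a call that might reuse it.  */
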
/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
\f
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
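        /* Worked example (added; target values assumed): for a 3-byte
           struct on a big-endian machine with 32-bit words, this gives
           endian_correction = 32 - 24 = 8, so the 24 significant bits
           are placed below the 8 empty high-order bits of the word.  */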

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register, later passes know that the first AND to zero
               out the bitfield being set in the register is unnecessary.
               The store of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  location_t loc = EXPR_LOCATION (exp);
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);
        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j += inc;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j += inc;
      }
  }
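  /* For example (added illustration): when the target asks for complex
     argument splitting, a single _Complex double actual occupies two
     consecutive ARGS slots here, its real part followed by its imaginary
     part, and each half is laid out independently below.  */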

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
          && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base;

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && TREE_CODE (base) != SSA_NAME
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
                                                argpos < n_named_args);

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwinded before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, mode, type,
                                                 argpos < n_named_args);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
                                          type, argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
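      /* Worked example (added; numbers hypothetical): with
         args_size->constant == 20, stack_pointer_delta == 4 and a 16-byte
         preferred boundary, this computes ((20 + 4 + 15) / 16) * 16 - 4
         = 32 - 4 = 28, so after pushing 28 bytes the stack pointer is
         16-byte aligned again.  */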

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}

/* Precompute parameters as needed for a function call.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      enum machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        {
          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
        }
    }
}
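
/* Note added for exposition: the SUBREG_PROMOTED_VAR_P marking above
   matters on targets that widen small integer arguments: for example, on
   a 64-bit target that promotes SImode arguments to DImode registers,
   INITIAL_VALUE is kept as an SImode SUBREG of the DImode pseudo so that
   later CSE still sees the declared mode.  */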

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          enum machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (CONST_INT_P (offset))
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
                                            MODE_INT, 1);
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (CONST_INT_P (offset))
            {
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
            }
          set_mem_align (args[i].stack, align);

          if (CONST_INT_P (slot_offset))
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
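          /* Note (added): alias set 0 is the set that conflicts with
             every other alias set, so the two assignments above force the
             optimizers to keep argument reads ordered with respect to
             these outgoing stores.  */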
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();        /* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

ff6c0ab2 1538/* Return true if and only if SIZE storage units (usually bytes)
 1539 starting from address ADDR overlap with the already-clobbered
 1540 argument area. This function is used to determine whether we
 1541 should give up a sibcall. */
1542
1543static bool
1544mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1545{
1546 HOST_WIDE_INT i;
1547
abe32cce 1548 if (addr == crtl->args.internal_arg_pointer)
ff6c0ab2 1549 i = 0;
1550 else if (GET_CODE (addr) == PLUS
abe32cce 1551 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
971ba038 1552 && CONST_INT_P (XEXP (addr, 1)))
ff6c0ab2 1553 i = INTVAL (XEXP (addr, 1));
e7ffa1e6 1554 /* Return true for arg pointer based indexed addressing. */
1555 else if (GET_CODE (addr) == PLUS
abe32cce 1556 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1557 || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
e7ffa1e6 1558 return true;
ff6c0ab2 1559 else
1560 return false;
1561
1562#ifdef ARGS_GROW_DOWNWARD
1563 i = -i - size;
1564#endif
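  /* I is now the index in STORED_ARGS_MAP of the first byte of the
     access; check each of the SIZE bytes against the bitmap of
     argument slots already overwritten.  */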
1565 if (size > 0)
1566 {
1567 unsigned HOST_WIDE_INT k;
1568
1569 for (k = 0; k < size; k++)
1570 if (i + k < stored_args_map->n_bits
1571 && TEST_BIT (stored_args_map, i + k))
1572 return true;
1573 }
1574
1575 return false;
1576}
1577
cde25025 1578/* Do the register loads required for any wholly-register parms or any
1579 parms which are passed both on the stack and in a register. Their
c87678e4 1580 expressions were already evaluated.
cde25025 1581
1582 Mark all register-parms as living through the call, putting these USE
4c9e08a4 1583 insns in the CALL_INSN_FUNCTION_USAGE field.
1584
dc537795 1585 When IS_SIBCALL, perform the check_sibcall_argument_overlap
42b11544 1586 checking, setting *SIBCALL_FAILURE if appropriate. */
cde25025 1587
1588static void
4c9e08a4 1589load_register_parameters (struct arg_data *args, int num_actuals,
1590 rtx *call_fusage, int flags, int is_sibcall,
1591 int *sibcall_failure)
cde25025 1592{
1593 int i, j;
1594
cde25025 1595 for (i = 0; i < num_actuals; i++)
cde25025 1596 {
0e0be288 1597 rtx reg = ((flags & ECF_SIBCALL)
1598 ? args[i].tail_call_reg : args[i].reg);
cde25025 1599 if (reg)
1600 {
5f4cd670 1601 int partial = args[i].partial;
1602 int nregs;
1603 int size = 0;
42b11544 1604 rtx before_arg = get_last_insn ();
83272ab4 1605 /* Set non-negative if we must move a word at a time, even if
 1606 just one word (e.g., partial == 4 && mode == DFmode). Set
1607 to -1 if we just use a normal move insn. This value can be
1608 zero if the argument is a zero size structure. */
5f4cd670 1609 nregs = -1;
f054eb3c 1610 if (GET_CODE (reg) == PARALLEL)
1611 ;
1612 else if (partial)
1613 {
1614 gcc_assert (partial % UNITS_PER_WORD == 0);
1615 nregs = partial / UNITS_PER_WORD;
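	      /* E.g. partial == 8 with 4-byte words gives nregs == 2:
		 the first two words go in registers, the rest of the
		 argument on the stack.  */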
1616 }
5f4cd670 1617 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1618 {
1619 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1620 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1621 }
1622 else
1623 size = GET_MODE_SIZE (args[i].mode);
cde25025 1624
1625 /* Handle calls that pass values in multiple non-contiguous
1626 locations. The Irix 6 ABI has examples of this. */
1627
1628 if (GET_CODE (reg) == PARALLEL)
b600a907 1629 emit_group_move (reg, args[i].parallel_value);
cde25025 1630
1631 /* If simple case, just do move. If normal partial, store_one_arg
1632 has already loaded the register for us. In all other cases,
1633 load the register(s) from memory. */
1634
8e67abab 1635 else if (nregs == -1)
1636 {
1637 emit_move_insn (reg, args[i].value);
5f4cd670 1638#ifdef BLOCK_REG_PADDING
8e67abab 1639 /* Handle case where we have a value that needs shifting
 1640 up to the msb, e.g. a QImode value and we're padding
1641 upward on a BYTES_BIG_ENDIAN machine. */
1642 if (size < UNITS_PER_WORD
1643 && (args[i].locate.where_pad
1644 == (BYTES_BIG_ENDIAN ? upward : downward)))
1645 {
8e67abab 1646 rtx x;
1647 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
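		  /* E.g. a QImode value (size == 1) on a 32-bit target:
		     shift == (4 - 1) * 8 == 24 bits up to the msb.  */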
b2abd798 1648
1649 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1650 report the whole reg as used. Strictly speaking, the
1651 call only uses SIZE bytes at the msb end, but it doesn't
1652 seem worth generating rtl to say that. */
1653 reg = gen_rtx_REG (word_mode, REGNO (reg));
92966f8b 1654 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
7016c612 1655 build_int_cst (NULL_TREE, shift),
7c446c95 1656 reg, 1);
b2abd798 1657 if (x != reg)
1658 emit_move_insn (reg, x);
8e67abab 1659 }
5f4cd670 1660#endif
8e67abab 1661 }
cde25025 1662
1663 /* If we have pre-computed the values to put in the registers in
1664 the case of non-aligned structures, copy them in now. */
1665
1666 else if (args[i].n_aligned_regs != 0)
1667 for (j = 0; j < args[i].n_aligned_regs; j++)
1668 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1669 args[i].aligned_regs[j]);
1670
1671 else if (partial == 0 || args[i].pass_on_stack)
5f4cd670 1672 {
1673 rtx mem = validize_mem (args[i].value);
1674
ff6c0ab2 1675 /* Check for overlap with already clobbered argument area. */
1676 if (is_sibcall
1677 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1678 size))
1679 *sibcall_failure = 1;
1680
5f4cd670 1681 /* Handle a BLKmode that needs shifting. */
8e67abab 1682 if (nregs == 1 && size < UNITS_PER_WORD
2c267f1a 1683#ifdef BLOCK_REG_PADDING
1684 && args[i].locate.where_pad == downward
1685#else
1686 && BYTES_BIG_ENDIAN
1687#endif
1688 )
5f4cd670 1689 {
1690 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1691 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1692 rtx x = gen_reg_rtx (word_mode);
1693 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
92966f8b 1694 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1695 : LSHIFT_EXPR;
5f4cd670 1696
1697 emit_move_insn (x, tem);
92966f8b 1698 x = expand_shift (dir, word_mode, x,
7016c612 1699 build_int_cst (NULL_TREE, shift),
7c446c95 1700 ri, 1);
5f4cd670 1701 if (x != ri)
1702 emit_move_insn (ri, x);
1703 }
1704 else
5f4cd670 1705 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1706 }
cde25025 1707
42b11544 1708 /* When a parameter is a block, and perhaps in other cases, it is
1709 possible that it did a load from an argument slot that was
6a8fa8e2 1710 already clobbered. */
42b11544 1711 if (is_sibcall
1712 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1713 *sibcall_failure = 1;
1714
cde25025 1715 /* Handle calls that pass values in multiple non-contiguous
1716 locations. The Irix 6 ABI has examples of this. */
1717 if (GET_CODE (reg) == PARALLEL)
1718 use_group_regs (call_fusage, reg);
1719 else if (nregs == -1)
1720 use_reg (call_fusage, reg);
c75d013c 1721 else if (nregs > 0)
1722 use_regs (call_fusage, REGNO (reg), nregs);
cde25025 1723 }
1724 }
1725}
1726
92e1ef5b 1727/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1728 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1729 bytes, then we would need to push some additional bytes to pad the
481feae3 1730 arguments. So, we compute an adjustment to the stack pointer for an
1731 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1732 bytes. Then, when the arguments are pushed the stack will be perfectly
1733 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1734 be popped after the call. Returns the adjustment. */
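/* A worked example: with a 16-byte preferred boundary,
   stack_pointer_delta == 12, unadjusted_args_size == 8 and
   pending_stack_adjust == 20, all 20 pending bytes can be popped
   (the adjustment returned is 20), since 12 - 20 + 8 == 0 leaves
   the stack perfectly aligned once the arguments are pushed.  */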
92e1ef5b 1735
481feae3 1736static int
4c9e08a4 1737combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1738 struct args_size *args_size,
38413c80 1739 unsigned int preferred_unit_stack_boundary)
92e1ef5b 1740{
1741 /* The number of bytes to pop so that the stack will be
1742 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1743 HOST_WIDE_INT adjustment;
1744 /* The alignment of the stack after the arguments are pushed, if we
 1745 just pushed the arguments without adjusting the stack here. */
38413c80 1746 unsigned HOST_WIDE_INT unadjusted_alignment;
92e1ef5b 1747
c87678e4 1748 unadjusted_alignment
92e1ef5b 1749 = ((stack_pointer_delta + unadjusted_args_size)
1750 % preferred_unit_stack_boundary);
1751
1752 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1753 as possible -- leaving just enough left to cancel out the
1754 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1755 PENDING_STACK_ADJUST is non-negative, and congruent to
1756 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1757
1758 /* Begin by trying to pop all the bytes. */
c87678e4 1759 unadjusted_alignment
1760 = (unadjusted_alignment
92e1ef5b 1761 - (pending_stack_adjust % preferred_unit_stack_boundary));
1762 adjustment = pending_stack_adjust;
1763 /* Push enough additional bytes that the stack will be aligned
1764 after the arguments are pushed. */
d3ef58ec 1765 if (preferred_unit_stack_boundary > 1)
1766 {
3dc35e62 1767 if (unadjusted_alignment > 0)
c87678e4 1768 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
d3ef58ec 1769 else
c87678e4 1770 adjustment += unadjusted_alignment;
d3ef58ec 1771 }
c87678e4 1772
92e1ef5b 1773 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1774 bytes after the call. The right number is the entire
1775 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1776 by the arguments in the first place. */
c87678e4 1777 args_size->constant
92e1ef5b 1778 = pending_stack_adjust - adjustment + unadjusted_args_size;
1779
481feae3 1780 return adjustment;
92e1ef5b 1781}
1782
7ecc63d3 1783/* Scan X expression if it does not dereference any argument slots
1784 we already clobbered by tail call arguments (as noted in stored_args_map
1785 bitmap).
d10cfa8d 1786 Return nonzero if X expression dereferences such argument slots,
7ecc63d3 1787 zero otherwise. */
1788
1789static int
4c9e08a4 1790check_sibcall_argument_overlap_1 (rtx x)
7ecc63d3 1791{
1792 RTX_CODE code;
1793 int i, j;
7ecc63d3 1794 const char *fmt;
1795
1796 if (x == NULL_RTX)
1797 return 0;
1798
1799 code = GET_CODE (x);
1800
1801 if (code == MEM)
ff6c0ab2 1802 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1803 GET_MODE_SIZE (GET_MODE (x)));
7ecc63d3 1804
c87678e4 1805 /* Scan all subexpressions. */
7ecc63d3 1806 fmt = GET_RTX_FORMAT (code);
1807 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1808 {
1809 if (*fmt == 'e')
c87678e4 1810 {
1811 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1812 return 1;
1813 }
7ecc63d3 1814 else if (*fmt == 'E')
c87678e4 1815 {
1816 for (j = 0; j < XVECLEN (x, i); j++)
1817 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1818 return 1;
1819 }
7ecc63d3 1820 }
1821 return 0;
7ecc63d3 1822}
1823
 1824 /* Scan the sequence after INSN to see whether it dereferences any
 1825 argument slots we have already clobbered by tail call arguments
42b11544 1826 (as noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP,
 1827 add ARG's stack slots to the stored_args_map bitmap afterwards
 1828 (when ARG is a register, MARK_STORED_ARGS_MAP should be 0). Return
 1829 nonzero if the sequence after INSN dereferences such argument slots, zero otherwise. */
7ecc63d3 1830
1831static int
4c9e08a4 1832check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
c87678e4 1833{
7ecc63d3 1834 int low, high;
1835
1836 if (insn == NULL_RTX)
1837 insn = get_insns ();
1838 else
1839 insn = NEXT_INSN (insn);
1840
1841 for (; insn; insn = NEXT_INSN (insn))
c87678e4 1842 if (INSN_P (insn)
1843 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
7ecc63d3 1844 break;
1845
42b11544 1846 if (mark_stored_args_map)
1847 {
db10eec8 1848#ifdef ARGS_GROW_DOWNWARD
241399f6 1849 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
db10eec8 1850#else
241399f6 1851 low = arg->locate.slot_offset.constant;
db10eec8 1852#endif
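      /* LOW is the bitmap index of the first byte this argument
	 occupies; mark all of its bytes so that later loads from this
	 area are caught by mem_overlaps_already_clobbered_arg_p.  */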
1853
241399f6 1854 for (high = low + arg->locate.size.constant; low < high; low++)
42b11544 1855 SET_BIT (stored_args_map, low);
1856 }
7ecc63d3 1857 return insn != NULL_RTX;
1858}
1859
05d18e8b 1860/* Given that a function returns a value of mode MODE at the most
1861 significant end of hard register VALUE, shift VALUE left or right
1862 as specified by LEFT_P. Return true if some action was needed. */
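/* For example, an SImode value returned at the most significant end
   of a 64-bit register needs a shift of 64 - 32 == 32 bits to move it
   to (or from) the least significant end.  */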
2c8ff1ed 1863
05d18e8b 1864bool
1865shift_return_value (enum machine_mode mode, bool left_p, rtx value)
2c8ff1ed 1866{
05d18e8b 1867 HOST_WIDE_INT shift;
1868
1869 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1870 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1871 if (shift == 0)
1872 return false;
1873
1874 /* Use ashr rather than lshr for right shifts. This is for the benefit
1875 of the MIPS port, which requires SImode values to be sign-extended
1876 when stored in 64-bit registers. */
1877 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1878 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1879 gcc_unreachable ();
1880 return true;
2c8ff1ed 1881}
1882
90af1361 1883/* If X is a likely-spilled register value, copy it to a pseudo
1884 register and return that register. Return X otherwise. */
1885
1886static rtx
1887avoid_likely_spilled_reg (rtx x)
1888{
f4e36c33 1889 rtx new_rtx;
90af1361 1890
1891 if (REG_P (x)
1892 && HARD_REGISTER_P (x)
1893 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
1894 {
1895 /* Make sure that we generate a REG rather than a CONCAT.
1896 Moves into CONCATs can need nontrivial instructions,
1897 and the whole point of this function is to avoid
1898 using the hard register directly in such a situation. */
1899 generating_concat_p = 0;
f4e36c33 1900 new_rtx = gen_reg_rtx (GET_MODE (x));
90af1361 1901 generating_concat_p = 1;
f4e36c33 1902 emit_move_insn (new_rtx, x);
1903 return new_rtx;
90af1361 1904 }
1905 return x;
1906}
1907
c2f47e15 1908/* Generate all the code for a CALL_EXPR exp
66d433c7 1909 and return an rtx for its value.
1910 Store the value in TARGET (specified as an rtx) if convenient.
1911 If the value is stored in TARGET then TARGET is returned.
1912 If IGNORE is nonzero, then we ignore the value of the function call. */
1913
1914rtx
4c9e08a4 1915expand_call (tree exp, rtx target, int ignore)
66d433c7 1916{
60ecc450 1917 /* Nonzero if we are currently expanding a call. */
1918 static int currently_expanding_call = 0;
1919
66d433c7 1920 /* RTX for the function to be called. */
1921 rtx funexp;
60ecc450 1922 /* Sequence of insns to perform a normal "call". */
1923 rtx normal_call_insns = NULL_RTX;
4ee9c684 1924 /* Sequence of insns to perform a tail "call". */
60ecc450 1925 rtx tail_call_insns = NULL_RTX;
66d433c7 1926 /* Data type of the function. */
1927 tree funtype;
915e81b8 1928 tree type_arg_types;
16c9337c 1929 tree rettype;
66d433c7 1930 /* Declaration of the function being called,
1931 or 0 if the function is computed (not known by name). */
1932 tree fndecl = 0;
e100aadc 1933 /* The type of the function being called. */
1934 tree fntype;
4ee9c684 1935 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
60ecc450 1936 int pass;
66d433c7 1937
1938 /* Register in which non-BLKmode value will be returned,
1939 or 0 if no value or if value is BLKmode. */
1940 rtx valreg;
1941 /* Address where we should return a BLKmode value;
1942 0 if value not BLKmode. */
1943 rtx structure_value_addr = 0;
1944 /* Nonzero if that address is being passed by treating it as
1945 an extra, implicit first parameter. Otherwise,
1946 it is passed by being copied directly into struct_value_rtx. */
1947 int structure_value_addr_parm = 0;
cd46caee 1948 /* Holds the value of implicit argument for the struct value. */
1949 tree structure_value_addr_value = NULL_TREE;
66d433c7 1950 /* Size of aggregate value wanted, or zero if none wanted
1951 or if we are using the non-reentrant PCC calling convention
1952 or expecting the value in registers. */
997d68fe 1953 HOST_WIDE_INT struct_value_size = 0;
66d433c7 1954 /* Nonzero if called function returns an aggregate in memory PCC style,
1955 by returning the address of where to find it. */
1956 int pcc_struct_value = 0;
45550790 1957 rtx struct_value = 0;
66d433c7 1958
1959 /* Number of actual parameters in this call, including struct value addr. */
1960 int num_actuals;
1961 /* Number of named args. Args after this are anonymous ones
1962 and they must all go on the stack. */
1963 int n_named_args;
cd46caee 1964 /* Number of complex actual arguments that need to be split. */
1965 int num_complex_actuals = 0;
66d433c7 1966
1967 /* Vector of information about each argument.
1968 Arguments are numbered in the order they will be pushed,
1969 not the order they are written. */
1970 struct arg_data *args;
1971
1972 /* Total size in bytes of all the stack-parms scanned so far. */
1973 struct args_size args_size;
0e0be288 1974 struct args_size adjusted_args_size;
66d433c7 1975 /* Size of arguments before any adjustments (such as rounding). */
cc45e5e8 1976 int unadjusted_args_size;
66d433c7 1977 /* Data on reg parms scanned so far. */
1978 CUMULATIVE_ARGS args_so_far;
1979 /* Nonzero if a reg parm has been scanned. */
1980 int reg_parm_seen;
66d433c7 1982
c87678e4 1983 /* Nonzero if we must avoid push-insns in the args for this call.
66d433c7 1984 If stack space is allocated for register parameters, but not by the
1985 caller, then it is preallocated in the fixed part of the stack frame.
1986 So the entire argument block must then be preallocated (i.e., we
1987 ignore PUSH_ROUNDING in that case). */
1988
4448f543 1989 int must_preallocate = !PUSH_ARGS;
66d433c7 1990
eb2f80f3 1991 /* Size of the stack reserved for parameter registers. */
2d7187c2 1992 int reg_parm_stack_space = 0;
1993
66d433c7 1994 /* Address of space preallocated for stack parms
1995 (on machines that lack push insns), or 0 if space not preallocated. */
1996 rtx argblock = 0;
1997
dfe08167 1998 /* Mask of ECF_ flags. */
1999 int flags = 0;
4448f543 2000#ifdef REG_PARM_STACK_SPACE
66d433c7 2001 /* Define the boundary of the register parm stack space that needs to be
6e96b626 2002 saved, if any. */
2003 int low_to_save, high_to_save;
66d433c7 2004 rtx save_area = 0; /* Place that it is saved */
2005#endif
2006
66d433c7 2007 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2008 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 2009 char *stack_usage_map_buf = NULL;
66d433c7 2010
9069face 2011 int old_stack_allocated;
2012
2013 /* State variables to track stack modifications. */
66d433c7 2014 rtx old_stack_level = 0;
9069face 2015 int old_stack_arg_under_construction = 0;
65dccdb1 2016 int old_pending_adj = 0;
66d433c7 2017 int old_inhibit_defer_pop = inhibit_defer_pop;
9069face 2018
2019 /* Some stack pointer alterations we make are performed via
2020 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2021 which we then also need to save/restore along the way. */
31d035ca 2022 int old_stack_pointer_delta = 0;
9069face 2023
60ecc450 2024 rtx call_fusage;
c2f47e15 2025 tree addr = CALL_EXPR_FN (exp);
19cb6b50 2026 int i;
92e1ef5b 2027 /* The alignment of the stack, in bits. */
38413c80 2028 unsigned HOST_WIDE_INT preferred_stack_boundary;
92e1ef5b 2029 /* The alignment of the stack, in bytes. */
38413c80 2030 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
4ee9c684 2031 /* The static chain value to use for this call. */
2032 rtx static_chain_value;
dfe08167 2033 /* See if this is a "nothrow" function call. */
2034 if (TREE_NOTHROW (exp))
2035 flags |= ECF_NOTHROW;
2036
4ee9c684 2037 /* See if we can find a DECL-node for the actual function, and get the
2038 function attributes (flags) from the function decl or type node. */
97a1590b 2039 fndecl = get_callee_fndecl (exp);
2040 if (fndecl)
66d433c7 2041 {
e100aadc 2042 fntype = TREE_TYPE (fndecl);
97a1590b 2043 flags |= flags_from_decl_or_type (fndecl);
66d433c7 2044 }
97a1590b 2045 else
8a8cdb8d 2046 {
16c9337c 2047 fntype = TREE_TYPE (TREE_TYPE (addr));
e100aadc 2048 flags |= flags_from_decl_or_type (fntype);
8a8cdb8d 2049 }
16c9337c 2050 rettype = TREE_TYPE (exp);
d490e2f2 2051
e100aadc 2052 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
45550790 2053
4a081ddd 2054 /* Warn if this value is an aggregate type,
2055 regardless of which calling convention we are using for it. */
16c9337c 2056 if (AGGREGATE_TYPE_P (rettype))
efb9d9ee 2057 warning (OPT_Waggregate_return, "function call has aggregate value");
4a081ddd 2058
9c2a0c05 2059 /* If the result of a non-looping pure or const function call is
2060 ignored (or void), and none of its arguments are volatile, we can
2061 avoid expanding the call and just evaluate the arguments for
2062 side-effects. */
4a081ddd 2063 if ((flags & (ECF_CONST | ECF_PURE))
9c2a0c05 2064 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
4a081ddd 2065 && (ignore || target == const0_rtx
16c9337c 2066 || TYPE_MODE (rettype) == VOIDmode))
4a081ddd 2067 {
2068 bool volatilep = false;
2069 tree arg;
cd46caee 2070 call_expr_arg_iterator iter;
4a081ddd 2071
cd46caee 2072 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2073 if (TREE_THIS_VOLATILE (arg))
4a081ddd 2074 {
2075 volatilep = true;
2076 break;
2077 }
2078
2079 if (! volatilep)
2080 {
cd46caee 2081 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2082 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
4a081ddd 2083 return const0_rtx;
2084 }
2085 }
2086
2d7187c2 2087#ifdef REG_PARM_STACK_SPACE
fa20f865 2088 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2d7187c2 2089#endif
2d7187c2 2090
fa20f865 2091 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 2092 && reg_parm_stack_space > 0 && PUSH_ARGS)
997d68fe 2093 must_preallocate = 1;
997d68fe 2094
66d433c7 2095 /* Set up a place to return a structure. */
2096
2097 /* Cater to broken compilers. */
4cd5bb61 2098 if (aggregate_value_p (exp, fntype))
66d433c7 2099 {
2100 /* This call returns a big structure. */
2dd6f9ed 2101 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
66d433c7 2102
2103#ifdef PCC_STATIC_STRUCT_RETURN
f49c64ba 2104 {
2105 pcc_struct_value = 1;
f49c64ba 2106 }
2107#else /* not PCC_STATIC_STRUCT_RETURN */
2108 {
16c9337c 2109 struct_value_size = int_size_in_bytes (rettype);
66d433c7 2110
ea523851 2111 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
f49c64ba 2112 structure_value_addr = XEXP (target, 0);
2113 else
2114 {
f49c64ba 2115 /* For variable-sized objects, we must be called with a target
2116 specified. If we were to allocate space on the stack here,
2117 we would have no way of knowing when to free it. */
16c9337c 2118 rtx d = assign_temp (rettype, 0, 1, 1);
66d433c7 2119
930f0e87 2120 mark_temp_addr_taken (d);
2121 structure_value_addr = XEXP (d, 0);
f49c64ba 2122 target = 0;
2123 }
2124 }
2125#endif /* not PCC_STATIC_STRUCT_RETURN */
66d433c7 2126 }
2127
0e0be288 2128 /* Figure out the amount to which the stack should be aligned. */
0e0be288 2129 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
28992b23 2130 if (fndecl)
2131 {
2132 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
9a27561f 2133 /* Without automatic stack alignment, we can't increase preferred
2134 stack boundary. With automatic stack alignment, it is
 2135 unnecessary, since unless we can guarantee that all callers will
 2136 align the outgoing stack properly, the callee has to align its
 2137 stack anyway. */
2138 if (i
2139 && i->preferred_incoming_stack_boundary
2140 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
28992b23 2141 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2142 }
0e0be288 2143
2144 /* Operand 0 is a pointer-to-function; get the type of the function. */
95672afe 2145 funtype = TREE_TYPE (addr);
231bd014 2146 gcc_assert (POINTER_TYPE_P (funtype));
0e0be288 2147 funtype = TREE_TYPE (funtype);
2148
cd46caee 2149 /* Count whether there are actual complex arguments that need to be split
2150 into their real and imaginary parts. Munge the type_arg_types
2151 appropriately here as well. */
92d40bc4 2152 if (targetm.calls.split_complex_arg)
915e81b8 2153 {
cd46caee 2154 call_expr_arg_iterator iter;
2155 tree arg;
2156 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2157 {
2158 tree type = TREE_TYPE (arg);
2159 if (type && TREE_CODE (type) == COMPLEX_TYPE
2160 && targetm.calls.split_complex_arg (type))
2161 num_complex_actuals++;
2162 }
915e81b8 2163 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
915e81b8 2164 }
2165 else
2166 type_arg_types = TYPE_ARG_TYPES (funtype);
2167
0e0be288 2168 if (flags & ECF_MAY_BE_ALLOCA)
18d50ae6 2169 cfun->calls_alloca = 1;
0e0be288 2170
2171 /* If struct_value_rtx is 0, it means pass the address
cd46caee 2172 as if it were an extra parameter. Put the argument expression
2173 in structure_value_addr_value. */
45550790 2174 if (structure_value_addr && struct_value == 0)
0e0be288 2175 {
2176 /* If structure_value_addr is a REG other than
 2177 virtual_outgoing_args_rtx, we can always use it. If it
2178 is not a REG, we must always copy it into a register.
2179 If it is virtual_outgoing_args_rtx, we must copy it to another
2180 register in some cases. */
8ad4c111 2181 rtx temp = (!REG_P (structure_value_addr)
0e0be288 2182 || (ACCUMULATE_OUTGOING_ARGS
2183 && stack_arg_under_construction
2184 && structure_value_addr == virtual_outgoing_args_rtx)
0d568ddf 2185 ? copy_addr_to_reg (convert_memory_address
e100aadc 2186 (Pmode, structure_value_addr))
0e0be288 2187 : structure_value_addr);
2188
cd46caee 2189 structure_value_addr_value =
2190 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
0e0be288 2191 structure_value_addr_parm = 1;
2192 }
2193
2194 /* Count the arguments and set NUM_ACTUALS. */
cd46caee 2195 num_actuals =
2196 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
0e0be288 2197
2198 /* Compute number of named args.
30a10006 2199 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2200
2201 if (type_arg_types != 0)
2202 n_named_args
2203 = (list_length (type_arg_types)
2204 /* Count the struct value address, if it is passed as a parm. */
2205 + structure_value_addr_parm);
2206 else
2207 /* If we know nothing, treat all args as named. */
2208 n_named_args = num_actuals;
2209
2210 /* Start updating where the next arg would go.
2211
2212 On some machines (such as the PA) indirect calls have a different
2213 calling convention than normal calls. The fourth argument in
2214 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2215 or not. */
2216 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2217
2218 /* Now possibly adjust the number of named args.
0e0be288 2219 Normally, don't include the last named arg if anonymous args follow.
8bdddbd1 2220 We do include the last named arg if
2221 targetm.calls.strict_argument_naming() returns nonzero.
0e0be288 2222 (If no anonymous args follow, the result of list_length is actually
2223 one too large. This is harmless.)
2224
a107cd89 2225 If targetm.calls.pretend_outgoing_varargs_named() returns
8bdddbd1 2226 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2227 this machine will be able to place unnamed args that were passed
2228 in registers into the stack. So treat all args as named. This
 2229 allows the insns emitted for a specific argument list to be
2230 independent of the function declaration.
a107cd89 2231
2232 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2233 we do not have any reliable way to pass unnamed args in
2234 registers, so we must force them into memory. */
0e0be288 2235
30a10006 2236 if (type_arg_types != 0
2237 && targetm.calls.strict_argument_naming (&args_so_far))
2238 ;
2239 else if (type_arg_types != 0
2240 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2241 /* Don't include the last named arg. */
2242 --n_named_args;
0e0be288 2243 else
30a10006 2244 /* Treat all args as named. */
0e0be288 2245 n_named_args = num_actuals;
2246
0e0be288 2247 /* Make a vector to hold all the information about each arg. */
364c0c59 2248 args = XALLOCAVEC (struct arg_data, num_actuals);
f0af5a88 2249 memset (args, 0, num_actuals * sizeof (struct arg_data));
0e0be288 2250
00dddcf2 2251 /* Build up entries in the ARGS array, compute the size of the
2252 arguments into ARGS_SIZE, etc. */
0e0be288 2253 initialize_argument_information (num_actuals, args, &args_size,
cd46caee 2254 n_named_args, exp,
d8b9c828 2255 structure_value_addr_value, fndecl, fntype,
0e0be288 2256 &args_so_far, reg_parm_stack_space,
2257 &old_stack_level, &old_pending_adj,
eaa112a0 2258 &must_preallocate, &flags,
4ee9c684 2259 &try_tail_call, CALL_FROM_THUNK_P (exp));
0e0be288 2260
2261 if (args_size.var)
2dd6f9ed 2262 must_preallocate = 1;
0e0be288 2263
2264 /* Now make final decision about preallocating stack space. */
2265 must_preallocate = finalize_must_preallocate (must_preallocate,
2266 num_actuals, args,
2267 &args_size);
2268
2269 /* If the structure value address will reference the stack pointer, we
2270 must stabilize it. We don't need to do this if we know that we are
2271 not going to adjust the stack pointer in processing this call. */
2272
2273 if (structure_value_addr
2274 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2275 || reg_mentioned_p (virtual_outgoing_args_rtx,
2276 structure_value_addr))
2277 && (args_size.var
2278 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2279 structure_value_addr = copy_to_reg (structure_value_addr);
60ecc450 2280
0d568ddf 2281 /* Tail calls can make things harder to debug, and we've traditionally
4f8af819 2282 pushed these optimizations into -O2. Don't try if we're already
fdf2b689 2283 expanding a call, as that means we're an argument. Don't try if
011e6b51 2284 there are cleanups, as we know there's code to follow the call. */
60ecc450 2285
0e0be288 2286 if (currently_expanding_call++ != 0
2287 || !flag_optimize_sibling_calls
4ee9c684 2288 || args_size.var
3072d30e 2289 || dbg_cnt (tail_call) == false)
4ee9c684 2290 try_tail_call = 0;
0e0be288 2291
 2292 /* Other reasons tail call optimization may fail. */
2293 if (
2294#ifdef HAVE_sibcall_epilogue
2295 !HAVE_sibcall_epilogue
2296#else
2297 1
2298#endif
2299 || !try_tail_call
2300 /* Doing sibling call optimization needs some work, since
2301 structure_value_addr can be allocated on the stack.
2302 It does not seem worth the effort since few optimizable
2303 sibling calls will return a structure. */
2304 || structure_value_addr != NULL_RTX
aa7aa403 2305#ifdef REG_PARM_STACK_SPACE
91ebded8 2306 /* If outgoing reg parm stack space changes, we can not do sibcall. */
2307 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2308 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2309 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
aa7aa403 2310#endif
805e22b2 2311 /* Check whether the target is able to optimize the call
2312 into a sibcall. */
883b2e73 2313 || !targetm.function_ok_for_sibcall (fndecl, exp)
805e22b2 2314 /* Functions that do not return exactly once may not be sibcall
a0c938f0 2315 optimized. */
4fec1d6c 2316 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
95672afe 2317 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
4c4a1039 2318 /* If the called function is nested in the current one, it might access
a0c938f0 2319 some of the caller's arguments, but could clobber them beforehand if
2320 the argument areas are shared. */
4c4a1039 2321 || (fndecl && decl_function_context (fndecl) == current_function_decl)
0e0be288 2322 /* If this function requires more stack slots than the current
99b442ff 2323 function, we cannot change it into a sibling call.
abe32cce 2324 crtl->args.pretend_args_size is not part of the
99b442ff 2325 stack allocated by our caller. */
abe32cce 2326 || args_size.constant > (crtl->args.size
2327 - crtl->args.pretend_args_size)
0e0be288 2328 /* If the callee pops its own arguments, then it must pop exactly
2329 the same number of arguments as the current function. */
f5bc28da 2330 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2331 != targetm.calls.return_pops_args (current_function_decl,
2332 TREE_TYPE (current_function_decl),
2333 crtl->args.size))
dc24ddbd 2334 || !lang_hooks.decls.ok_for_sibcall (fndecl))
8b1cb18e 2335 try_tail_call = 0;
4b066641 2336
4681dd41 2337 /* Check if caller and callee disagree in promotion of function
2338 return value. */
2339 if (try_tail_call)
2340 {
2341 enum machine_mode caller_mode, caller_promoted_mode;
2342 enum machine_mode callee_mode, callee_promoted_mode;
2343 int caller_unsignedp, callee_unsignedp;
2344 tree caller_res = DECL_RESULT (current_function_decl);
2345
2346 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3b2411a8 2347 caller_mode = DECL_MODE (caller_res);
4681dd41 2348 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3b2411a8 2349 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2350 caller_promoted_mode
2351 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2352 &caller_unsignedp,
2353 TREE_TYPE (current_function_decl), 1);
2354 callee_promoted_mode
c879dbcf 2355 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3b2411a8 2356 &callee_unsignedp,
c879dbcf 2357 funtype, 1);
4681dd41 2358 if (caller_mode != VOIDmode
2359 && (caller_promoted_mode != callee_promoted_mode
2360 || ((caller_mode != caller_promoted_mode
2361 || callee_mode != callee_promoted_mode)
2362 && (caller_unsignedp != callee_unsignedp
2363 || GET_MODE_BITSIZE (caller_mode)
2364 < GET_MODE_BITSIZE (callee_mode)))))
2365 try_tail_call = 0;
2366 }
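  /* For instance (a sketch): if both caller and callee promote an
     SImode result to DImode but with different signedness, the check
     above fires (caller_unsignedp != callee_unsignedp), since a
     sibcall would return high bits extended the wrong way for the
     caller's own callers.  */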
2367
755ece1f 2368 /* Ensure current function's preferred stack boundary is at least
2369 what we need. Stack alignment may also increase preferred stack
2370 boundary. */
54d759e3 2371 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
edb7afe8 2372 crtl->preferred_stack_boundary = preferred_stack_boundary;
755ece1f 2373 else
2374 preferred_stack_boundary = crtl->preferred_stack_boundary;
d0285dd8 2375
0e0be288 2376 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4b066641 2377
60ecc450 2378 /* We want to make two insn chains; one for a sibling call, the other
2379 for a normal call. We will select one of the two chains after
2380 initial RTL generation is complete. */
6e96b626 2381 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
60ecc450 2382 {
2383 int sibcall_failure = 0;
35a3065a 2384 /* We want to emit any pending stack adjustments before the tail
60ecc450 2385 recursion "call". That way we know any adjustment after the tail
0d568ddf 2386 recursion call can be ignored if we indeed use the tail
60ecc450 2387 call expansion. */
9a5bbcc2 2388 int save_pending_stack_adjust = 0;
2389 int save_stack_pointer_delta = 0;
60ecc450 2390 rtx insns;
c0e7e9f7 2391 rtx before_call, next_arg_reg, after_args;
1e2b2ab3 2392
60ecc450 2393 if (pass == 0)
2394 {
60ecc450 2395 /* State variables we need to save and restore between
2396 iterations. */
2397 save_pending_stack_adjust = pending_stack_adjust;
91b70175 2398 save_stack_pointer_delta = stack_pointer_delta;
60ecc450 2399 }
dfe08167 2400 if (pass)
2401 flags &= ~ECF_SIBCALL;
2402 else
2403 flags |= ECF_SIBCALL;
66d433c7 2404
60ecc450 2405 /* Other state variables that we must reinitialize each time
dfe08167 2406 through the loop (that are not initialized by the loop itself). */
60ecc450 2407 argblock = 0;
2408 call_fusage = 0;
2f921ec9 2409
c87678e4 2410 /* Start a new sequence for the normal call case.
66d433c7 2411
60ecc450 2412 From this point on, if the sibling call fails, we want to set
2413 sibcall_failure instead of continuing the loop. */
2414 start_sequence ();
412321ce 2415
60ecc450 2416 /* Don't let pending stack adjusts add up to too much.
2417 Also, do all pending adjustments now if there is any chance
2418 this might be a call to alloca or if we are expanding a sibling
ff3ae375 2419 call sequence.
82e95be3 2420 Also do the adjustments before a throwing call, otherwise
2421 exception handling can fail; PR 19225. */
60ecc450 2422 if (pending_stack_adjust >= 32
5edaabad 2423 || (pending_stack_adjust > 0
ff3ae375 2424 && (flags & ECF_MAY_BE_ALLOCA))
82e95be3 2425 || (pending_stack_adjust > 0
2426 && flag_exceptions && !(flags & ECF_NOTHROW))
60ecc450 2427 || pass == 0)
2428 do_pending_stack_adjust ();
66d433c7 2429
60ecc450 2430 /* Precompute any arguments as needed. */
02510658 2431 if (pass)
2dd6f9ed 2432 precompute_arguments (num_actuals, args);
66d433c7 2433
60ecc450 2434 /* Now we are about to start emitting insns that can be deleted
2435 if a libcall is deleted. */
2dd6f9ed 2436 if (pass && (flags & ECF_MALLOC))
60ecc450 2437 start_sequence ();
66d433c7 2438
edb7afe8 2439 if (pass == 0 && crtl->stack_protect_guard)
71d89928 2440 stack_protect_epilogue ();
2441
0e0be288 2442 adjusted_args_size = args_size;
481feae3 2443 /* Compute the actual size of the argument block required. The variable
2444 and constant sizes must be combined, the size may have to be rounded,
2445 and there may be a minimum required size. When generating a sibcall
2446 pattern, do not round up, since we'll be re-using whatever space our
2447 caller provided. */
2448 unadjusted_args_size
c87678e4 2449 = compute_argument_block_size (reg_parm_stack_space,
2450 &adjusted_args_size,
fa20f865 2451 fndecl, fntype,
481feae3 2452 (pass == 0 ? 0
2453 : preferred_stack_boundary));
2454
c87678e4 2455 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
481feae3 2456
02510658 2457 /* The argument block when performing a sibling call is the
a0c938f0 2458 incoming argument block. */
02510658 2459 if (pass == 0)
7ecc63d3 2460 {
27a7a23a 2461 argblock = crtl->args.internal_arg_pointer;
bd54bbc6 2462 argblock
2463#ifdef STACK_GROWS_DOWNWARD
abe32cce 2464 = plus_constant (argblock, crtl->args.pretend_args_size);
bd54bbc6 2465#else
abe32cce 2466 = plus_constant (argblock, -crtl->args.pretend_args_size);
bd54bbc6 2467#endif
7ecc63d3 2468 stored_args_map = sbitmap_alloc (args_size.constant);
2469 sbitmap_zero (stored_args_map);
2470 }
481feae3 2471
60ecc450 2472 /* If we have no actual push instructions, or shouldn't use them,
2473 make space for all args right now. */
0e0be288 2474 else if (adjusted_args_size.var != 0)
66d433c7 2475 {
60ecc450 2476 if (old_stack_level == 0)
2477 {
2478 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9069face 2479 old_stack_pointer_delta = stack_pointer_delta;
60ecc450 2480 old_pending_adj = pending_stack_adjust;
2481 pending_stack_adjust = 0;
60ecc450 2482 /* stack_arg_under_construction says whether a stack arg is
2483 being constructed at the old stack level. Pushing the stack
2484 gets a clean outgoing argument block. */
2485 old_stack_arg_under_construction = stack_arg_under_construction;
2486 stack_arg_under_construction = 0;
60ecc450 2487 }
0e0be288 2488 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
66d433c7 2489 }
60ecc450 2490 else
2491 {
2492 /* Note that we must go through the motions of allocating an argument
2493 block even if the size is zero because we may be storing args
2494 in the area reserved for register arguments, which may be part of
2495 the stack frame. */
7221f864 2496
0e0be288 2497 int needed = adjusted_args_size.constant;
66d433c7 2498
60ecc450 2499 /* Store the maximum argument space used. It will be pushed by
2500 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2501 checking). */
66d433c7 2502
abe32cce 2503 if (needed > crtl->outgoing_args_size)
2504 crtl->outgoing_args_size = needed;
66d433c7 2505
60ecc450 2506 if (must_preallocate)
2507 {
4448f543 2508 if (ACCUMULATE_OUTGOING_ARGS)
2509 {
02510658 2510 /* Since the stack pointer will never be pushed, it is
2511 possible for the evaluation of a parm to clobber
2512 something we have already written to the stack.
2513 Since most function calls on RISC machines do not use
2514 the stack, this is uncommon, but must work correctly.
7221f864 2515
4448f543 2516 Therefore, we save any area of the stack that was already
02510658 2517 written and that we are using. Here we set up to do this
2518 by making a new stack usage map from the old one. The
c87678e4 2519 actual save will be done by store_one_arg.
7221f864 2520
4448f543 2521 Another approach might be to try to reorder the argument
2522 evaluations to avoid this conflicting stack usage. */
7221f864 2523
02510658 2524 /* Since we will be writing into the entire argument area,
2525 the map must be allocated for its entire size, not just
2526 the part that is the responsibility of the caller. */
fa20f865 2527 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 2528 needed += reg_parm_stack_space;
66d433c7 2529
2530#ifdef ARGS_GROW_DOWNWARD
4448f543 2531 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2532 needed + 1);
66d433c7 2533#else
4448f543 2534 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2535 needed);
66d433c7 2536#endif
a331ea1b 2537 if (stack_usage_map_buf)
2538 free (stack_usage_map_buf);
4c36ffe6 2539 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 2540 stack_usage_map = stack_usage_map_buf;
66d433c7 2541
4448f543 2542 if (initial_highest_arg_in_use)
8e547276 2543 memcpy (stack_usage_map, initial_stack_usage_map,
2544 initial_highest_arg_in_use);
d1b03b62 2545
4448f543 2546 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 2547 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 2548 (highest_outgoing_arg_in_use
2549 - initial_highest_arg_in_use));
2550 needed = 0;
d1b03b62 2551
02510658 2552 /* The address of the outgoing argument list must not be
2553 copied to a register here, because argblock would be left
2554 pointing to the wrong place after the call to
c87678e4 2555 allocate_dynamic_stack_space below. */
d1b03b62 2556
4448f543 2557 argblock = virtual_outgoing_args_rtx;
c87678e4 2558 }
4448f543 2559 else
7221f864 2560 {
4448f543 2561 if (inhibit_defer_pop == 0)
60ecc450 2562 {
4448f543 2563 /* Try to reuse some or all of the pending_stack_adjust
481feae3 2564 to get this space. */
2565 needed
c87678e4 2566 = (combine_pending_stack_adjustment_and_call
481feae3 2567 (unadjusted_args_size,
0e0be288 2568 &adjusted_args_size,
481feae3 2569 preferred_unit_stack_boundary));
2570
2571 /* combine_pending_stack_adjustment_and_call computes
2572 an adjustment before the arguments are allocated.
2573 Account for them and see whether or not the stack
2574 needs to go up or down. */
2575 needed = unadjusted_args_size - needed;
2576
2577 if (needed < 0)
4448f543 2578 {
481feae3 2579 /* We're releasing stack space. */
2580 /* ??? We can avoid any adjustment at all if we're
2581 already aligned. FIXME. */
2582 pending_stack_adjust = -needed;
2583 do_pending_stack_adjust ();
4448f543 2584 needed = 0;
2585 }
c87678e4 2586 else
481feae3 2587 /* We need to allocate space. We'll do that in
2588 push_block below. */
2589 pending_stack_adjust = 0;
60ecc450 2590 }
481feae3 2591
2592 /* Special case this because overhead of `push_block' in
2593 this case is non-trivial. */
4448f543 2594 if (needed == 0)
2595 argblock = virtual_outgoing_args_rtx;
60ecc450 2596 else
ad3b56f3 2597 {
2598 argblock = push_block (GEN_INT (needed), 0, 0);
2599#ifdef ARGS_GROW_DOWNWARD
2600 argblock = plus_constant (argblock, needed);
2601#endif
2602 }
4448f543 2603
02510658 2604 /* We only really need to call `copy_to_reg' in the case
2605 where push insns are going to be used to pass ARGBLOCK
2606 to a function call in ARGS. In that case, the stack
2607 pointer changes value from the allocation point to the
2608 call point, and hence the value of
2609 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2610 as well always do it. */
4448f543 2611 argblock = copy_to_reg (argblock);
9069face 2612 }
2613 }
2614 }
60ecc450 2615
9069face 2616 if (ACCUMULATE_OUTGOING_ARGS)
2617 {
2618 /* The save/restore code in store_one_arg handles all
2619 cases except one: a constructor call (including a C
2620 function returning a BLKmode struct) to initialize
2621 an argument. */
2622 if (stack_arg_under_construction)
2623 {
63c68695 2624 rtx push_size
2625 = GEN_INT (adjusted_args_size.constant
fa20f865 2626 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
22c61100 2627 : TREE_TYPE (fndecl))) ? 0
63c68695 2628 : reg_parm_stack_space));
9069face 2629 if (old_stack_level == 0)
2630 {
2631 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2632 NULL_RTX);
2633 old_stack_pointer_delta = stack_pointer_delta;
2634 old_pending_adj = pending_stack_adjust;
2635 pending_stack_adjust = 0;
2636 /* stack_arg_under_construction says whether a stack
2637 arg is being constructed at the old stack level.
2638 Pushing the stack gets a clean outgoing argument
2639 block. */
2640 old_stack_arg_under_construction
2641 = stack_arg_under_construction;
2642 stack_arg_under_construction = 0;
2643 /* Make a new map for the new argument list. */
a331ea1b 2644 if (stack_usage_map_buf)
2645 free (stack_usage_map_buf);
43959b95 2646 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 2647 stack_usage_map = stack_usage_map_buf;
9069face 2648 highest_outgoing_arg_in_use = 0;
4448f543 2649 }
9069face 2650 allocate_dynamic_stack_space (push_size, NULL_RTX,
2651 BITS_PER_UNIT);
60ecc450 2652 }
a3585b90 2653
9069face 2654 /* If argument evaluation might modify the stack pointer,
2655 copy the address of the argument list to a register. */
2656 for (i = 0; i < num_actuals; i++)
2657 if (args[i].pass_on_stack)
2658 {
2659 argblock = copy_addr_to_reg (argblock);
2660 break;
2661 }
2662 }
4c9e08a4 2663
60ecc450 2664 compute_argument_addresses (args, argblock, num_actuals);
a3585b90 2665
60ecc450 2666 /* If we push args individually in reverse order, perform stack alignment
2667 before the first push (the last arg). */
4448f543 2668 if (PUSH_ARGS_REVERSED && argblock == 0
0e0be288 2669 && adjusted_args_size.constant != unadjusted_args_size)
ff92623c 2670 {
60ecc450 2671 /* When the stack adjustment is pending, we get better code
2672 by combining the adjustments. */
c87678e4 2673 if (pending_stack_adjust
60ecc450 2674 && ! inhibit_defer_pop)
481feae3 2675 {
2676 pending_stack_adjust
c87678e4 2677 = (combine_pending_stack_adjustment_and_call
481feae3 2678 (unadjusted_args_size,
0e0be288 2679 &adjusted_args_size,
481feae3 2680 preferred_unit_stack_boundary));
2681 do_pending_stack_adjust ();
2682 }
60ecc450 2683 else if (argblock == 0)
0e0be288 2684 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
60ecc450 2685 - unadjusted_args_size));
60ecc450 2686 }
fa4f1f09 2687 /* Now that the stack is properly aligned, pops can't safely
2688 be deferred during the evaluation of the arguments. */
2689 NO_DEFER_POP;
66d433c7 2690
95672afe 2691 funexp = rtx_for_function_call (fndecl, addr);
66d433c7 2692
60ecc450 2693 /* Figure out the register where the value, if any, will come back. */
2694 valreg = 0;
16c9337c 2695 if (TYPE_MODE (rettype) != VOIDmode
60ecc450 2696 && ! structure_value_addr)
2697 {
2698 if (pcc_struct_value)
16c9337c 2699 valreg = hard_function_value (build_pointer_type (rettype),
46b3ff29 2700 fndecl, NULL, (pass == 0));
60ecc450 2701 else
16c9337c 2702 valreg = hard_function_value (rettype, fndecl, fntype,
46b3ff29 2703 (pass == 0));
2d329930 2704
2705 /* If VALREG is a PARALLEL whose first member has a zero
2706 offset, use that. This is for targets such as m68k that
2707 return the same value in multiple places. */
2708 if (GET_CODE (valreg) == PARALLEL)
2709 {
2710 rtx elem = XVECEXP (valreg, 0, 0);
2711 rtx where = XEXP (elem, 0);
2712 rtx offset = XEXP (elem, 1);
2713 if (offset == const0_rtx
2714 && GET_MODE (where) == GET_MODE (valreg))
2715 valreg = where;
2716 }
60ecc450 2717 }
66d433c7 2718
60ecc450 2719 /* Precompute all register parameters. It isn't safe to compute anything
2720 once we have started filling any specific hard regs. */
2721 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
66d433c7 2722
c2f47e15 2723 if (CALL_EXPR_STATIC_CHAIN (exp))
2724 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4ee9c684 2725 else
2726 static_chain_value = 0;
2727
4448f543 2728#ifdef REG_PARM_STACK_SPACE
60ecc450 2729 /* Save the fixed argument area if it's part of the caller's frame and
2730 is clobbered by argument setup for this call. */
02510658 2731 if (ACCUMULATE_OUTGOING_ARGS && pass)
4448f543 2732 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2733 &low_to_save, &high_to_save);
41332f48 2734#endif
66d433c7 2735
60ecc450 2736 /* Now store (and compute if necessary) all non-register parms.
2737 These come before register parms, since they can require block-moves,
2738 which could clobber the registers used for register parms.
2739 Parms which have partial registers are not stored here,
2740 but we do preallocate space here if they want that. */
66d433c7 2741
60ecc450 2742 for (i = 0; i < num_actuals; i++)
eb940a48 2743 {
2744 if (args[i].reg == 0 || args[i].pass_on_stack)
2745 {
2746 rtx before_arg = get_last_insn ();
2747
2748 if (store_one_arg (&args[i], argblock, flags,
2749 adjusted_args_size.var != 0,
2750 reg_parm_stack_space)
2751 || (pass == 0
2752 && check_sibcall_argument_overlap (before_arg,
2753 &args[i], 1)))
2754 sibcall_failure = 1;
2755 }
2756
2757 if (((flags & ECF_CONST)
2758 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2759 && args[i].stack)
2760 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2761 gen_rtx_USE (VOIDmode,
2762 args[i].stack),
2763 call_fusage);
2764 }
60ecc450 2765
2766 /* If we have a parm that is passed in registers but not in memory
2767 and whose alignment does not permit a direct copy into registers,
2768 make a group of pseudos that correspond to each register that we
2769 will later fill. */
2770 if (STRICT_ALIGNMENT)
2771 store_unaligned_arguments_into_pseudos (args, num_actuals);
2772
2773 /* Now store any partially-in-registers parm.
2774 This is the last place a block-move can happen. */
2775 if (reg_parm_seen)
2776 for (i = 0; i < num_actuals; i++)
2777 if (args[i].partial != 0 && ! args[i].pass_on_stack)
7ecc63d3 2778 {
2779 rtx before_arg = get_last_insn ();
2780
57679d39 2781 if (store_one_arg (&args[i], argblock, flags,
2782 adjusted_args_size.var != 0,
2783 reg_parm_stack_space)
2784 || (pass == 0
2785 && check_sibcall_argument_overlap (before_arg,
42b11544 2786 &args[i], 1)))
7ecc63d3 2787 sibcall_failure = 1;
2788 }
66d433c7 2789
60ecc450 2790 /* If we pushed args in forward order, perform stack alignment
2791 after pushing the last arg. */
4448f543 2792 if (!PUSH_ARGS_REVERSED && argblock == 0)
0e0be288 2793 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
60ecc450 2794 - unadjusted_args_size));
66d433c7 2795
60ecc450 2796 /* If register arguments require space on the stack and stack space
2797 was not preallocated, allocate stack space here for arguments
2798 passed in registers. */
fa20f865 2799 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 2800 && !ACCUMULATE_OUTGOING_ARGS
c87678e4 2801 && must_preallocate == 0 && reg_parm_stack_space > 0)
60ecc450 2802 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
985adbca 2803
60ecc450 2804 /* Pass the function the address in which to return a
2805 structure value. */
2806 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2807 {
0d568ddf 2808 structure_value_addr
85d654dd 2809 = convert_memory_address (Pmode, structure_value_addr);
45550790 2810 emit_move_insn (struct_value,
60ecc450 2811 force_reg (Pmode,
2812 force_operand (structure_value_addr,
2813 NULL_RTX)));
2814
8ad4c111 2815 if (REG_P (struct_value))
45550790 2816 use_reg (&call_fusage, struct_value);
60ecc450 2817 }
02c736f4 2818
c0e7e9f7 2819 after_args = get_last_insn ();
82c7907c 2820 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
4ee9c684 2821 &call_fusage, reg_parm_seen, pass == 0);
66d433c7 2822
42b11544 2823 load_register_parameters (args, num_actuals, &call_fusage, flags,
2824 pass == 0, &sibcall_failure);
c87678e4 2825
60ecc450 2826 /* Save a pointer to the last insn before the call, so that we can
2827 later safely search backwards to find the CALL_INSN. */
2828 before_call = get_last_insn ();
66d433c7 2829
7a8d641b 2830 /* Set up next argument register. For sibling calls on machines
2831 with register windows this should be the incoming register. */
7a8d641b 2832 if (pass == 0)
f387af4f 2833 next_arg_reg = targetm.calls.function_incoming_arg (&args_so_far,
2834 VOIDmode,
2835 void_type_node,
2836 true);
7a8d641b 2837 else
f387af4f 2838 next_arg_reg = targetm.calls.function_arg (&args_so_far,
2839 VOIDmode, void_type_node,
2840 true);
7a8d641b 2841
60ecc450 2842 /* All arguments and registers used for the call must be set up by
2843 now! */
2844
481feae3 2845 /* Stack must be properly aligned now. */
231bd014 2846 gcc_assert (!pass
2847 || !(stack_pointer_delta % preferred_unit_stack_boundary));
fa4f1f09 2848
60ecc450 2849 /* Generate the actual call instruction. */
4ee9c684 2850 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
0e0be288 2851 adjusted_args_size.constant, struct_value_size,
7a8d641b 2852 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
87e19636 2853 flags, & args_so_far);
60ecc450 2854
c0e7e9f7 2855 /* If the call setup or the call itself overlaps with anything
2856 of the argument setup we probably clobbered our call address.
2857 In that case we can't do sibcalls. */
2858 if (pass == 0
2859 && check_sibcall_argument_overlap (after_args, 0, 0))
2860 sibcall_failure = 1;
2861
05d18e8b 2862 /* If a non-BLKmode value is returned at the most significant end
2863 of a register, shift the register right by the appropriate amount
2864 and update VALREG accordingly. BLKmode values are handled by the
2865 group load/store machinery below. */
2866 if (!structure_value_addr
2867 && !pcc_struct_value
16c9337c 2868 && TYPE_MODE (rettype) != BLKmode
2869 && targetm.calls.return_in_msb (rettype))
05d18e8b 2870 {
16c9337c 2871 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
05d18e8b 2872 sibcall_failure = 1;
16c9337c 2873 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
05d18e8b 2874 }
2875
2dd6f9ed 2876 if (pass && (flags & ECF_MALLOC))
60ecc450 2877 {
2878 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2879 rtx last, insns;
2880
c87678e4 2881 /* The return value from a malloc-like function is a pointer. */
16c9337c 2882 if (TREE_CODE (rettype) == POINTER_TYPE)
80909c64 2883 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
60ecc450 2884
2885 emit_move_insn (temp, valreg);
2886
2887 /* The return value from a malloc-like function can not alias
2888 anything else. */
2889 last = get_last_insn ();
a1ddb869 2890 add_reg_note (last, REG_NOALIAS, temp);
60ecc450 2891
2892 /* Write out the sequence. */
2893 insns = get_insns ();
2894 end_sequence ();
31d3e01c 2895 emit_insn (insns);
60ecc450 2896 valreg = temp;
2897 }
66d433c7 2898
3072d30e 2899 /* For calls to `setjmp', etc., inform
2900 function.c:setjmp_warnings that it should complain if
2901 nonvolatile values are live. For functions that cannot
2902 return, inform flow that control does not fall through. */
66d433c7 2903
4fec1d6c 2904 if ((flags & ECF_NORETURN) || pass == 0)
02c736f4 2905 {
9239aee6 2906 /* The barrier must be emitted
60ecc450 2907 immediately after the CALL_INSN. Some ports emit more
2908 than just a CALL_INSN above, so we must search for it here. */
66d433c7 2909
60ecc450 2910 rtx last = get_last_insn ();
6d7dc5b9 2911 while (!CALL_P (last))
60ecc450 2912 {
2913 last = PREV_INSN (last);
2914 /* There was no CALL_INSN? */
231bd014 2915 gcc_assert (last != before_call);
60ecc450 2916 }
66d433c7 2917
9239aee6 2918 emit_barrier_after (last);
20f5f6d0 2919
b494d193 2920 /* Stack adjustments after a noreturn call are dead code.
2921 However when NO_DEFER_POP is in effect, we must preserve
2922 stack_pointer_delta. */
2923 if (inhibit_defer_pop == 0)
2924 {
2925 stack_pointer_delta = old_stack_allocated;
2926 pending_stack_adjust = 0;
2927 }
60ecc450 2928 }
66d433c7 2929
60ecc450 2930 /* If value type not void, return an rtx for the value. */
66d433c7 2931
16c9337c 2932 if (TYPE_MODE (rettype) == VOIDmode
60ecc450 2933 || ignore)
5edaabad 2934 target = const0_rtx;
60ecc450 2935 else if (structure_value_addr)
2936 {
e16ceb8e 2937 if (target == 0 || !MEM_P (target))
60ecc450 2938 {
f7c44134 2939 target
16c9337c 2940 = gen_rtx_MEM (TYPE_MODE (rettype),
2941 memory_address (TYPE_MODE (rettype),
f7c44134 2942 structure_value_addr));
16c9337c 2943 set_mem_attributes (target, rettype, 1);
60ecc450 2944 }
2945 }
2946 else if (pcc_struct_value)
566d850a 2947 {
60ecc450 2948 /* This is the special C++ case where we need to
2949 know what the true target was. We take care to
2950 never use this value more than once in one expression. */
16c9337c 2951 target = gen_rtx_MEM (TYPE_MODE (rettype),
60ecc450 2952 copy_to_reg (valreg));
16c9337c 2953 set_mem_attributes (target, rettype, 1);
566d850a 2954 }
60ecc450 2955 /* Handle calls that return values in multiple non-contiguous locations.
2956 The Irix 6 ABI has examples of this. */
2957 else if (GET_CODE (valreg) == PARALLEL)
2958 {
4ee9c684 2959 if (target == 0)
60ecc450 2960 {
387bc205 2961 /* This will only be assigned once, so it can be readonly. */
16c9337c 2962 tree nt = build_qualified_type (rettype,
2963 (TYPE_QUALS (rettype)
387bc205 2964 | TYPE_QUAL_CONST));
2965
2966 target = assign_temp (nt, 0, 1, 1);
60ecc450 2967 }
2968
2969 if (! rtx_equal_p (target, valreg))
16c9337c 2970 emit_group_store (target, valreg, rettype,
2971 int_size_in_bytes (rettype));
325d1c45 2972
60ecc450 2973	  /* We cannot support sibling calls for this case.  */
2974 sibcall_failure = 1;
2975 }
2976 else if (target
16c9337c 2977 && GET_MODE (target) == TYPE_MODE (rettype)
60ecc450 2978 && GET_MODE (target) == GET_MODE (valreg))
2979 {
aadbaa40 2980 bool may_overlap = false;
2981
360738f1 2982 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2983 reg to a plain register. */
90af1361 2984 if (!REG_P (target) || HARD_REGISTER_P (target))
2985 valreg = avoid_likely_spilled_reg (valreg);
360738f1 2986
aadbaa40 2987 /* If TARGET is a MEM in the argument area, and we have
2988 saved part of the argument area, then we can't store
2989 directly into TARGET as it may get overwritten when we
2990 restore the argument save area below. Don't work too
2991 hard though and simply force TARGET to a register if it
2992 is a MEM; the optimizer is quite likely to sort it out. */
2993 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2994 for (i = 0; i < num_actuals; i++)
2995 if (args[i].save_area)
2996 {
2997 may_overlap = true;
2998 break;
2999 }
dbe1f550 3000
aadbaa40 3001 if (may_overlap)
3002 target = copy_to_reg (valreg);
3003 else
3004 {
3005 /* TARGET and VALREG cannot be equal at this point
3006 because the latter would not have
3007 REG_FUNCTION_VALUE_P true, while the former would if
3008 it were referring to the same register.
3009
3010 If they refer to the same register, this move will be
3011 a no-op, except when function inlining is being
3012 done. */
3013 emit_move_insn (target, valreg);
3014
3015 /* If we are setting a MEM, this code must be executed.
3016 Since it is emitted after the call insn, sibcall
3017 optimization cannot be performed in that case. */
3018 if (MEM_P (target))
3019 sibcall_failure = 1;
3020 }
60ecc450 3021 }
16c9337c 3022 else if (TYPE_MODE (rettype) == BLKmode)
044aa5ed 3023 {
bee647f8 3024 rtx val = valreg;
3025 if (GET_MODE (val) != BLKmode)
3026 val = avoid_likely_spilled_reg (val);
3027 target = copy_blkmode_from_reg (target, val, rettype);
044aa5ed 3028
 3029	  /* We cannot support sibling calls for this case.  */
3030 sibcall_failure = 1;
3031 }
60ecc450 3032 else
90af1361 3033 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
66d433c7 3034
3b2411a8 3035 /* If we promoted this return value, make the proper SUBREG.
3036 TARGET might be const0_rtx here, so be careful. */
3037 if (REG_P (target)
16c9337c 3038 && TYPE_MODE (rettype) != BLKmode
3039 && GET_MODE (target) != TYPE_MODE (rettype))
45550790 3040 {
16c9337c 3041 tree type = rettype;
3b2411a8 3042 int unsignedp = TYPE_UNSIGNED (type);
3043 int offset = 0;
3044 enum machine_mode pmode;
3045
3046 /* Ensure we promote as expected, and get the new unsignedness. */
3047 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3048 funtype, 1);
3049 gcc_assert (GET_MODE (target) == pmode);
3050
3051 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3052 && (GET_MODE_SIZE (GET_MODE (target))
3053 > GET_MODE_SIZE (TYPE_MODE (type))))
231bd014 3054 {
3b2411a8 3055 offset = GET_MODE_SIZE (GET_MODE (target))
3056 - GET_MODE_SIZE (TYPE_MODE (type));
3057 if (! BYTES_BIG_ENDIAN)
3058 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3059 else if (! WORDS_BIG_ENDIAN)
3060 offset %= UNITS_PER_WORD;
231bd014 3061 }
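	  /* For example, an SImode value promoted to a DImode register
	     on a big-endian 64-bit target yields OFFSET 4, so the SUBREG
	     picks out the low-order half where the value actually lives.  */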
3b2411a8 3062
3063 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3064 SUBREG_PROMOTED_VAR_P (target) = 1;
3065 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
45550790 3066 }
23eb5fa6 3067
60ecc450 3068   /* If the size of the args is variable or this was a constructor call for a
 3069	     stack argument, restore the saved stack-pointer value.  */
66d433c7 3070
ff3ae375 3071 if (old_stack_level)
60ecc450 3072 {
3073 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9069face 3074 stack_pointer_delta = old_stack_pointer_delta;
60ecc450 3075 pending_stack_adjust = old_pending_adj;
80f06481 3076 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
60ecc450 3077 stack_arg_under_construction = old_stack_arg_under_construction;
3078 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3079 stack_usage_map = initial_stack_usage_map;
60ecc450 3080 sibcall_failure = 1;
3081 }
02510658 3082 else if (ACCUMULATE_OUTGOING_ARGS && pass)
60ecc450 3083 {
66d433c7 3084#ifdef REG_PARM_STACK_SPACE
60ecc450 3085 if (save_area)
6e96b626 3086 restore_fixed_argument_area (save_area, argblock,
3087 high_to_save, low_to_save);
41332f48 3088#endif
66d433c7 3089
60ecc450 3090 /* If we saved any argument areas, restore them. */
3091 for (i = 0; i < num_actuals; i++)
3092 if (args[i].save_area)
3093 {
3094 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3095 rtx stack_area
3096 = gen_rtx_MEM (save_mode,
3097 memory_address (save_mode,
3098 XEXP (args[i].stack_slot, 0)));
3099
3100 if (save_mode != BLKmode)
3101 emit_move_insn (stack_area, args[i].save_area);
3102 else
0378dbdc 3103 emit_block_move (stack_area, args[i].save_area,
241399f6 3104 GEN_INT (args[i].locate.size.constant),
0378dbdc 3105 BLOCK_OP_CALL_PARM);
60ecc450 3106 }
66d433c7 3107
60ecc450 3108 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3109 stack_usage_map = initial_stack_usage_map;
3110 }
66d433c7 3111
c87678e4 3112 /* If this was alloca, record the new stack level for nonlocal gotos.
60ecc450 3113 Check for the handler slots since we might not have a save area
3114 for non-local gotos. */
dbd6697a 3115
4ee9c684 3116 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3117 update_nonlocal_goto_save_area ();
66d433c7 3118
60ecc450 3119 /* Free up storage we no longer need. */
3120 for (i = 0; i < num_actuals; ++i)
3121 if (args[i].aligned_regs)
3122 free (args[i].aligned_regs);
3123
3124 insns = get_insns ();
3125 end_sequence ();
3126
3127 if (pass == 0)
3128 {
3129 tail_call_insns = insns;
3130
60ecc450 3131 /* Restore the pending stack adjustment now that we have
3132 finished generating the sibling call sequence. */
91b70175 3133
60ecc450 3134 pending_stack_adjust = save_pending_stack_adjust;
91b70175 3135 stack_pointer_delta = save_stack_pointer_delta;
0e0be288 3136
3137 /* Prepare arg structure for next iteration. */
c87678e4 3138 for (i = 0; i < num_actuals; i++)
0e0be288 3139 {
3140 args[i].value = 0;
3141 args[i].aligned_regs = 0;
3142 args[i].stack = 0;
3143 }
7ecc63d3 3144
3145 sbitmap_free (stored_args_map);
60ecc450 3146 }
3147 else
9069face 3148 {
3149 normal_call_insns = insns;
3150
3151 /* Verify that we've deallocated all the stack we used. */
4fec1d6c 3152 gcc_assert ((flags & ECF_NORETURN)
231bd014 3153 || (old_stack_allocated
3154 == stack_pointer_delta - pending_stack_adjust));
9069face 3155 }
ae8d6151 3156
3157 /* If something prevents making this a sibling call,
3158 zero out the sequence. */
3159 if (sibcall_failure)
3160 tail_call_insns = NULL_RTX;
4ee9c684 3161 else
3162 break;
60ecc450 3163 }
3164
365db11e 3165 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4ee9c684 3166     arguments too, as the argument area is now clobbered by the call.  */
3167 if (tail_call_insns)
60ecc450 3168 {
4ee9c684 3169 emit_insn (tail_call_insns);
18d50ae6 3170 crtl->tail_call_emit = true;
60ecc450 3171 }
3172 else
31d3e01c 3173 emit_insn (normal_call_insns);
66d433c7 3174
60ecc450 3175 currently_expanding_call--;
6d801f27 3176
a331ea1b 3177 if (stack_usage_map_buf)
3178 free (stack_usage_map_buf);
3179
66d433c7 3180 return target;
3181}
915e81b8 3182
4ee9c684 3183/* A sibling call sequence invalidates any REG_EQUIV notes made for
3184 this function's incoming arguments.
3185
3186 At the start of RTL generation we know the only REG_EQUIV notes
0a227ed5 3187 in the rtl chain are those for incoming arguments, so we can look
3188 for REG_EQUIV notes between the start of the function and the
3189 NOTE_INSN_FUNCTION_BEG.
4ee9c684 3190
3191 This is (slight) overkill. We could keep track of the highest
3192 argument we clobber and be more selective in removing notes, but it
3193 does not seem to be worth the effort. */
0a227ed5 3194
4ee9c684 3195void
3196fixup_tail_calls (void)
3197{
0a227ed5 3198 rtx insn;
3199
3200 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3201 {
750a330e 3202 rtx note;
3203
0a227ed5 3204 /* There are never REG_EQUIV notes for the incoming arguments
3205 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3206 if (NOTE_P (insn)
ad4583d9 3207 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
0a227ed5 3208 break;
3209
750a330e 3210 note = find_reg_note (insn, REG_EQUIV, 0);
3211 if (note)
3212 remove_note (insn, note);
3213 note = find_reg_note (insn, REG_EQUIV, 0);
3214 gcc_assert (!note);
0a227ed5 3215 }
4ee9c684 3216}
3217
915e81b8 3218/* Traverse a list of TYPES and expand all complex types into their
3219 components. */
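/* For example, a parameter list of (complex double, int) becomes
   (double, double, int) when the target splits complex arguments.  */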
5ab29745 3220static tree
915e81b8 3221split_complex_types (tree types)
3222{
3223 tree p;
3224
92d40bc4 3225   /* Before allocating memory, check for the common case of no complex types.  */
3226 for (p = types; p; p = TREE_CHAIN (p))
3227 {
3228 tree type = TREE_VALUE (p);
3229 if (TREE_CODE (type) == COMPLEX_TYPE
3230 && targetm.calls.split_complex_arg (type))
a0c938f0 3231 goto found;
92d40bc4 3232 }
3233 return types;
3234
3235 found:
915e81b8 3236 types = copy_list (types);
3237
3238 for (p = types; p; p = TREE_CHAIN (p))
3239 {
3240 tree complex_type = TREE_VALUE (p);
3241
92d40bc4 3242 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3243 && targetm.calls.split_complex_arg (complex_type))
915e81b8 3244 {
3245 tree next, imag;
3246
3247 /* Rewrite complex type with component type. */
3248 TREE_VALUE (p) = TREE_TYPE (complex_type);
3249 next = TREE_CHAIN (p);
3250
3251 /* Add another component type for the imaginary part. */
3252 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3253 TREE_CHAIN (p) = imag;
3254 TREE_CHAIN (imag) = next;
3255
3256 /* Skip the newly created node. */
3257 p = TREE_CHAIN (p);
3258 }
3259 }
3260
3261 return types;
3262}
66d433c7 3263\f
20f7032f 3264/* Output a library call to function FUN (a SYMBOL_REF rtx).
c87678e4 3265    The RETVAL parameter specifies whether the return value needs to be saved;
ebf77775 3266    the other parameters are documented in the emit_library_call function below.  */
2a631e19 3267
20f7032f 3268static rtx
4c9e08a4 3269emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3270 enum libcall_type fn_type,
3271 enum machine_mode outmode, int nargs, va_list p)
b39693dd 3272{
9bdaf1ba 3273 /* Total size in bytes of all the stack-parms scanned so far. */
3274 struct args_size args_size;
3275 /* Size of arguments before any adjustments (such as rounding). */
3276 struct args_size original_args_size;
19cb6b50 3277 int argnum;
9bdaf1ba 3278 rtx fun;
22c61100 3279   /* TODO: choose the correct decl type of orgfun.  Sadly this information
 3280	     isn't present here, so we default to the native calling ABI.  */
60e2260d 3281   tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI?  */
fa20f865 3282   tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI?  */
9bdaf1ba 3283 int inc;
3284 int count;
9bdaf1ba 3285 rtx argblock = 0;
3286 CUMULATIVE_ARGS args_so_far;
c87678e4 3287 struct arg
3288 {
3289 rtx value;
3290 enum machine_mode mode;
3291 rtx reg;
3292 int partial;
241399f6 3293 struct locate_and_pad_arg_data locate;
c87678e4 3294 rtx save_area;
3295 };
9bdaf1ba 3296 struct arg *argvec;
3297 int old_inhibit_defer_pop = inhibit_defer_pop;
3298 rtx call_fusage = 0;
3299 rtx mem_value = 0;
16204096 3300 rtx valreg;
9bdaf1ba 3301 int pcc_struct_value = 0;
3302 int struct_value_size = 0;
df4b504c 3303 int flags;
9bdaf1ba 3304 int reg_parm_stack_space = 0;
9bdaf1ba 3305 int needed;
644c283b 3306 rtx before_call;
771d21fa 3307 tree tfom; /* type_for_mode (outmode, 0) */
9bdaf1ba 3308
4448f543 3309#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3310 /* Define the boundary of the register parm stack space that needs to be
 3311	     saved, if any.  */
75a70cf9 3312 int low_to_save = 0, high_to_save = 0;
c87678e4 3313 rtx save_area = 0; /* Place that it is saved. */
9bdaf1ba 3314#endif
3315
9bdaf1ba 3316 /* Size of the stack reserved for parameter registers. */
3317 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3318 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 3319 char *stack_usage_map_buf = NULL;
9bdaf1ba 3320
45550790 3321 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3322
9bdaf1ba 3323#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3324 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
9bdaf1ba 3325#endif
3326
ab7ccfa2 3327   /* By default, library functions cannot throw.  */
df4b504c 3328 flags = ECF_NOTHROW;
3329
ab7ccfa2 3330 switch (fn_type)
3331 {
3332 case LCT_NORMAL:
2a0c81bf 3333 break;
ab7ccfa2 3334 case LCT_CONST:
2a0c81bf 3335 flags |= ECF_CONST;
3336 break;
ab7ccfa2 3337 case LCT_PURE:
2a0c81bf 3338 flags |= ECF_PURE;
ab7ccfa2 3339 break;
ab7ccfa2 3340 case LCT_NORETURN:
3341 flags |= ECF_NORETURN;
3342 break;
3343 case LCT_THROW:
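      /* Note the plain assignment below: the default ECF_NOTHROW is
	 deliberately dropped so that the call may unwind.  */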
3344 flags = ECF_NORETURN;
3345 break;
0ff18307 3346 case LCT_RETURNS_TWICE:
3347 flags = ECF_RETURNS_TWICE;
3348 break;
ab7ccfa2 3349 }
9bdaf1ba 3350 fun = orgfun;
3351
9bdaf1ba 3352 /* Ensure current function's preferred stack boundary is at least
3353 what we need. */
edb7afe8 3354 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3355 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
9bdaf1ba 3356
3357 /* If this kind of value comes back in memory,
3358 decide where in memory it should come back. */
771d21fa 3359 if (outmode != VOIDmode)
9bdaf1ba 3360 {
dc24ddbd 3361 tfom = lang_hooks.types.type_for_mode (outmode, 0);
45550790 3362 if (aggregate_value_p (tfom, 0))
771d21fa 3363 {
9bdaf1ba 3364#ifdef PCC_STATIC_STRUCT_RETURN
771d21fa 3365 rtx pointer_reg
46b3ff29 3366 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
771d21fa 3367 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3368 pcc_struct_value = 1;
3369 if (value == 0)
3370 value = gen_reg_rtx (outmode);
9bdaf1ba 3371#else /* not PCC_STATIC_STRUCT_RETURN */
771d21fa 3372 struct_value_size = GET_MODE_SIZE (outmode);
e16ceb8e 3373 if (value != 0 && MEM_P (value))
771d21fa 3374 mem_value = value;
3375 else
3376 mem_value = assign_temp (tfom, 0, 1, 1);
9bdaf1ba 3377#endif
771d21fa 3378 /* This call returns a big structure. */
2dd6f9ed 3379 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
771d21fa 3380 }
9bdaf1ba 3381 }
771d21fa 3382 else
3383 tfom = void_type_node;
9bdaf1ba 3384
3385 /* ??? Unfinished: must pass the memory address as an argument. */
3386
 3387	  /* Copy all the libcall arguments out of the varargs data
3388 and into a vector ARGVEC.
3389
3390 Compute how to pass each argument. We only support a very small subset
3391 of the full argument passing conventions to limit complexity here since
3392 library functions shouldn't have many args. */
3393
364c0c59 3394 argvec = XALLOCAVEC (struct arg, nargs + 1);
f0af5a88 3395 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
9bdaf1ba 3396
e1efd914 3397#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3398 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3399#else
30c70355 3400 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
e1efd914 3401#endif
9bdaf1ba 3402
3403 args_size.constant = 0;
3404 args_size.var = 0;
3405
3406 count = 0;
3407
3408 push_temp_slots ();
3409
3410 /* If there's a structure value address to be passed,
3411 either pass it in the special place, or pass it as an extra argument. */
45550790 3412 if (mem_value && struct_value == 0 && ! pcc_struct_value)
9bdaf1ba 3413 {
3414 rtx addr = XEXP (mem_value, 0);
a0c938f0 3415
9bdaf1ba 3416 nargs++;
3417
a56c46d2 3418 /* Make sure it is a reasonable operand for a move or push insn. */
3419 if (!REG_P (addr) && !MEM_P (addr)
3420 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3421 addr = force_operand (addr, NULL_RTX);
3422
9bdaf1ba 3423 argvec[count].value = addr;
3424 argvec[count].mode = Pmode;
3425 argvec[count].partial = 0;
3426
f387af4f 3427 argvec[count].reg = targetm.calls.function_arg (&args_so_far,
3428 Pmode, NULL_TREE, true);
f054eb3c 3429 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3430 NULL_TREE, 1) == 0);
9bdaf1ba 3431
3432 locate_and_pad_parm (Pmode, NULL_TREE,
2e735c0d 3433#ifdef STACK_PARMS_IN_REG_PARM_AREA
a0c938f0 3434 1,
2e735c0d 3435#else
3436 argvec[count].reg != 0,
3437#endif
241399f6 3438 0, NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 3439
9bdaf1ba 3440 if (argvec[count].reg == 0 || argvec[count].partial != 0
3441 || reg_parm_stack_space > 0)
241399f6 3442 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 3443
f387af4f 3444 targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true);
9bdaf1ba 3445
3446 count++;
3447 }
3448
3449 for (; count < nargs; count++)
3450 {
3451 rtx val = va_arg (p, rtx);
d62e827b 3452 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
9bdaf1ba 3453
3454 /* We cannot convert the arg value to the mode the library wants here;
 3455	 we must do it earlier, where we know the signedness of the arg.  */
231bd014 3456 gcc_assert (mode != BLKmode
3457 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
9bdaf1ba 3458
a56c46d2 3459 /* Make sure it is a reasonable operand for a move or push insn. */
3460 if (!REG_P (val) && !MEM_P (val)
3461 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3462 val = force_operand (val, NULL_RTX);
3463
cc9b8628 3464 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
9bdaf1ba 3465 {
ddaf7ad3 3466 rtx slot;
13f08ee7 3467 int must_copy
3468 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
ddaf7ad3 3469
9c2a0c05 3470 /* If this was a CONST function, it is now PURE since it now
3471 reads memory. */
5096b8b0 3472 if (flags & ECF_CONST)
3473 {
3474 flags &= ~ECF_CONST;
3475 flags |= ECF_PURE;
3476 }
3477
590c3166 3478 if (MEM_P (val) && !must_copy)
ddaf7ad3 3479 slot = val;
41dc12b4 3480 else
ddaf7ad3 3481 {
dc24ddbd 3482 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
771d21fa 3483 0, 1, 1);
ddaf7ad3 3484 emit_move_insn (slot, val);
3485 }
387bc205 3486
a683e787 3487 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3488 gen_rtx_USE (VOIDmode, slot),
3489 call_fusage);
ddaf7ad3 3490 if (must_copy)
3491 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3492 gen_rtx_CLOBBER (VOIDmode,
3493 slot),
3494 call_fusage);
3495
9bdaf1ba 3496 mode = Pmode;
ddaf7ad3 3497 val = force_operand (XEXP (slot, 0), NULL_RTX);
9bdaf1ba 3498 }
9bdaf1ba 3499
3500 argvec[count].value = val;
3501 argvec[count].mode = mode;
3502
f387af4f 3503 argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode,
3504 NULL_TREE, true);
9bdaf1ba 3505
9bdaf1ba 3506 argvec[count].partial
f054eb3c 3507 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
9bdaf1ba 3508
3509 locate_and_pad_parm (mode, NULL_TREE,
2e735c0d 3510#ifdef STACK_PARMS_IN_REG_PARM_AREA
c87678e4 3511 1,
2e735c0d 3512#else
3513 argvec[count].reg != 0,
3514#endif
241399f6 3515 argvec[count].partial,
3516 NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 3517
231bd014 3518 gcc_assert (!argvec[count].locate.size.var);
9bdaf1ba 3519
9bdaf1ba 3520 if (argvec[count].reg == 0 || argvec[count].partial != 0
3521 || reg_parm_stack_space > 0)
241399f6 3522 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 3523
f387af4f 3524 targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true);
9bdaf1ba 3525 }
9bdaf1ba 3526
9bdaf1ba 3527 /* If this machine requires an external definition for library
3528 functions, write one out. */
3529 assemble_external_libcall (fun);
3530
3531 original_args_size = args_size;
91b70175 3532 args_size.constant = (((args_size.constant
3533 + stack_pointer_delta
3534 + STACK_BYTES - 1)
3535 / STACK_BYTES
3536 * STACK_BYTES)
3537 - stack_pointer_delta);
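  /* For example, with STACK_BYTES == 16, stack_pointer_delta == 4 and
     9 bytes of arguments, the rounding above yields 12, so the stack
     pointer stays 16-byte aligned once the arguments are pushed.  */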
9bdaf1ba 3538
3539 args_size.constant = MAX (args_size.constant,
3540 reg_parm_stack_space);
3541
fa20f865 3542 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 3543 args_size.constant -= reg_parm_stack_space;
9bdaf1ba 3544
abe32cce 3545 if (args_size.constant > crtl->outgoing_args_size)
3546 crtl->outgoing_args_size = args_size.constant;
9bdaf1ba 3547
4448f543 3548 if (ACCUMULATE_OUTGOING_ARGS)
3549 {
3550 /* Since the stack pointer will never be pushed, it is possible for
3551 the evaluation of a parm to clobber something we have already
3552 written to the stack. Since most function calls on RISC machines
3553 do not use the stack, this is uncommon, but must work correctly.
9bdaf1ba 3554
4448f543 3555 Therefore, we save any area of the stack that was already written
3556 and that we are using. Here we set up to do this by making a new
3557 stack usage map from the old one.
9bdaf1ba 3558
4448f543 3559 Another approach might be to try to reorder the argument
3560 evaluations to avoid this conflicting stack usage. */
9bdaf1ba 3561
4448f543 3562 needed = args_size.constant;
9bdaf1ba 3563
4448f543 3564 /* Since we will be writing into the entire argument area, the
3565 map must be allocated for its entire size, not just the part that
3566 is the responsibility of the caller. */
fa20f865 3567 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 3568 needed += reg_parm_stack_space;
9bdaf1ba 3569
3570#ifdef ARGS_GROW_DOWNWARD
4448f543 3571 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3572 needed + 1);
9bdaf1ba 3573#else
4448f543 3574 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3575 needed);
9bdaf1ba 3576#endif
4c36ffe6 3577 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3578 stack_usage_map = stack_usage_map_buf;
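      /* Each byte of stack_usage_map records whether the corresponding
	 byte of the outgoing argument area has already been written.  */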
9bdaf1ba 3579
4448f543 3580 if (initial_highest_arg_in_use)
8e547276 3581 memcpy (stack_usage_map, initial_stack_usage_map,
3582 initial_highest_arg_in_use);
9bdaf1ba 3583
4448f543 3584 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 3585 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 3586 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3587 needed = 0;
9bdaf1ba 3588
9c0a756f 3589 /* We must be careful to use virtual regs before they're instantiated,
a0c938f0 3590 and real regs afterwards. Loop optimization, for example, can create
9c0a756f 3591 new libcalls after we've instantiated the virtual regs, and if we
3592 use virtuals anyway, they won't match the rtl patterns. */
9bdaf1ba 3593
9c0a756f 3594 if (virtuals_instantiated)
3595 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3596 else
3597 argblock = virtual_outgoing_args_rtx;
4448f543 3598 }
3599 else
3600 {
3601 if (!PUSH_ARGS)
3602 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3603 }
9bdaf1ba 3604
9bdaf1ba 3605 /* If we push args individually in reverse order, perform stack alignment
3606 before the first push (the last arg). */
4448f543 3607 if (argblock == 0 && PUSH_ARGS_REVERSED)
9bdaf1ba 3608 anti_adjust_stack (GEN_INT (args_size.constant
3609 - original_args_size.constant));
9bdaf1ba 3610
4448f543 3611 if (PUSH_ARGS_REVERSED)
3612 {
3613 inc = -1;
3614 argnum = nargs - 1;
3615 }
3616 else
3617 {
3618 inc = 1;
3619 argnum = 0;
3620 }
9bdaf1ba 3621
4448f543 3622#ifdef REG_PARM_STACK_SPACE
3623 if (ACCUMULATE_OUTGOING_ARGS)
3624 {
3625 /* The argument list is the property of the called routine and it
3626 may clobber it. If the fixed area has been used for previous
6e96b626 3627 parameters, we must save and restore it. */
3628 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3629 &low_to_save, &high_to_save);
9bdaf1ba 3630 }
3631#endif
c87678e4 3632
9bdaf1ba 3633 /* Push the args that need to be pushed. */
3634
3635 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3636 are to be pushed. */
3637 for (count = 0; count < nargs; count++, argnum += inc)
3638 {
19cb6b50 3639 enum machine_mode mode = argvec[argnum].mode;
3640 rtx val = argvec[argnum].value;
9bdaf1ba 3641 rtx reg = argvec[argnum].reg;
3642 int partial = argvec[argnum].partial;
c2fd5e89 3643 unsigned int parm_align = argvec[argnum].locate.boundary;
4448f543 3644 int lower_bound = 0, upper_bound = 0, i;
9bdaf1ba 3645
3646 if (! (reg != 0 && partial == 0))
3647 {
4448f543 3648 if (ACCUMULATE_OUTGOING_ARGS)
3649 {
02510658 3650 /* If this is being stored into a pre-allocated, fixed-size,
3651 stack area, save any previous data at that location. */
9bdaf1ba 3652
3653#ifdef ARGS_GROW_DOWNWARD
4448f543 3654 /* stack_slot is negative, but we want to index stack_usage_map
3655 with positive values. */
9a0cf170 3656 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
241399f6 3657 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
9bdaf1ba 3658#else
9a0cf170 3659 lower_bound = argvec[argnum].locate.slot_offset.constant;
241399f6 3660 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
9bdaf1ba 3661#endif
3662
fd2c0c1d 3663 i = lower_bound;
3664 /* Don't worry about things in the fixed argument area;
3665 it has already been saved. */
3666 if (i < reg_parm_stack_space)
3667 i = reg_parm_stack_space;
3668 while (i < upper_bound && stack_usage_map[i] == 0)
3669 i++;
9bdaf1ba 3670
fd2c0c1d 3671 if (i < upper_bound)
4448f543 3672 {
241399f6 3673 /* We need to make a save area. */
3674 unsigned int size
3675 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4448f543 3676 enum machine_mode save_mode
241399f6 3677 = mode_for_size (size, MODE_INT, 1);
3678 rtx adr
3679 = plus_constant (argblock,
3680 argvec[argnum].locate.offset.constant);
4448f543 3681 rtx stack_area
241399f6 3682 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4448f543 3683
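		  /* mode_for_size returns BLKmode when no integer mode
		     is wide enough; that case is handled just below via
		     a stack temporary and a block move.  */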
f9c6a9c3 3684 if (save_mode == BLKmode)
3685 {
3686 argvec[argnum].save_area
3687 = assign_stack_temp (BLKmode,
a0c938f0 3688 argvec[argnum].locate.size.constant,
f9c6a9c3 3689 0);
3690
3691 emit_block_move (validize_mem (argvec[argnum].save_area),
a0c938f0 3692 stack_area,
f9c6a9c3 3693 GEN_INT (argvec[argnum].locate.size.constant),
3694 BLOCK_OP_CALL_PARM);
3695 }
3696 else
3697 {
3698 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3699
3700 emit_move_insn (argvec[argnum].save_area, stack_area);
3701 }
4448f543 3702 }
9bdaf1ba 3703 }
325d1c45 3704
c2fd5e89 3705 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
0378dbdc 3706 partial, reg, 0, argblock,
241399f6 3707 GEN_INT (argvec[argnum].locate.offset.constant),
3708 reg_parm_stack_space,
3709 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
9bdaf1ba 3710
9bdaf1ba 3711 /* Now mark the segment we just used. */
4448f543 3712 if (ACCUMULATE_OUTGOING_ARGS)
3713 for (i = lower_bound; i < upper_bound; i++)
3714 stack_usage_map[i] = 1;
9bdaf1ba 3715
3716 NO_DEFER_POP;
2eb9302a 3717
eb940a48 3718 if ((flags & ECF_CONST)
3719 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2eb9302a 3720 {
3721 rtx use;
3722
3723 /* Indicate argument access so that alias.c knows that these
3724 values are live. */
3725 if (argblock)
3726 use = plus_constant (argblock,
3727 argvec[argnum].locate.offset.constant);
3728 else
23943319 3729 /* When arguments are pushed, trying to tell alias.c where
2eb9302a 3730 exactly this argument is won't work, because the
3731 auto-increment causes confusion. So we merely indicate
3732 that we access something with a known mode somewhere on
3733 the stack. */
a0c938f0 3734 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2eb9302a 3735 gen_rtx_SCRATCH (Pmode));
3736 use = gen_rtx_MEM (argvec[argnum].mode, use);
3737 use = gen_rtx_USE (VOIDmode, use);
3738 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3739 }
9bdaf1ba 3740 }
3741 }
3742
9bdaf1ba 3743 /* If we pushed args in forward order, perform stack alignment
3744 after pushing the last arg. */
4448f543 3745 if (argblock == 0 && !PUSH_ARGS_REVERSED)
9bdaf1ba 3746 anti_adjust_stack (GEN_INT (args_size.constant
3747 - original_args_size.constant));
9bdaf1ba 3748
4448f543 3749 if (PUSH_ARGS_REVERSED)
3750 argnum = nargs - 1;
3751 else
3752 argnum = 0;
9bdaf1ba 3753
82c7907c 3754 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
9bdaf1ba 3755
3756 /* Now load any reg parms into their regs. */
3757
3758 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3759 are to be pushed. */
3760 for (count = 0; count < nargs; count++, argnum += inc)
3761 {
bec917cc 3762 enum machine_mode mode = argvec[argnum].mode;
19cb6b50 3763 rtx val = argvec[argnum].value;
9bdaf1ba 3764 rtx reg = argvec[argnum].reg;
3765 int partial = argvec[argnum].partial;
3766
3767 /* Handle calls that pass values in multiple non-contiguous
3768 locations. The PA64 has examples of this for library calls. */
3769 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bec917cc 3770 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
9bdaf1ba 3771 else if (reg != 0 && partial == 0)
3772 emit_move_insn (reg, val);
3773
3774 NO_DEFER_POP;
3775 }
3776
9bdaf1ba 3777 /* Any regs containing parms remain in use through the call. */
3778 for (count = 0; count < nargs; count++)
3779 {
3780 rtx reg = argvec[count].reg;
3781 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3782 use_group_regs (&call_fusage, reg);
3783 else if (reg != 0)
6c6f16e5 3784 {
3785 int partial = argvec[count].partial;
3786 if (partial)
3787 {
3788 int nregs;
3789 gcc_assert (partial % UNITS_PER_WORD == 0);
3790 nregs = partial / UNITS_PER_WORD;
3791 use_regs (&call_fusage, REGNO (reg), nregs);
3792 }
3793 else
3794 use_reg (&call_fusage, reg);
3795 }
9bdaf1ba 3796 }
3797
3798 /* Pass the function the address in which to return a structure value. */
45550790 3799 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
9bdaf1ba 3800 {
45550790 3801 emit_move_insn (struct_value,
9bdaf1ba 3802 force_reg (Pmode,
3803 force_operand (XEXP (mem_value, 0),
3804 NULL_RTX)));
8ad4c111 3805 if (REG_P (struct_value))
45550790 3806 use_reg (&call_fusage, struct_value);
9bdaf1ba 3807 }
3808
3809 /* Don't allow popping to be deferred, since then
3810 cse'ing of library calls could delete a call and leave the pop. */
3811 NO_DEFER_POP;
16204096 3812 valreg = (mem_value == 0 && outmode != VOIDmode
578d1295 3813 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
9bdaf1ba 3814
481feae3 3815 /* Stack must be properly aligned now. */
231bd014 3816 gcc_assert (!(stack_pointer_delta
3817 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
fa4f1f09 3818
644c283b 3819 before_call = get_last_insn ();
3820
9bdaf1ba 3821 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3822 will set inhibit_defer_pop to that value. */
20f7032f 3823 /* The return type is needed to decide how many bytes the function pops.
3824 Signedness plays no role in that, so for simplicity, we pretend it's
3825 always signed. We also assume that the list of arguments passed has
3826 no impact, so we pretend it is unknown. */
9bdaf1ba 3827
4ee9c684 3828 emit_call_1 (fun, NULL,
c87678e4 3829 get_identifier (XSTR (orgfun, 0)),
771d21fa 3830 build_function_type (tfom, NULL_TREE),
c87678e4 3831 original_args_size.constant, args_size.constant,
9bdaf1ba 3832 struct_value_size,
f387af4f 3833 targetm.calls.function_arg (&args_so_far,
3834 VOIDmode, void_type_node, true),
16204096 3835 valreg,
87e19636 3836 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
9bdaf1ba 3837
3072d30e 3838 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3839 that it should complain if nonvolatile values are live. For
3840 functions that cannot return, inform flow that control does not
3841 fall through. */
644c283b 3842
4fec1d6c 3843 if (flags & ECF_NORETURN)
644c283b 3844 {
9239aee6 3845       /* The barrier must be emitted
644c283b 3846 immediately after the CALL_INSN. Some ports emit more than
3847 just a CALL_INSN above, so we must search for it here. */
3848
3849 rtx last = get_last_insn ();
6d7dc5b9 3850 while (!CALL_P (last))
644c283b 3851 {
3852 last = PREV_INSN (last);
3853 /* There was no CALL_INSN? */
231bd014 3854 gcc_assert (last != before_call);
644c283b 3855 }
3856
9239aee6 3857 emit_barrier_after (last);
644c283b 3858 }
3859
9bdaf1ba 3860 /* Now restore inhibit_defer_pop to its actual original value. */
3861 OK_DEFER_POP;
3862
3863 pop_temp_slots ();
3864
3865 /* Copy the value to the right place. */
20f7032f 3866 if (outmode != VOIDmode && retval)
9bdaf1ba 3867 {
3868 if (mem_value)
3869 {
3870 if (value == 0)
3871 value = mem_value;
3872 if (value != mem_value)
3873 emit_move_insn (value, mem_value);
3874 }
40651bac 3875 else if (GET_CODE (valreg) == PARALLEL)
3876 {
3877 if (value == 0)
3878 value = gen_reg_rtx (outmode);
4c3a0ea5 3879 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
40651bac 3880 }
9bdaf1ba 3881 else
4e1a3169 3882 {
3b2411a8 3883 /* Convert to the proper mode if a promotion has been active. */
4e1a3169 3884 if (GET_MODE (valreg) != outmode)
3885 {
3886 int unsignedp = TYPE_UNSIGNED (tfom);
3887
3b2411a8 3888 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
3889 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4e1a3169 3890 == GET_MODE (valreg));
4e1a3169 3891 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3892 }
3893
3894 if (value != 0)
3895 emit_move_insn (value, valreg);
3896 else
3897 value = valreg;
3898 }
9bdaf1ba 3899 }
3900
4448f543 3901 if (ACCUMULATE_OUTGOING_ARGS)
9bdaf1ba 3902 {
4448f543 3903#ifdef REG_PARM_STACK_SPACE
3904 if (save_area)
6e96b626 3905 restore_fixed_argument_area (save_area, argblock,
3906 high_to_save, low_to_save);
9bdaf1ba 3907#endif
c87678e4 3908
4448f543 3909 /* If we saved any argument areas, restore them. */
3910 for (count = 0; count < nargs; count++)
3911 if (argvec[count].save_area)
3912 {
3913 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
241399f6 3914 rtx adr = plus_constant (argblock,
3915 argvec[count].locate.offset.constant);
3916 rtx stack_area = gen_rtx_MEM (save_mode,
3917 memory_address (save_mode, adr));
4448f543 3918
f9c6a9c3 3919 if (save_mode == BLKmode)
3920 emit_block_move (stack_area,
a0c938f0 3921 validize_mem (argvec[count].save_area),
f9c6a9c3 3922 GEN_INT (argvec[count].locate.size.constant),
3923 BLOCK_OP_CALL_PARM);
3924 else
3925 emit_move_insn (stack_area, argvec[count].save_area);
4448f543 3926 }
9bdaf1ba 3927
4448f543 3928 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3929 stack_usage_map = initial_stack_usage_map;
3930 }
b39693dd 3931
a331ea1b 3932 if (stack_usage_map_buf)
3933 free (stack_usage_map_buf);
3934
20f7032f 3935 return value;
3936
3937}
3938\f
3939/* Output a library call to function FUN (a SYMBOL_REF rtx)
3941 for a value of mode OUTMODE,
3942 with NARGS different arguments, passed as alternating rtx values
3943 and machine_modes to convert them to.
20f7032f 3944
2dd6f9ed 3945 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
3946 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
3947 other types of library calls. */
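/* A typical use, with two SImode operands and no return value (LIBFUNC,
   OP0 and OP1 are illustrative placeholders for a SYMBOL_REF and two
   rtx operands):

     emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
			op0, SImode, op1, SImode);  */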
20f7032f 3948
3949void
ee582a61 3950emit_library_call (rtx orgfun, enum libcall_type fn_type,
3951 enum machine_mode outmode, int nargs, ...)
20f7032f 3952{
ee582a61 3953 va_list p;
4c9e08a4 3954
ee582a61 3955 va_start (p, nargs);
26dfc457 3956 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
ee582a61 3957 va_end (p);
20f7032f 3958}
3959\f
3960/* Like emit_library_call except that an extra argument, VALUE,
3961 comes second and says where to store the result.
3962 (If VALUE is zero, this function chooses a convenient way
 3963	   to return the value.)
3964
3965 This function returns an rtx for where the value is to be found.
3966 If VALUE is nonzero, VALUE is returned. */
3967
3968rtx
ee582a61 3969emit_library_call_value (rtx orgfun, rtx value,
3970 enum libcall_type fn_type,
3971 enum machine_mode outmode, int nargs, ...)
20f7032f 3972{
7ad77798 3973 rtx result;
ee582a61 3974 va_list p;
4c9e08a4 3975
ee582a61 3976 va_start (p, nargs);
7ad77798 3977 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
3978 nargs, p);
ee582a61 3979 va_end (p);
20f7032f 3980
7ad77798 3981 return result;
8ddf1c7e 3982}
3983\f
66d433c7 3984/* Store a single argument for a function call
3985 into the register or memory area where it must be passed.
3986 *ARG describes the argument value and where to pass it.
3987
3988 ARGBLOCK is the address of the stack-block for all the arguments,
f9e15121 3989 or 0 on a machine where arguments are pushed individually.
66d433c7 3990
3991 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
c87678e4 3992    so we must be careful about how the stack is used.
66d433c7 3993
3994 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3995 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3996 that we need not worry about saving and restoring the stack.
3997
57679d39 3998 FNDECL is the declaration of the function we are calling.
c87678e4 3999
d10cfa8d 4000 Return nonzero if this arg should cause sibcall failure,
57679d39 4001 zero otherwise. */
66d433c7 4002
57679d39 4003static int
4c9e08a4 4004store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4005 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
66d433c7 4006{
19cb6b50 4007 tree pval = arg->tree_value;
66d433c7 4008 rtx reg = 0;
4009 int partial = 0;
4010 int used = 0;
df9f2bb6 4011 int i, lower_bound = 0, upper_bound = 0;
57679d39 4012 int sibcall_failure = 0;
66d433c7 4013
4014 if (TREE_CODE (pval) == ERROR_MARK)
57679d39 4015 return 1;
66d433c7 4016
1b117c60 4017 /* Push a new temporary level for any temporaries we make for
4018 this argument. */
4019 push_temp_slots ();
4020
02510658 4021 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
66d433c7 4022 {
4448f543 4023 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4024 save any previous data at that location. */
4025 if (argblock && ! variable_size && arg->stack)
4026 {
66d433c7 4027#ifdef ARGS_GROW_DOWNWARD
4448f543 4028 /* stack_slot is negative, but we want to index stack_usage_map
4029 with positive values. */
4030 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4031 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4032 else
4033 upper_bound = 0;
66d433c7 4034
241399f6 4035 lower_bound = upper_bound - arg->locate.size.constant;
66d433c7 4036#else
4448f543 4037 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4038 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4039 else
4040 lower_bound = 0;
66d433c7 4041
241399f6 4042 upper_bound = lower_bound + arg->locate.size.constant;
66d433c7 4043#endif
4044
fd2c0c1d 4045 i = lower_bound;
4046 /* Don't worry about things in the fixed argument area;
4047 it has already been saved. */
4048 if (i < reg_parm_stack_space)
4049 i = reg_parm_stack_space;
4050 while (i < upper_bound && stack_usage_map[i] == 0)
4051 i++;
66d433c7 4052
fd2c0c1d 4053 if (i < upper_bound)
66d433c7 4054 {
241399f6 4055 /* We need to make a save area. */
4056 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4057 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4058 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4059 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4448f543 4060
4061 if (save_mode == BLKmode)
4062 {
387bc205 4063 tree ot = TREE_TYPE (arg->tree_value);
4064 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4065 | TYPE_QUAL_CONST));
4066
4067 arg->save_area = assign_temp (nt, 0, 1, 1);
4448f543 4068 preserve_temp_slots (arg->save_area);
4069 emit_block_move (validize_mem (arg->save_area), stack_area,
c2ca1bab 4070 GEN_INT (arg->locate.size.constant),
0378dbdc 4071 BLOCK_OP_CALL_PARM);
4448f543 4072 }
4073 else
4074 {
4075 arg->save_area = gen_reg_rtx (save_mode);
4076 emit_move_insn (arg->save_area, stack_area);
4077 }
66d433c7 4078 }
4079 }
4080 }
b3caaea3 4081
66d433c7 4082 /* If this isn't going to be placed on both the stack and in registers,
4083 set up the register and number of words. */
4084 if (! arg->pass_on_stack)
04d6fcf8 4085 {
4086 if (flags & ECF_SIBCALL)
4087 reg = arg->tail_call_reg;
4088 else
4089 reg = arg->reg;
4090 partial = arg->partial;
4091 }
66d433c7 4092
231bd014 4093 /* Being passed entirely in a register. We shouldn't be called in
4094 this case. */
4095 gcc_assert (reg == 0 || partial != 0);
a0c938f0 4096
f28c7a75 4097 /* If this arg needs special alignment, don't load the registers
4098 here. */
4099 if (arg->n_aligned_regs != 0)
4100 reg = 0;
c87678e4 4101
f28c7a75 4102 /* If this is being passed partially in a register, we can't evaluate
66d433c7 4103 it directly into its stack slot. Otherwise, we can. */
4104 if (arg->value == 0)
f848041f 4105 {
f848041f 4106 /* stack_arg_under_construction is nonzero if a function argument is
4107 being evaluated directly into the outgoing argument list and
4108 expand_call must take special action to preserve the argument list
4109 if it is called recursively.
4110
4111 For scalar function arguments stack_usage_map is sufficient to
4112 determine which stack slots must be saved and restored. Scalar
4113 arguments in general have pass_on_stack == 0.
4114
4115 If this argument is initialized by a function which takes the
4116 address of the argument (a C++ constructor or a C function
4117 returning a BLKmode structure), then stack_usage_map is
4118 insufficient and expand_call must push the stack around the
4119 function call. Such arguments have pass_on_stack == 1.
4120
4121 Note that it is always safe to set stack_arg_under_construction,
4122 but this generates suboptimal code if set when not needed. */
4123
4124 if (arg->pass_on_stack)
4125 stack_arg_under_construction++;
4448f543 4126
7dbf1af4 4127 arg->value = expand_expr (pval,
4128 (partial
4129 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4130 ? NULL_RTX : arg->stack,
a35a63ff 4131 VOIDmode, EXPAND_STACK_PARM);
1c0c37a5 4132
 4133      /* If we are promoting the object (or if for any other reason the
 4134	 mode doesn't agree), convert the mode.  */
4135
1560ef8f 4136 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4137 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4138 arg->value, arg->unsignedp);
1c0c37a5 4139
f848041f 4140 if (arg->pass_on_stack)
4141 stack_arg_under_construction--;
f848041f 4142 }
66d433c7 4143
63864e1c 4144 /* Check for overlap with already clobbered argument area. */
ff6c0ab2 4145 if ((flags & ECF_SIBCALL)
4146 && MEM_P (arg->value)
4147 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4148 arg->locate.size.constant))
4149 sibcall_failure = 1;
63864e1c 4150
66d433c7 4151 /* Don't allow anything left on stack from computation
4152 of argument to alloca. */
02510658 4153 if (flags & ECF_MAY_BE_ALLOCA)
66d433c7 4154 do_pending_stack_adjust ();
4155
4156 if (arg->value == arg->stack)
8a06f2d4 4157 /* If the value is already in the stack slot, we are done. */
4158 ;
1c0c37a5 4159 else if (arg->mode != BLKmode)
66d433c7 4160 {
19cb6b50 4161 int size;
851fc2b3 4162 unsigned int parm_align;
66d433c7 4163
4164 /* Argument is a scalar, not entirely passed in registers.
4165 (If part is passed in registers, arg->partial says how much
4166 and emit_push_insn will take care of putting it there.)
c87678e4 4167
66d433c7 4168 Push it, and if its size is less than the
4169 amount of space allocated to it,
4170 also bump stack pointer by the additional space.
4171 Note that in C the default argument promotions
4172 will prevent such mismatches. */
4173
1c0c37a5 4174 size = GET_MODE_SIZE (arg->mode);
66d433c7 4175 /* Compute how much space the push instruction will push.
4176 On many machines, pushing a byte will advance the stack
4177 pointer by a halfword. */
4178#ifdef PUSH_ROUNDING
4179 size = PUSH_ROUNDING (size);
4180#endif
4181 used = size;
4182
4183 /* Compute how much space the argument should get:
4184 round up to a multiple of the alignment for arguments. */
1c0c37a5 4185 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
66d433c7 4186 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4187 / (PARM_BOUNDARY / BITS_PER_UNIT))
4188 * (PARM_BOUNDARY / BITS_PER_UNIT));
4189
851fc2b3 4190 /* Compute the alignment of the pushed argument. */
4191 parm_align = arg->locate.boundary;
4192 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4193 {
4194 int pad = used - size;
4195 if (pad)
4196 {
4197 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4198 parm_align = MIN (parm_align, pad_align);
4199 }
4200 }
4201
66d433c7 4202 /* This isn't already where we want it on the stack, so put it there.
4203 This can either be done with push or copy insns. */
4c9e08a4 4204 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
851fc2b3 4205 parm_align, partial, reg, used - size, argblock,
241399f6 4206 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4207 ARGS_SIZE_RTX (arg->locate.alignment_pad));
d5c9a99f 4208
4209 /* Unless this is a partially-in-register argument, the argument is now
4210 in the stack. */
4211 if (partial == 0)
4212 arg->value = arg->stack;
66d433c7 4213 }
4214 else
4215 {
4216 /* BLKmode, at least partly to be pushed. */
4217
cf78c9ff 4218 unsigned int parm_align;
19cb6b50 4219 int excess;
66d433c7 4220 rtx size_rtx;
4221
4222 /* Pushing a nonscalar.
4223 If part is passed in registers, PARTIAL says how much
4224 and emit_push_insn will take care of putting it there. */
4225
4226 /* Round its size up to a multiple
4227 of the allocation unit for arguments. */
4228
241399f6 4229 if (arg->locate.size.var != 0)
66d433c7 4230 {
4231 excess = 0;
241399f6 4232 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
66d433c7 4233 }
4234 else
4235 {
f054eb3c 4236 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4237 for BLKmode is careful to avoid it. */
4238 excess = (arg->locate.size.constant
4239 - int_size_in_bytes (TREE_TYPE (pval))
4240 + partial);
623282b0 4241 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
b9c74b4d 4242 NULL_RTX, TYPE_MODE (sizetype),
4243 EXPAND_NORMAL);
66d433c7 4244 }
4245
c5dc0c32 4246 parm_align = arg->locate.boundary;
cf78c9ff 4247
4248 /* When an argument is padded down, the block is aligned to
4249 PARM_BOUNDARY, but the actual argument isn't. */
4250 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4251 {
241399f6 4252 if (arg->locate.size.var)
cf78c9ff 4253 parm_align = BITS_PER_UNIT;
4254 else if (excess)
4255 {
28397255 4256 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
cf78c9ff 4257 parm_align = MIN (parm_align, excess_align);
4258 }
4259 }
4260
e16ceb8e 4261 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
57679d39 4262 {
4263 /* emit_push_insn might not work properly if arg->value and
241399f6 4264 argblock + arg->locate.offset areas overlap. */
57679d39 4265 rtx x = arg->value;
4266 int i = 0;
4267
abe32cce 4268 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
57679d39 4269 || (GET_CODE (XEXP (x, 0)) == PLUS
4270 && XEXP (XEXP (x, 0), 0) ==
abe32cce 4271 crtl->args.internal_arg_pointer
971ba038 4272 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
57679d39 4273 {
abe32cce 4274 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
57679d39 4275 i = INTVAL (XEXP (XEXP (x, 0), 1));
4276
21dda4ee 4277 /* expand_call should ensure this. */
231bd014 4278 gcc_assert (!arg->locate.offset.var
2ad152f7 4279 && arg->locate.size.var == 0
971ba038 4280 && CONST_INT_P (size_rtx));
57679d39 4281
241399f6 4282 if (arg->locate.offset.constant > i)
57679d39 4283 {
241399f6 4284 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
57679d39 4285 sibcall_failure = 1;
4286 }
241399f6 4287 else if (arg->locate.offset.constant < i)
57679d39 4288 {
2ad152f7 4289 /* Use arg->locate.size.constant instead of size_rtx
4290 because we only care about the part of the argument
4291 on the stack. */
4292 if (i < (arg->locate.offset.constant
4293 + arg->locate.size.constant))
4294 sibcall_failure = 1;
4295 }
4296 else
4297 {
4298 /* Even though they appear to be at the same location,
4299 if part of the outgoing argument is in registers,
4300 they aren't really at the same location. Check for
4301 this by making sure that the incoming size is the
4302 same as the outgoing size. */
4303 if (arg->locate.size.constant != INTVAL (size_rtx))
57679d39 4304 sibcall_failure = 1;
4305 }
4306 }
4307 }
4308
1c0c37a5 4309 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
cf78c9ff 4310 parm_align, partial, reg, excess, argblock,
241399f6 4311 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4312 ARGS_SIZE_RTX (arg->locate.alignment_pad));
66d433c7 4313
d5c9a99f 4314 /* Unless this is a partially-in-register argument, the argument is now
4315 in the stack.
66d433c7 4316
d5c9a99f 4317 ??? Unlike the case above, in which we want the actual
4318 address of the data, so that we can load it directly into a
4319 register, here we want the address of the stack slot, so that
4320 it's properly aligned for word-by-word copying or something
4321 like that. It's not clear that this is always correct. */
4322 if (partial == 0)
4323 arg->value = arg->stack_slot;
4324 }
b600a907 4325
4326 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4327 {
4328 tree type = TREE_TYPE (arg->tree_value);
4329 arg->parallel_value
4330 = emit_group_load_into_temps (arg->reg, arg->value, type,
4331 int_size_in_bytes (type));
4332 }
66d433c7 4333
a35a63ff 4334 /* Mark all slots this store used. */
4335 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4336 && argblock && ! variable_size && arg->stack)
4337 for (i = lower_bound; i < upper_bound; i++)
4338 stack_usage_map[i] = 1;
4339
66d433c7 4340 /* Once we have pushed something, pops can't safely
4341 be deferred during the rest of the arguments. */
4342 NO_DEFER_POP;
4343
148b08de 4344 /* Free any temporary slots made in processing this argument. Show
4345 that we might have taken the address of something and pushed that
4346 as an operand. */
4347 preserve_temp_slots (NULL_RTX);
66d433c7 4348 free_temp_slots ();
1b117c60 4349 pop_temp_slots ();
57679d39 4350
4351 return sibcall_failure;
66d433c7 4352}
890f0c17 4353
0336f0f0 4354/* Nonzero if we do not know how to pass TYPE solely in registers. */
890f0c17 4355
0336f0f0 4356bool
4357must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 4358 const_tree type)
0336f0f0 4359{
4360 if (!type)
4361 return false;
4362
4363 /* If the type has variable size... */
4364 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4365 return true;
890f0c17 4366
0336f0f0 4367 /* If the type is marked as addressable (it is required
4368 to be constructed into the stack)... */
4369 if (TREE_ADDRESSABLE (type))
4370 return true;
4371
4372 return false;
4373}
890f0c17 4374
0d568ddf 4375/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
0336f0f0 4376 takes trailing padding of a structure into account. */
4377/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
890f0c17 4378
4379bool
fb80456a 4380must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
890f0c17 4381{
4382 if (!type)
dceaa0b1 4383 return false;
890f0c17 4384
4385 /* If the type has variable size... */
4386 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4387 return true;
4388
4389 /* If the type is marked as addressable (it is required
4390 to be constructed into the stack)... */
4391 if (TREE_ADDRESSABLE (type))
4392 return true;
4393
4394 /* If the padding and mode of the type is such that a copy into
4395 a register would put it into the wrong part of the register. */
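  /* For instance, with a 32-bit PARM_BOUNDARY, a 5-byte BLKmode
     structure padded upward on a big-endian target hits this case.  */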
4396 if (mode == BLKmode
4397 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4398 && (FUNCTION_ARG_PADDING (mode, type)
4399 == (BYTES_BIG_ENDIAN ? upward : downward)))
4400 return true;
4401
4402 return false;
4403}