/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
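
/* Editorial aside, not in the original source: on a target where
   PREFERRED_STACK_BOUNDARY is 128 bits and BITS_PER_UNIT is 8,
   STACK_BYTES works out to 128 / 8 = 16, i.e. a 16-byte unit of
   stack alignment.  */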

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
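
/* Editorial aside, not in the original source: as a made-up example of
   the PARTIAL field above, take a 12-byte BLKmode argument on a 32-bit
   target whose ABI has only two word-sized argument registers left.
   The target's arg_partial_bytes hook would report partial == 8: the
   first two words travel in registers and the trailing 4 bytes occupy
   the stack.  By contrast, reg != 0 with partial == 0 means the whole
   argument travels in registers.  */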

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, CUMULATIVE_ARGS *, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
	funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg,
				     gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     n_pop);
      else
	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			       rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
					   gen_rtx_MEM (FUNCTION_MODE, funexp),
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
	(VOIDmode,
	 gen_rtx_USE (VOIDmode,
		      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
	 call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
					       REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
	 throw, which we already took care of.  */
      if (rn > 0)
	REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						   REG_NOTES (call_insn));
      note_current_region_may_contain_throw ();
    }

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
						 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
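
/* Editorial trace, not in the original source: on a hypothetical
   callee-pop target where RETURN_POPS_ARGS yields 16 for a call with
   16 bytes of stack arguments, the code above prefers a "call_pop"
   pattern, records the stack-pointer clobber in
   CALL_INSN_FUNCTION_USAGE, and decreases rounded_stack_size and
   stack_pointer_delta by 16, so no popping code is emitted afterwards.
   On a caller-pop target (n_popped == 0) with -fdefer-pop active, the
   16 bytes are instead accumulated into pending_stack_adjust and
   popped later, possibly combined with other adjustments.  */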

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_NORETURN;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork"))
	       || (tname[0] == 'g' && tname[1] == 'e'
		   && !strcmp (tname, "getcontext")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_NORETURN;
    }

  return flags;
}
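
/* Editorial aside, not in the original source: given the matching
   above, a file-scope extern named "setjmp", "sigsetjmp", "vfork" or
   "getcontext" picks up ECF_RETURNS_TWICE; "longjmp" and "siglongjmp"
   pick up ECF_NORETURN; "alloca" and "__builtin_alloca" pick up
   ECF_MAY_BE_ALLOCA.  Because the prefixes _, __ and __x are stripped
   first, "__setjmp" and "__xsetjmp" classify the same as "setjmp".  */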

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */
bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
	  & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
	flags |= ECF_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
	flags |= ECF_CONST;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST);
    }

  return flags;
}
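
/* Editorial aside, not in the original source: for a declaration such
   as

     int length (const char *s) __attribute__ ((pure, nothrow));

   the checks above yield ECF_PURE | ECF_NOTHROW.  A TREE_READONLY,
   non-volatile decl (a `const' function) yields ECF_CONST instead, and
   a TREE_THIS_VOLATILE decl (declared `noreturn') adds ECF_NORETURN.  */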

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !LEGITIMATE_CONSTANT_P (args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
		 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	enum machine_mode save_mode;
	int delta;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	delta = -high;
#else
	delta = low;
#endif
	stack_area = gen_rtx_MEM (save_mode,
				  memory_address (save_mode,
						  plus_constant (argblock,
								 delta)));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
			    memory_address (save_mode,
					    plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, word_mode,
			     word);
	  }
      }
}
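
/* Editorial aside, not in the original source: to make the endian
   correction above concrete, take a 3-byte struct on a big-endian
   target with UNITS_PER_WORD == 4.  Then endian_correction is
   32 - 3 * 8 = 8, so the 24 extracted bits are stored at bit offset 8
   of the word-sized pseudo, i.e. flush against its most significant
   end, which is where a downward-padded value lives in a register.  */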

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
				 struct arg_data *args,
				 struct args_size *args_size,
				 int n_named_args ATTRIBUTE_UNUSED,
				 tree exp, tree struct_value_addr_value,
				 tree fndecl,
				 CUMULATIVE_ARGS *args_so_far,
				 int reg_parm_stack_space,
				 rtx *old_stack_level, int *old_pending_adj,
				 int *must_preallocate, int *ecf_flags,
				 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
	args[j].tree_value = struct_value_addr_value;
	j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
	tree argtype = TREE_TYPE (arg);
	if (targetm.calls.split_complex_arg
	    && argtype
	    && TREE_CODE (argtype) == COMPLEX_TYPE
	    && targetm.calls.split_complex_arg (argtype))
	  {
	    tree subtype = TREE_TYPE (argtype);
	    arg = save_expr (arg);
	    args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
	    j += inc;
	    args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
	  }
	else
	  args[j].tree_value = arg;
	j += inc;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many bytes are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
			     type, argpos < n_named_args))
	{
	  bool callee_copies;
	  tree base;

	  callee_copies
	    = reference_callee_copied (args_so_far, TYPE_MODE (type),
				       type, argpos < n_named_args);

	  /* If we're compiling a thunk, pass through invisible references
	     instead of making a copy.  */
	  if (call_from_thunk_p
	      || (callee_copies
		  && !TREE_ADDRESSABLE (type)
		  && (base = get_base_address (args[i].tree_value))
		  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
	    {
	      /* We can't use sibcalls if a callee-copied argument is
		 stored in the current function's frame.  */
	      if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
		*may_tailcall = false;

	      args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
	      type = TREE_TYPE (args[i].tree_value);

	      *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (args[i].tree_value);

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space
				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0, false);

	      if (callee_copies)
		*ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
	      else
		*ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	      args[i].tree_value
		= build_fold_addr_expr (make_tree (type, copy));
	      type = TREE_TYPE (args[i].tree_value);
	      *may_tailcall = false;
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
	mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
						     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      if (args[i].reg)
	args[i].partial
	  = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
					     argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     args[i].pass_on_stack ? 0 : args[i].partial,
			     fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	args[i].locate.where_pad =
	  BLOCK_REG_PADDING (mode, type,
			     int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
	ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  if (!OUTGOING_REG_PARM_STACK_SPACE)
	    args_size->var
	      = size_binop (MINUS_EXPR, args_size->var,
			    ssize_int (reg_parm_stack_space));
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

      if (!OUTGOING_REG_PARM_STACK_SPACE)
	args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
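
/* Editorial sketch, not in the original source: the constant-size
   branch above pads the block so that stack_pointer_delta plus the
   block size is a multiple of the preferred boundary.  The guarded,
   stand-alone program below replays that arithmetic with made-up
   numbers.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int constant = 20;   /* bytes of outgoing arguments */
  int delta = 8;       /* stack_pointer_delta already applied */
  int boundary = 16;   /* preferred stack boundary in bytes */
  int rounded = (constant + delta + boundary - 1)
		/ boundary * boundary - delta;
  /* Prints 24: 20 is padded to 24 so that 8 + 24 is 16-aligned.  */
  printf ("%d\n", rounded);
  return 0;
}
#endif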

/* Precompute parameters as needed for a function call.

   FLAGS is a mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
	{
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					    args[i].unsignedp);
	    }
#endif
	}
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
			   struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;
	  unsigned int align, boundary;
	  unsigned int units_on_stack = 0;
	  enum machine_mode partial_mode = VOIDmode;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack
	      && args[i].reg != 0
	      && args[i].partial == 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      units_on_stack = args[i].locate.size.constant;
	      partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
					    MODE_INT, 1);
	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack, GEN_INT (units_on_stack));
	    }
	  else
	    {
	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  align = BITS_PER_UNIT;
	  boundary = args[i].locate.boundary;
	  if (args[i].locate.where_pad != downward)
	    align = boundary;
	  else if (GET_CODE (offset) == CONST_INT)
	    {
	      align = INTVAL (offset) * BITS_PER_UNIT | boundary;
	      align = align & -align;
	    }
	  set_mem_align (args[i].stack, align);

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
	    }
	  else
	    {
	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack_slot,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
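
/* Editorial sketch, not in the original source: the downward-padding
   branch above derives the provable alignment of an offset slot with
   the "x & -x" lowest-set-bit trick.  The guarded, stand-alone program
   below replays it with made-up numbers.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int offset_bits = 12 * 8;  /* slot starts 12 bytes in */
  unsigned int boundary = 64;	      /* slot boundary in bits */
  unsigned int align = offset_bits | boundary;
  align = align & -align;	      /* lowest set bit */
  /* Prints 32: an offset of 12 bytes from a 64-bit boundary is
     provably 32-bit aligned.  */
  printf ("%u\n", align);
  return 0;
}
#endif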

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with the already clobbered
   argument area.  This function is used to determine if we should
   give up a sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (addr == current_function_internal_arg_pointer)
    i = 0;
  else if (GET_CODE (addr) == PLUS
	   && XEXP (addr, 0) == current_function_internal_arg_pointer
	   && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
	   && (XEXP (addr, 0) == current_function_internal_arg_pointer
	       || XEXP (addr, 1) == current_function_internal_arg_pointer))
    return true;
  else
    return false;

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
	if (i + k < stored_args_map->n_bits
	    && TEST_BIT (stored_args_map, i + k))
	  return true;
    }

  return false;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */
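/* Illustrative note (an addition, not from the original sources): for
   an argument with partial == 8 on a target with UNITS_PER_WORD == 4,
   the code below computes nregs == 2, so two words go into registers
   while store_one_arg has already placed the rest on the stack.  */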

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle the case where we have a value that needs shifting
                 up to the msb, e.g. a QImode value being padded
                 upward on a BYTES_BIG_ENDIAN machine.  */
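              /* Worked example (an illustrative addition, not from the
                 original sources): a QImode argument (size == 1) with
                 UNITS_PER_WORD == 4 gives shift == (4 - 1) * 8 == 24,
                 moving the byte from the least into the most
                 significant position of the word register.  */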
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with already clobbered argument area.  */
              if (is_sibcall
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                                                           size))
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                 )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjustment to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */
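/* Worked example (an illustrative addition, not from the original
   sources): with PREFERRED_UNIT_STACK_BOUNDARY == 16,
   stack_pointer_delta == 12, UNADJUSTED_ARGS_SIZE == 8 and
   PENDING_STACK_ADJUST == 24, the unadjusted alignment is
   (12 + 8) % 16 == 4, and 4 - (24 % 16) == -4, so the function
   returns 24 + (-4) == 20: popping 20 bytes leaves the stack
   under-aligned by exactly the 8 argument bytes about to be
   pushed.  */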

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}

/* Scan expression X to see whether it dereferences any argument slots
   already clobbered by tail-call arguments (as noted in the
   stored_args_map bitmap).  Return nonzero if X dereferences such an
   argument slot, zero otherwise.  */
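/* Clarifying note (an addition, not from the original sources): the
   recursion below is driven by the RTX format strings, where 'e'
   marks a subexpression and 'E' a vector of subexpressions, so every
   MEM reachable from X -- e.g. the source of (set (reg) (mem ...)) --
   gets checked against the clobbered-argument bitmap.  */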

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

/* Scan the sequence of insns after INSN to see whether it dereferences
   any argument slots already clobbered by tail-call arguments (as noted
   in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add the
   stack slots for ARG to the stored_args_map bitmap afterwards (when
   ARG is a register, MARK_STORED_ARGS_MAP should be 0).  Return nonzero
   if the sequence after INSN dereferences such argument slots, zero
   otherwise.  */
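/* Usage sketch (an addition, not from the original sources): callers
   typically save the insn stream position before expanding one
   argument and then scan only the insns emitted for it, e.g.

     rtx before_arg = get_last_insn ();
     ... expand or store args[i] ...
     if (check_sibcall_argument_overlap (before_arg, &args[i], 1))
       sibcall_failure = 1;

   where MARK_STORED_ARGS_MAP is 1 for arguments stored on the stack
   and 0 for register-only arguments.  */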

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */
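/* Worked example (an illustrative addition, not from the original
   sources): an SImode value in the most significant half of a 64-bit
   register gives shift == 64 - 32 == 32; with LEFT_P false it is
   arithmetic-shifted right by 32 into the low half.  */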

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}

/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */
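/* Usage sketch (an addition, not from the original sources): this is
   the expander's entry point for calls, reached roughly as

     rtx result = expand_call (exp, target, ignore);

   where EXP is the CALL_EXPR, TARGET is only a hint and may be
   ignored, and IGNORE is nonzero when the caller discards the
   value.  */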

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of the implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */
  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;            /* Place that it is saved.  */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree p = CALL_EXPR_FN (exp);
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (p));
      flags |= flags_from_decl_or_type (fntype);
    }

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a pure or const function call is ignored (or void),
     and none of its arguments are volatile, we can avoid expanding the
     call and just evaluate the arguments for side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (ignore || target == const0_rtx
          || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }
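  /* For illustration (an addition, not from the original sources):
     given a statement like "(void) pure_fn (x++);" the call itself is
     dropped by the code above and only the side effect of evaluating
     "x++" is expanded.  */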

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

  if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fndecl))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
        pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
        struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

        if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
          structure_value_addr = XEXP (target, 0);
        else
          {
            /* For variable-sized objects, we must be called with a target
               specified.  If we were to allocate space on the stack here,
               we would have no way of knowing when to free it.  */
            rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);

            mark_temp_addr_taken (d);
            structure_value_addr = XEXP (d, 0);
            target = 0;
          }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      if (i && i->preferred_incoming_stack_boundary)
        preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  */
  if (targetm.calls.split_complex_arg)
    {
      call_expr_arg_iterator iter;
      tree arg;
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        {
          tree type = TREE_TYPE (arg);
          if (type && TREE_CODE (type) == COMPLEX_TYPE
              && targetm.calls.split_complex_arg (type))
            num_complex_actuals++;
        }
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
         virtual_outgoing_args_rtx, we can always use it.  If it
         is not a REG, we must always copy it into a register.
         If it is virtual_outgoing_args_rtx, we must copy it to another
         register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
                  || (ACCUMULATE_OUTGOING_ARGS
                      && stack_arg_under_construction
                      && structure_value_addr == virtual_outgoing_args_rtx)
                  ? copy_addr_to_reg (convert_memory_address
                                      (Pmode, structure_value_addr))
                  : structure_value_addr);

      structure_value_addr_value =
        make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  num_actuals =
    call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
         /* Count the struct value address, if it is passed as a parm.  */
         + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (&args_so_far))
    ;
  else if (type_arg_types != 0
           && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = alloca (num_actuals * sizeof (struct arg_data));
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
                                   n_named_args, exp,
                                   structure_value_addr_value, fndecl,
                                   &args_so_far, reg_parm_stack_space,
                                   &old_stack_level, &old_pending_adj,
                                   &must_preallocate, &flags,
                                   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't
         try to make a cse'able block for this call.  We may be able to
         do this eventually, but it is too complicated to keep track of
         what insns go in the cse'able block and which don't.  */

      flags &= ~ECF_LIBCALL_BLOCK;
      must_preallocate = 1;
    }

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
                                                num_actuals, args,
                                                &args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
          || reg_mentioned_p (virtual_outgoing_args_rtx,
                              structure_value_addr))
      && (args_size.var
          || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || lookup_stmt_eh_region (exp) >= 0
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;

  /* Remaining reasons for the tail call optimization to fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
         structure_value_addr can be allocated on the stack.
         It does not seem worth the effort since few optimizable
         sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
      /* Check whether the target is able to optimize the call
         into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
         optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
         some of the caller's arguments, but could clobber them beforehand if
         the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
         function, we cannot change it into a sibling call.
         current_function_pretend_args_size is not part of the
         stack allocated by our caller.  */
      || args_size.constant > (current_function_args_size
                               - current_function_pretend_args_size)
      /* If the callee pops its own arguments, then it must pop exactly
         the same number of arguments as the current function.  */
      || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
          != RETURN_POPS_ARGS (current_function_decl,
                               TREE_TYPE (current_function_decl),
                               current_function_args_size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;

  /* Ensure the current function's preferred stack boundary is at least
     what we need.  We don't have to increase alignment for recursive
     functions.  */
  if (cfun->preferred_stack_boundary < preferred_stack_boundary
      && fndecl != current_function_decl)
    cfun->preferred_stack_boundary = preferred_stack_boundary;
  if (fndecl == current_function_decl)
    cfun->recursive_call_emit = true;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
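  /* Clarifying note (an addition, not from the original sources):
     pass 0 builds the tentative sibling-call sequence and pass 1 the
     normal one; if pass 0 sets sibcall_failure, the pass 1 sequence
     is the one ultimately emitted.  */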
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
         recursion "call".  That way we know any adjustment after the tail
         recursion call can be ignored if we indeed use the tail
         call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg;

      if (pass == 0)
        {
          /* State variables we need to save and restore between
             iterations.  */
          save_pending_stack_adjust = pending_stack_adjust;
          save_stack_pointer_delta = stack_pointer_delta;
        }
      if (pass)
        flags &= ~ECF_SIBCALL;
      else
        flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
         through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

         From this point on, if the sibling call fails, we want to set
         sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
         Also, do all pending adjustments now if there is any chance
         this might be a call to alloca or if we are expanding a sibling
         call sequence or if we are calling a function that is to return
         with stack pointer depressed.
         Also do the adjustments before a throwing call, otherwise
         exception handling can fail; PR 19225.  */
      if (pending_stack_adjust >= 32
          || (pending_stack_adjust > 0
              && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
          || (pending_stack_adjust > 0
              && flag_exceptions && !(flags & ECF_NOTHROW))
          || pass == 0)
        do_pending_stack_adjust ();

      /* When calling a const function, we must pop the stack args right away,
         so that the pop is deleted or moved with the call.  */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
        NO_DEFER_POP;

      /* Precompute any arguments as needed.  */
      if (pass)
        precompute_arguments (flags, num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
         if a libcall is deleted.  */
      if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
        start_sequence ();

      if (pass == 0 && cfun->stack_protect_guard)
        stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
         and constant sizes must be combined, the size may have to be rounded,
         and there may be a minimum required size.  When generating a sibcall
         pattern, do not round up, since we'll be re-using whatever space our
         caller provided.  */
      unadjusted_args_size
        = compute_argument_block_size (reg_parm_stack_space,
                                       &adjusted_args_size,
                                       (pass == 0 ? 0
                                        : preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
         incoming argument block.  */
      if (pass == 0)
        {
          argblock = virtual_incoming_args_rtx;
          argblock
#ifdef STACK_GROWS_DOWNWARD
            = plus_constant (argblock, current_function_pretend_args_size);
#else
            = plus_constant (argblock, -current_function_pretend_args_size);
#endif
          stored_args_map = sbitmap_alloc (args_size.constant);
          sbitmap_zero (stored_args_map);
        }

      /* If we have no actual push instructions, or shouldn't use them,
         make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
        {
          if (old_stack_level == 0)
            {
              emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
              old_stack_pointer_delta = stack_pointer_delta;
              old_pending_adj = pending_stack_adjust;
              pending_stack_adjust = 0;
              /* stack_arg_under_construction says whether a stack arg is
                 being constructed at the old stack level.  Pushing the stack
                 gets a clean outgoing argument block.  */
              old_stack_arg_under_construction = stack_arg_under_construction;
              stack_arg_under_construction = 0;
            }
          argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
        }
      else
        {
          /* Note that we must go through the motions of allocating an argument
             block even if the size is zero because we may be storing args
             in the area reserved for register arguments, which may be part of
             the stack frame.  */

          int needed = adjusted_args_size.constant;

          /* Store the maximum argument space used.  It will be pushed by
             the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
             checking).  */

          if (needed > current_function_outgoing_args_size)
            current_function_outgoing_args_size = needed;

          if (must_preallocate)
            {
              if (ACCUMULATE_OUTGOING_ARGS)
                {
                  /* Since the stack pointer will never be pushed, it is
                     possible for the evaluation of a parm to clobber
                     something we have already written to the stack.
                     Since most function calls on RISC machines do not use
                     the stack, this is uncommon, but must work correctly.

                     Therefore, we save any area of the stack that was already
                     written and that we are using.  Here we set up to do this
                     by making a new stack usage map from the old one.  The
                     actual save will be done by store_one_arg.

                     Another approach might be to try to reorder the argument
                     evaluations to avoid this conflicting stack usage.  */

                  /* Since we will be writing into the entire argument area,
                     the map must be allocated for its entire size, not just
                     the part that is the responsibility of the caller.  */
                  if (!OUTGOING_REG_PARM_STACK_SPACE)
                    needed += reg_parm_stack_space;

#ifdef ARGS_GROW_DOWNWARD
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed + 1);
#else
                  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                                     needed);
#endif
                  if (stack_usage_map_buf)
                    free (stack_usage_map_buf);
                  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;

                  if (initial_highest_arg_in_use)
                    memcpy (stack_usage_map, initial_stack_usage_map,
                            initial_highest_arg_in_use);

                  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
                    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                            (highest_outgoing_arg_in_use
                             - initial_highest_arg_in_use));
                  needed = 0;

                  /* The address of the outgoing argument list must not be
                     copied to a register here, because argblock would be left
                     pointing to the wrong place after the call to
                     allocate_dynamic_stack_space below.  */

                  argblock = virtual_outgoing_args_rtx;
                }
              else
                {
                  if (inhibit_defer_pop == 0)
                    {
                      /* Try to reuse some or all of the pending_stack_adjust
                         to get this space.  */
                      needed
                        = (combine_pending_stack_adjustment_and_call
                           (unadjusted_args_size,
                            &adjusted_args_size,
                            preferred_unit_stack_boundary));

                      /* combine_pending_stack_adjustment_and_call computes
                         an adjustment before the arguments are allocated.
                         Account for them and see whether or not the stack
                         needs to go up or down.  */
                      needed = unadjusted_args_size - needed;

                      if (needed < 0)
                        {
                          /* We're releasing stack space.  */
                          /* ??? We can avoid any adjustment at all if we're
                             already aligned.  FIXME.  */
                          pending_stack_adjust = -needed;
                          do_pending_stack_adjust ();
                          needed = 0;
                        }
                      else
                        /* We need to allocate space.  We'll do that in
                           push_block below.  */
                        pending_stack_adjust = 0;
                    }

                  /* Special case this because overhead of `push_block' in
                     this case is non-trivial.  */
                  if (needed == 0)
                    argblock = virtual_outgoing_args_rtx;
                  else
                    {
                      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
                      argblock = plus_constant (argblock, needed);
#endif
                    }

                  /* We only really need to call `copy_to_reg' in the case
                     where push insns are going to be used to pass ARGBLOCK
                     to a function call in ARGS.  In that case, the stack
                     pointer changes value from the allocation point to the
                     call point, and hence the value of
                     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
                     as well always do it.  */
                  argblock = copy_to_reg (argblock);
                }
            }
        }

      if (ACCUMULATE_OUTGOING_ARGS)
        {
          /* The save/restore code in store_one_arg handles all
             cases except one: a constructor call (including a C
             function returning a BLKmode struct) to initialize
             an argument.  */
          if (stack_arg_under_construction)
            {
              rtx push_size
                = GEN_INT (adjusted_args_size.constant
                           + (OUTGOING_REG_PARM_STACK_SPACE ? 0
                              : reg_parm_stack_space));
              if (old_stack_level == 0)
                {
                  emit_stack_save (SAVE_BLOCK, &old_stack_level,
                                   NULL_RTX);
                  old_stack_pointer_delta = stack_pointer_delta;
                  old_pending_adj = pending_stack_adjust;
                  pending_stack_adjust = 0;
                  /* stack_arg_under_construction says whether a stack
                     arg is being constructed at the old stack level.
                     Pushing the stack gets a clean outgoing argument
                     block.  */
                  old_stack_arg_under_construction
                    = stack_arg_under_construction;
                  stack_arg_under_construction = 0;
                  /* Make a new map for the new argument list.  */
                  if (stack_usage_map_buf)
                    free (stack_usage_map_buf);
                  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
                  stack_usage_map = stack_usage_map_buf;
                  highest_outgoing_arg_in_use = 0;
                }
              allocate_dynamic_stack_space (push_size, NULL_RTX,
                                            BITS_PER_UNIT);
            }

          /* If argument evaluation might modify the stack pointer,
             copy the address of the argument list to a register.  */
          for (i = 0; i < num_actuals; i++)
            if (args[i].pass_on_stack)
              {
                argblock = copy_addr_to_reg (argblock);
                break;
              }
        }

      compute_argument_addresses (args, argblock, num_actuals);

      /* If we push args individually in reverse order, perform stack alignment
         before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
          && adjusted_args_size.constant != unadjusted_args_size)
        {
          /* When the stack adjustment is pending, we get better code
             by combining the adjustments.  */
          if (pending_stack_adjust
              && ! (flags & ECF_LIBCALL_BLOCK)
              && ! inhibit_defer_pop)
            {
              pending_stack_adjust
                = (combine_pending_stack_adjustment_and_call
                   (unadjusted_args_size,
                    &adjusted_args_size,
                    preferred_unit_stack_boundary));
              do_pending_stack_adjust ();
            }
          else if (argblock == 0)
            anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                        - unadjusted_args_size));
        }
      /* Now that the stack is properly aligned, pops can't safely
         be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      funexp = rtx_for_function_call (fndecl, addr);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
          && ! structure_value_addr)
        {
          if (pcc_struct_value)
            valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
                                          fndecl, NULL, (pass == 0));
          else
            valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
                                          (pass == 0));

          /* If VALREG is a PARALLEL whose first member has a zero
             offset, use that.  This is for targets such as m68k that
             return the same value in multiple places.  */
          if (GET_CODE (valreg) == PARALLEL)
            {
              rtx elem = XVECEXP (valreg, 0, 0);
              rtx where = XEXP (elem, 0);
              rtx offset = XEXP (elem, 1);
              if (offset == const0_rtx
                  && GET_MODE (where) == GET_MODE (valreg))
                valreg = where;
            }
        }

      /* Precompute all register parameters.  It isn't safe to compute anything
         once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      if (CALL_EXPR_STATIC_CHAIN (exp))
        static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
      else
        static_chain_value = 0;

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
         is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
        save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                              &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
         These come before register parms, since they can require block-moves,
         which could clobber the registers used for register parms.
         Parms which have partial registers are not stored here,
         but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
        if (args[i].reg == 0 || args[i].pass_on_stack)
          {
            rtx before_arg = get_last_insn ();

            if (store_one_arg (&args[i], argblock, flags,
                               adjusted_args_size.var != 0,
                               reg_parm_stack_space)
                || (pass == 0
                    && check_sibcall_argument_overlap (before_arg,
                                                       &args[i], 1)))
              sibcall_failure = 1;

            if ((flags & ECF_CONST)
                && args[i].stack
                && args[i].value == args[i].stack)
              call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                               gen_rtx_USE (VOIDmode,
                                                            args[i].value),
                                               call_fusage);
          }

      /* If we have a parm that is passed in registers but not in memory
         and whose alignment does not permit a direct copy into registers,
         make a group of pseudos that correspond to each register that we
         will later fill.  */
      if (STRICT_ALIGNMENT)
        store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
         This is the last place a block-move can happen.  */
      if (reg_parm_seen)
        for (i = 0; i < num_actuals; i++)
          if (args[i].partial != 0 && ! args[i].pass_on_stack)
            {
              rtx before_arg = get_last_insn ();

              if (store_one_arg (&args[i], argblock, flags,
                                 adjusted_args_size.var != 0,
                                 reg_parm_stack_space)
                  || (pass == 0
                      && check_sibcall_argument_overlap (before_arg,
                                                         &args[i], 1)))
                sibcall_failure = 1;
            }

      /* If we pushed args in forward order, perform stack alignment
         after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
        anti_adjust_stack (GEN_INT (adjusted_args_size.constant
                                    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
         was not preallocated, allocate stack space here for arguments
         passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS
          && must_preallocate == 0 && reg_parm_stack_space > 0)
        anti_adjust_stack (GEN_INT (reg_parm_stack_space));

      /* Pass the function the address in which to return a
         structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
        {
          structure_value_addr
            = convert_memory_address (Pmode, structure_value_addr);
          emit_move_insn (struct_value,
                          force_reg (Pmode,
                                     force_operand (structure_value_addr,
                                                    NULL_RTX)));

          if (REG_P (struct_value))
            use_reg (&call_fusage, struct_value);
        }

      funexp = prepare_call_address (funexp, static_chain_value,
                                     &call_fusage, reg_parm_seen, pass == 0);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
                                pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
         later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
         with register windows this should be the incoming register.  */
#ifdef FUNCTION_INCOMING_ARG
      if (pass == 0)
        next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
                                              void_type_node, 1);
      else
#endif
        next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
                                     void_type_node, 1);

      /* All arguments and registers used for the call must be set up by
         now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
                  || !(stack_pointer_delta % preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
                   adjusted_args_size.constant, struct_value_size,
                   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
                   flags, & args_so_far);

      /* If a non-BLKmode value is returned at the most significant end
         of a register, shift the register right by the appropriate amount
         and update VALREG accordingly.  BLKmode values are handled by the
         group load/store machinery below.  */
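      /* Numeric sketch (an addition, not from the original sources):
         an HImode value returned in the msb of a 32-bit register is
         shifted right by 16 by shift_return_value so that it lands in
         the low bits before the copy/SUBREG handling below.  */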
2761 if (!structure_value_addr
2762 && !pcc_struct_value
2763 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2764 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2765 {
2766 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2767 sibcall_failure = 1;
2768 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2769 }
2770
0a1c58a2
JL
2771 /* If call is cse'able, make appropriate pair of reg-notes around it.
2772 Test valreg so we don't crash; may safely ignore `const'
2773 if return type is void. Disable for PARALLEL return values, because
2774 we have no way to move such values into a pseudo register. */
53d4257f 2775 if (pass && (flags & ECF_LIBCALL_BLOCK))
9ae8ffe7 2776 {
0a1c58a2 2777 rtx insns;
9778f2f8
JH
2778 rtx insn;
2779 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
9ae8ffe7 2780
c22cacf3 2781 insns = get_insns ();
9778f2f8
JH
2782
2783 /* Expansion of block moves possibly introduced a loop that may
2784 not appear inside libcall block. */
2785 for (insn = insns; insn; insn = NEXT_INSN (insn))
4b4bf941 2786 if (JUMP_P (insn))
9778f2f8
JH
2787 failed = true;
2788
2789 if (failed)
e4abc3d5 2790 {
e4abc3d5 2791 end_sequence ();
2f937369 2792 emit_insn (insns);
e4abc3d5
RH
2793 }
2794 else
2795 {
2796 rtx note = 0;
2797 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2798
2799 /* Mark the return value as a pointer if needed. */
2800 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2801 mark_reg_pointer (temp,
2802 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2803
e4abc3d5 2804 end_sequence ();
8f23fc81
MM
2805 if (flag_unsafe_math_optimizations
2806 && fndecl
8c96cd51 2807 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8f23fc81
MM
2808 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2809 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2810 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
7ae4ad28
EC
2811 note = gen_rtx_fmt_e (SQRT,
2812 GET_MODE (temp),
8f23fc81
MM
2813 args[0].initial_value);
2814 else
2815 {
2816 /* Construct an "equal form" for the value which
2817 mentions all the arguments in order as well as
2818 the function name. */
2819 for (i = 0; i < num_actuals; i++)
2820 note = gen_rtx_EXPR_LIST (VOIDmode,
2821 args[i].initial_value, note);
2822 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
7ae4ad28 2823
8f23fc81
MM
2824 if (flags & ECF_PURE)
2825 note = gen_rtx_EXPR_LIST (VOIDmode,
e4abc3d5
RH
2826 gen_rtx_USE (VOIDmode,
2827 gen_rtx_MEM (BLKmode,
2828 gen_rtx_SCRATCH (VOIDmode))),
2829 note);
8f23fc81 2830 }
e4abc3d5
RH
2831 emit_libcall_block (insns, temp, valreg, note);
2832
2833 valreg = temp;
2834 }
0a1c58a2 2835 }
53d4257f 2836 else if (pass && (flags & ECF_MALLOC))
0a1c58a2
JL
2837 {
2838 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2839 rtx last, insns;
2840
f725a3ec 2841 /* The return value from a malloc-like function is a pointer. */
0a1c58a2 2842 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
bdb429a5 2843 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
0a1c58a2
JL
2844
2845 emit_move_insn (temp, valreg);
2846
2847 /* The return value from a malloc-like function can not alias
2848 anything else. */
2849 last = get_last_insn ();
f725a3ec 2850 REG_NOTES (last) =
0a1c58a2
JL
2851 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2852
2853 /* Write out the sequence. */
2854 insns = get_insns ();
2855 end_sequence ();
2f937369 2856 emit_insn (insns);
0a1c58a2
JL
2857 valreg = temp;
2858 }
51bbfa0c 2859
6fb5fa3c
DB
2860 /* For calls to `setjmp', etc., inform
2861 function.c:setjmp_warnings that it should complain if
2862 nonvolatile values are live. For functions that cannot
2863 return, inform flow that control does not fall through. */
51bbfa0c 2864
6e14af16 2865 if ((flags & ECF_NORETURN) || pass == 0)
c2939b57 2866 {
570a98eb 2867 /* The barrier must be emitted
0a1c58a2
JL
2868 immediately after the CALL_INSN. Some ports emit more
2869 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 2870
0a1c58a2 2871 rtx last = get_last_insn ();
4b4bf941 2872 while (!CALL_P (last))
0a1c58a2
JL
2873 {
2874 last = PREV_INSN (last);
2875 /* There was no CALL_INSN? */
366de0ce 2876 gcc_assert (last != before_call);
0a1c58a2 2877 }
51bbfa0c 2878
570a98eb 2879 emit_barrier_after (last);
8af61113 2880
f451eeef
JS
2881 /* Stack adjustments after a noreturn call are dead code.
2882 However when NO_DEFER_POP is in effect, we must preserve
2883 stack_pointer_delta. */
2884 if (inhibit_defer_pop == 0)
2885 {
2886 stack_pointer_delta = old_stack_allocated;
2887 pending_stack_adjust = 0;
2888 }
0a1c58a2 2889 }
51bbfa0c 2890
0a1c58a2 2891 /* If value type not void, return an rtx for the value. */
51bbfa0c 2892
0a1c58a2
JL
2893 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2894 || ignore)
b5cd4ed4 2895 target = const0_rtx;
0a1c58a2
JL
2896 else if (structure_value_addr)
2897 {
3c0cb5de 2898 if (target == 0 || !MEM_P (target))
0a1c58a2 2899 {
3bdf5ad1
RK
2900 target
2901 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2902 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2903 structure_value_addr));
2904 set_mem_attributes (target, exp, 1);
0a1c58a2
JL
2905 }
2906 }
2907 else if (pcc_struct_value)
cacbd532 2908 {
0a1c58a2
JL
2909 /* This is the special C++ case where we need to
2910 know what the true target was. We take care to
2911 never use this value more than once in one expression. */
2912 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2913 copy_to_reg (valreg));
3bdf5ad1 2914 set_mem_attributes (target, exp, 1);
cacbd532 2915 }
0a1c58a2
JL
2916 /* Handle calls that return values in multiple non-contiguous locations.
2917 The Irix 6 ABI has examples of this. */
2918 else if (GET_CODE (valreg) == PARALLEL)
2919 {
6de9cd9a 2920 if (target == 0)
0a1c58a2 2921 {
1da68f56
RK
2922 /* This will only be assigned once, so it can be readonly. */
2923 tree nt = build_qualified_type (TREE_TYPE (exp),
2924 (TYPE_QUALS (TREE_TYPE (exp))
2925 | TYPE_QUAL_CONST));
2926
2927 target = assign_temp (nt, 0, 1, 1);
0a1c58a2
JL
2928 }
2929
2930 if (! rtx_equal_p (target, valreg))
6e985040 2931 emit_group_store (target, valreg, TREE_TYPE (exp),
04050c69 2932 int_size_in_bytes (TREE_TYPE (exp)));
19caa751 2933
0a1c58a2
JL
2934 /* We cannot support sibling calls for this case. */
2935 sibcall_failure = 1;
2936 }
2937 else if (target
2938 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2939 && GET_MODE (target) == GET_MODE (valreg))
2940 {
51caaefe
EB
2941 bool may_overlap = false;
2942
f2d18690
KK
2943 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2944 reg to a plain register. */
2945 if (REG_P (valreg)
2946 && HARD_REGISTER_P (valreg)
2947 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
2948 && !(REG_P (target) && !HARD_REGISTER_P (target)))
2949 valreg = copy_to_reg (valreg);
2950
51caaefe
EB
2951 /* If TARGET is a MEM in the argument area, and we have
2952 saved part of the argument area, then we can't store
2953 directly into TARGET as it may get overwritten when we
2954 restore the argument save area below. Don't work too
2955 hard though and simply force TARGET to a register if it
2956 is a MEM; the optimizer is quite likely to sort it out. */
2957 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2958 for (i = 0; i < num_actuals; i++)
2959 if (args[i].save_area)
2960 {
2961 may_overlap = true;
2962 break;
2963 }
0219237c 2964
51caaefe
EB
2965 if (may_overlap)
2966 target = copy_to_reg (valreg);
2967 else
2968 {
2969 /* TARGET and VALREG cannot be equal at this point
2970 because the latter would not have
2971 REG_FUNCTION_VALUE_P true, while the former would if
2972 it were referring to the same register.
2973
2974 If they refer to the same register, this move will be
2975 a no-op, except when function inlining is being
2976 done. */
2977 emit_move_insn (target, valreg);
2978
2979 /* If we are setting a MEM, this code must be executed.
2980 Since it is emitted after the call insn, sibcall
2981 optimization cannot be performed in that case. */
2982 if (MEM_P (target))
2983 sibcall_failure = 1;
2984 }
0a1c58a2
JL
2985 }
2986 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
8eb99146
RH
2987 {
2988 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2989
2990 /* We cannot support sibling calls for this case. */
2991 sibcall_failure = 1;
2992 }
0a1c58a2 2993 else
bef5d8b6 2994 target = copy_to_reg (valreg);
51bbfa0c 2995
61f71b34
DD
2996 if (targetm.calls.promote_function_return(funtype))
2997 {
366de0ce
NS
2998 /* If we promoted this return value, make the proper SUBREG.
2999 TARGET might be const0_rtx here, so be careful. */
3000 if (REG_P (target)
3001 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3002 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3003 {
3004 tree type = TREE_TYPE (exp);
3005 int unsignedp = TYPE_UNSIGNED (type);
3006 int offset = 0;
3007 enum machine_mode pmode;
c22cacf3 3008
366de0ce
NS
3009 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3010 /* If we don't promote as expected, something is wrong. */
3011 gcc_assert (GET_MODE (target) == pmode);
c22cacf3 3012
366de0ce
NS
3013 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3014 && (GET_MODE_SIZE (GET_MODE (target))
3015 > GET_MODE_SIZE (TYPE_MODE (type))))
3016 {
3017 offset = GET_MODE_SIZE (GET_MODE (target))
3018 - GET_MODE_SIZE (TYPE_MODE (type));
3019 if (! BYTES_BIG_ENDIAN)
3020 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3021 else if (! WORDS_BIG_ENDIAN)
3022 offset %= UNITS_PER_WORD;
3023 }
3024 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3025 SUBREG_PROMOTED_VAR_P (target) = 1;
3026 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3027 }
61f71b34 3028 }
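/* Worked example of the offset computation above (hypothetical
   target with UNITS_PER_WORD == 8): an SImode value promoted into
   a DImode TARGET gives offset = 8 - 4 = 4 when both words and
   bytes are big-endian, so the SUBREG reads the low-order bytes
   4..7; on a fully little-endian target the enclosing condition is
   false and offset stays 0, selecting bytes 0..3. */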
84b55618 3029
0a1c58a2
JL
3030 /* If size of args is variable or this was a constructor call for a stack
3031 argument, restore saved stack-pointer value. */
51bbfa0c 3032
7393c642 3033 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
0a1c58a2
JL
3034 {
3035 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
38afb23f 3036 stack_pointer_delta = old_stack_pointer_delta;
0a1c58a2 3037 pending_stack_adjust = old_pending_adj;
d25cee4d 3038 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
0a1c58a2
JL
3039 stack_arg_under_construction = old_stack_arg_under_construction;
3040 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3041 stack_usage_map = initial_stack_usage_map;
0a1c58a2
JL
3042 sibcall_failure = 1;
3043 }
f8a097cd 3044 else if (ACCUMULATE_OUTGOING_ARGS && pass)
0a1c58a2 3045 {
51bbfa0c 3046#ifdef REG_PARM_STACK_SPACE
0a1c58a2 3047 if (save_area)
b820d2b8
AM
3048 restore_fixed_argument_area (save_area, argblock,
3049 high_to_save, low_to_save);
b94301c2 3050#endif
51bbfa0c 3051
0a1c58a2
JL
3052 /* If we saved any argument areas, restore them. */
3053 for (i = 0; i < num_actuals; i++)
3054 if (args[i].save_area)
3055 {
3056 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3057 rtx stack_area
3058 = gen_rtx_MEM (save_mode,
3059 memory_address (save_mode,
3060 XEXP (args[i].stack_slot, 0)));
3061
3062 if (save_mode != BLKmode)
3063 emit_move_insn (stack_area, args[i].save_area);
3064 else
44bb111a 3065 emit_block_move (stack_area, args[i].save_area,
e7949876 3066 GEN_INT (args[i].locate.size.constant),
44bb111a 3067 BLOCK_OP_CALL_PARM);
0a1c58a2 3068 }
51bbfa0c 3069
0a1c58a2
JL
3070 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3071 stack_usage_map = initial_stack_usage_map;
3072 }
51bbfa0c 3073
f725a3ec 3074 /* If this was alloca, record the new stack level for nonlocal gotos.
0a1c58a2
JL
3075 Check for the handler slots since we might not have a save area
3076 for nonlocal gotos. */
59257ff7 3077
6de9cd9a
DN
3078 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3079 update_nonlocal_goto_save_area ();
51bbfa0c 3080
0a1c58a2
JL
3081 /* Free up storage we no longer need. */
3082 for (i = 0; i < num_actuals; ++i)
3083 if (args[i].aligned_regs)
3084 free (args[i].aligned_regs);
3085
3086 insns = get_insns ();
3087 end_sequence ();
3088
3089 if (pass == 0)
3090 {
3091 tail_call_insns = insns;
3092
0a1c58a2
JL
3093 /* Restore the pending stack adjustment now that we have
3094 finished generating the sibling call sequence. */
1503a7ec 3095
0a1c58a2 3096 pending_stack_adjust = save_pending_stack_adjust;
1503a7ec 3097 stack_pointer_delta = save_stack_pointer_delta;
099e9712
JH
3098
3099 /* Prepare arg structure for next iteration. */
f725a3ec 3100 for (i = 0; i < num_actuals; i++)
099e9712
JH
3101 {
3102 args[i].value = 0;
3103 args[i].aligned_regs = 0;
3104 args[i].stack = 0;
3105 }
c67846f2
JJ
3106
3107 sbitmap_free (stored_args_map);
0a1c58a2
JL
3108 }
3109 else
38afb23f
OH
3110 {
3111 normal_call_insns = insns;
3112
3113 /* Verify that we've deallocated all the stack we used. */
6e14af16 3114 gcc_assert ((flags & ECF_NORETURN)
366de0ce
NS
3115 || (old_stack_allocated
3116 == stack_pointer_delta - pending_stack_adjust));
38afb23f 3117 }
fadb729c
JJ
3118
3119 /* If something prevents making this a sibling call,
3120 zero out the sequence. */
3121 if (sibcall_failure)
3122 tail_call_insns = NULL_RTX;
6de9cd9a
DN
3123 else
3124 break;
0a1c58a2
JL
3125 }
3126
1ea7e6ad 3127 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
6de9cd9a
DN
3128 arguments too, as the argument area is now clobbered by the call. */
3129 if (tail_call_insns)
0a1c58a2 3130 {
6de9cd9a
DN
3131 emit_insn (tail_call_insns);
3132 cfun->tail_call_emit = true;
0a1c58a2
JL
3133 }
3134 else
2f937369 3135 emit_insn (normal_call_insns);
51bbfa0c 3136
0a1c58a2 3137 currently_expanding_call--;
8e6a59fe 3138
7393c642
RK
3139 /* If this function returns with the stack pointer depressed, ensure
3140 this block saves and restores the stack pointer, show it was
3141 changed, and adjust for any outgoing arg space. */
3142 if (flags & ECF_SP_DEPRESSED)
3143 {
3144 clear_pending_stack_adjust ();
f84d109f 3145 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
7393c642 3146 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
7393c642
RK
3147 }
3148
d9725c41
JJ
3149 if (stack_usage_map_buf)
3150 free (stack_usage_map_buf);
3151
51bbfa0c
RS
3152 return target;
3153}
ded9bf77 3154
6de9cd9a
DN
3155/* A sibling call sequence invalidates any REG_EQUIV notes made for
3156 this function's incoming arguments.
3157
3158 At the start of RTL generation we know the only REG_EQUIV notes
29d51cdb
SB
3159 in the rtl chain are those for incoming arguments, so we can look
3160 for REG_EQUIV notes between the start of the function and the
3161 NOTE_INSN_FUNCTION_BEG.
6de9cd9a
DN
3162
3163 This is (slight) overkill. We could keep track of the highest
3164 argument we clobber and be more selective in removing notes, but it
3165 does not seem to be worth the effort. */
29d51cdb 3166
6de9cd9a
DN
3167void
3168fixup_tail_calls (void)
3169{
29d51cdb
SB
3170 rtx insn;
3171
3172 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3173 {
a31830a7
SB
3174 rtx note;
3175
29d51cdb
SB
3176 /* There are never REG_EQUIV notes for the incoming arguments
3177 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3178 if (NOTE_P (insn)
a38e7aa5 3179 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
29d51cdb
SB
3180 break;
3181
a31830a7
SB
3182 note = find_reg_note (insn, REG_EQUIV, 0);
3183 if (note)
3184 remove_note (insn, note);
3185 note = find_reg_note (insn, REG_EQUIV, 0);
3186 gcc_assert (!note);
29d51cdb 3187 }
6de9cd9a
DN
3188}
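/* Sketch of the hazard removed above (hypothetical RTL): the insn
   copying an incoming argument into pseudo 58 may carry a note like
     (expr_list:REG_EQUIV (mem:SI (plus (reg argp) (const_int 8))) ...)
   but after a sibling call the slot at argp+8 holds the callee's
   outgoing arguments, so the equivalence is stale and reload must
   not use it. */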
3189
ded9bf77
AH
3190/* Traverse a list of TYPES and expand all complex types into their
3191 components. */
2f2b4a02 3192static tree
ded9bf77
AH
3193split_complex_types (tree types)
3194{
3195 tree p;
3196
42ba5130
RH
3197 /* Before allocating memory, check for the common case of no complex. */
3198 for (p = types; p; p = TREE_CHAIN (p))
3199 {
3200 tree type = TREE_VALUE (p);
3201 if (TREE_CODE (type) == COMPLEX_TYPE
3202 && targetm.calls.split_complex_arg (type))
c22cacf3 3203 goto found;
42ba5130
RH
3204 }
3205 return types;
3206
3207 found:
ded9bf77
AH
3208 types = copy_list (types);
3209
3210 for (p = types; p; p = TREE_CHAIN (p))
3211 {
3212 tree complex_type = TREE_VALUE (p);
3213
42ba5130
RH
3214 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3215 && targetm.calls.split_complex_arg (complex_type))
ded9bf77
AH
3216 {
3217 tree next, imag;
3218
3219 /* Rewrite complex type with component type. */
3220 TREE_VALUE (p) = TREE_TYPE (complex_type);
3221 next = TREE_CHAIN (p);
3222
3223 /* Add another component type for the imaginary part. */
3224 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3225 TREE_CHAIN (p) = imag;
3226 TREE_CHAIN (imag) = next;
3227
3228 /* Skip the newly created node. */
3229 p = TREE_CHAIN (p);
3230 }
3231 }
3232
3233 return types;
3234}
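/* Example of the rewrite above (hypothetical list): the argument
   types (complex double, int), on a target whose split_complex_arg
   hook accepts complex double, come back as (double, double, int);
   the complex entry is narrowed to its component type and a fresh
   entry for the imaginary part is spliced in directly after it. */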
51bbfa0c 3235\f
de76b467 3236/* Output a library call to function FUN (a SYMBOL_REF rtx).
f725a3ec 3237 The RETVAL parameter specifies whether the return value needs to be saved;
0407c02b 3238 the other parameters are documented in the emit_library_call function below. */
8ac61af7 3239
de76b467 3240static rtx
d329e058
AJ
3241emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3242 enum libcall_type fn_type,
3243 enum machine_mode outmode, int nargs, va_list p)
43bc5f13 3244{
3c0fca12
RH
3245 /* Total size in bytes of all the stack-parms scanned so far. */
3246 struct args_size args_size;
3247 /* Size of arguments before any adjustments (such as rounding). */
3248 struct args_size original_args_size;
b3694847 3249 int argnum;
3c0fca12
RH
3250 rtx fun;
3251 int inc;
3252 int count;
3c0fca12
RH
3253 rtx argblock = 0;
3254 CUMULATIVE_ARGS args_so_far;
f725a3ec
KH
3255 struct arg
3256 {
3257 rtx value;
3258 enum machine_mode mode;
3259 rtx reg;
3260 int partial;
e7949876 3261 struct locate_and_pad_arg_data locate;
f725a3ec
KH
3262 rtx save_area;
3263 };
3c0fca12
RH
3264 struct arg *argvec;
3265 int old_inhibit_defer_pop = inhibit_defer_pop;
3266 rtx call_fusage = 0;
3267 rtx mem_value = 0;
5591ee6f 3268 rtx valreg;
3c0fca12
RH
3269 int pcc_struct_value = 0;
3270 int struct_value_size = 0;
52a11cbf 3271 int flags;
3c0fca12 3272 int reg_parm_stack_space = 0;
3c0fca12 3273 int needed;
695ee791 3274 rtx before_call;
b0c48229 3275 tree tfom; /* type_for_mode (outmode, 0) */
3c0fca12 3276
f73ad30e 3277#ifdef REG_PARM_STACK_SPACE
3c0fca12
RH
3278 /* Define the boundary of the register parm stack space that needs to be
3279 saved, if any. */
b820d2b8 3280 int low_to_save, high_to_save;
f725a3ec 3281 rtx save_area = 0; /* Place that it is saved. */
3c0fca12
RH
3282#endif
3283
3c0fca12
RH
3284 /* Size of the stack reserved for parameter registers. */
3285 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3286 char *initial_stack_usage_map = stack_usage_map;
d9725c41 3287 char *stack_usage_map_buf = NULL;
3c0fca12 3288
61f71b34
DD
3289 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3290
3c0fca12 3291#ifdef REG_PARM_STACK_SPACE
3c0fca12 3292 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3c0fca12
RH
3293#endif
3294
9555a122 3295 /* By default, library functions cannot throw. */
52a11cbf
RH
3296 flags = ECF_NOTHROW;
3297
9555a122
RH
3298 switch (fn_type)
3299 {
3300 case LCT_NORMAL:
53d4257f 3301 break;
9555a122 3302 case LCT_CONST:
53d4257f
JH
3303 flags |= ECF_CONST;
3304 break;
9555a122 3305 case LCT_PURE:
53d4257f 3306 flags |= ECF_PURE;
9555a122
RH
3307 break;
3308 case LCT_CONST_MAKE_BLOCK:
53d4257f 3309 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
9555a122
RH
3310 break;
3311 case LCT_PURE_MAKE_BLOCK:
53d4257f 3312 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
9555a122
RH
3313 break;
3314 case LCT_NORETURN:
3315 flags |= ECF_NORETURN;
3316 break;
3317 case LCT_THROW:
3318 flags = ECF_NORETURN;
3319 break;
9defc9b7
RH
3320 case LCT_RETURNS_TWICE:
3321 flags = ECF_RETURNS_TWICE;
3322 break;
9555a122 3323 }
3c0fca12
RH
3324 fun = orgfun;
3325
3c0fca12
RH
3326 /* Ensure current function's preferred stack boundary is at least
3327 what we need. */
3328 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3329 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3c0fca12
RH
3330
3331 /* If this kind of value comes back in memory,
3332 decide where in memory it should come back. */
b0c48229 3333 if (outmode != VOIDmode)
3c0fca12 3334 {
ae2bcd98 3335 tfom = lang_hooks.types.type_for_mode (outmode, 0);
61f71b34 3336 if (aggregate_value_p (tfom, 0))
b0c48229 3337 {
3c0fca12 3338#ifdef PCC_STATIC_STRUCT_RETURN
b0c48229 3339 rtx pointer_reg
1d636cc6 3340 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
b0c48229
NB
3341 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3342 pcc_struct_value = 1;
3343 if (value == 0)
3344 value = gen_reg_rtx (outmode);
3c0fca12 3345#else /* not PCC_STATIC_STRUCT_RETURN */
b0c48229 3346 struct_value_size = GET_MODE_SIZE (outmode);
3c0cb5de 3347 if (value != 0 && MEM_P (value))
b0c48229
NB
3348 mem_value = value;
3349 else
3350 mem_value = assign_temp (tfom, 0, 1, 1);
3c0fca12 3351#endif
b0c48229
NB
3352 /* This call returns a big structure. */
3353 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3354 }
3c0fca12 3355 }
b0c48229
NB
3356 else
3357 tfom = void_type_node;
3c0fca12
RH
3358
3359 /* ??? Unfinished: must pass the memory address as an argument. */
3360
3361 /* Copy all the libcall-arguments out of the varargs data
3362 and into a vector ARGVEC.
3363
3364 Compute how to pass each argument. We only support a very small subset
3365 of the full argument passing conventions to limit complexity here since
3366 library functions shouldn't have many args. */
3367
703ad42b
KG
3368 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3369 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3c0fca12 3370
97fc4caf
AO
3371#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3372 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3373#else
0f6937fe 3374 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
97fc4caf 3375#endif
3c0fca12
RH
3376
3377 args_size.constant = 0;
3378 args_size.var = 0;
3379
3380 count = 0;
3381
ebb1b59a
BS
3382 /* Now we are about to start emitting insns that can be deleted
3383 if a libcall is deleted. */
53d4257f 3384 if (flags & ECF_LIBCALL_BLOCK)
ebb1b59a
BS
3385 start_sequence ();
3386
3c0fca12
RH
3387 push_temp_slots ();
3388
3389 /* If there's a structure value address to be passed,
3390 either pass it in the special place, or pass it as an extra argument. */
61f71b34 3391 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3c0fca12
RH
3392 {
3393 rtx addr = XEXP (mem_value, 0);
c22cacf3 3394
3c0fca12
RH
3395 nargs++;
3396
3397 /* Make sure it is a reasonable operand for a move or push insn. */
3c0cb5de 3398 if (!REG_P (addr) && !MEM_P (addr)
3c0fca12
RH
3399 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3400 addr = force_operand (addr, NULL_RTX);
3401
3402 argvec[count].value = addr;
3403 argvec[count].mode = Pmode;
3404 argvec[count].partial = 0;
3405
3406 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
78a52f11
RH
3407 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3408 NULL_TREE, 1) == 0);
3c0fca12
RH
3409
3410 locate_and_pad_parm (Pmode, NULL_TREE,
a4d5044f 3411#ifdef STACK_PARMS_IN_REG_PARM_AREA
c22cacf3 3412 1,
a4d5044f
CM
3413#else
3414 argvec[count].reg != 0,
3415#endif
e7949876 3416 0, NULL_TREE, &args_size, &argvec[count].locate);
3c0fca12 3417
3c0fca12
RH
3418 if (argvec[count].reg == 0 || argvec[count].partial != 0
3419 || reg_parm_stack_space > 0)
e7949876 3420 args_size.constant += argvec[count].locate.size.constant;
3c0fca12
RH
3421
3422 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3423
3424 count++;
3425 }
3426
3427 for (; count < nargs; count++)
3428 {
3429 rtx val = va_arg (p, rtx);
3430 enum machine_mode mode = va_arg (p, enum machine_mode);
3431
3432 /* We cannot convert the arg value to the mode the library wants here;
3433 must do it earlier where we know the signedness of the arg. */
366de0ce
NS
3434 gcc_assert (mode != BLKmode
3435 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3c0fca12 3436
3c0fca12 3437 /* Make sure it is a reasonable operand for a move or push insn. */
3c0cb5de 3438 if (!REG_P (val) && !MEM_P (val)
3c0fca12
RH
3439 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3440 val = force_operand (val, NULL_RTX);
3441
0976078c 3442 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3c0fca12 3443 {
f474c6f8 3444 rtx slot;
6cdd5672
RH
3445 int must_copy
3446 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
f474c6f8 3447
a0dc500c
R
3448 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3449 functions, so we have to pretend this isn't such a function. */
3450 if (flags & ECF_LIBCALL_BLOCK)
3451 {
3452 rtx insns = get_insns ();
3453 end_sequence ();
3454 emit_insn (insns);
3455 }
3456 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3457
99a32567
DM
3458 /* If this was a CONST function, it is now PURE since
3459 it now reads memory. */
3460 if (flags & ECF_CONST)
3461 {
3462 flags &= ~ECF_CONST;
3463 flags |= ECF_PURE;
3464 }
3465
9969aaf6 3466 if (GET_MODE (val) == MEM && !must_copy)
f474c6f8 3467 slot = val;
9969aaf6 3468 else
f474c6f8 3469 {
ae2bcd98 3470 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
b0c48229 3471 0, 1, 1);
f474c6f8
AO
3472 emit_move_insn (slot, val);
3473 }
1da68f56 3474
6b5273c3
AO
3475 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3476 gen_rtx_USE (VOIDmode, slot),
3477 call_fusage);
f474c6f8
AO
3478 if (must_copy)
3479 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3480 gen_rtx_CLOBBER (VOIDmode,
3481 slot),
3482 call_fusage);
3483
3c0fca12 3484 mode = Pmode;
f474c6f8 3485 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12 3486 }
3c0fca12
RH
3487
3488 argvec[count].value = val;
3489 argvec[count].mode = mode;
3490
3491 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3492
3c0fca12 3493 argvec[count].partial
78a52f11 3494 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3c0fca12
RH
3495
3496 locate_and_pad_parm (mode, NULL_TREE,
a4d5044f 3497#ifdef STACK_PARMS_IN_REG_PARM_AREA
f725a3ec 3498 1,
a4d5044f
CM
3499#else
3500 argvec[count].reg != 0,
3501#endif
e7949876
AM
3502 argvec[count].partial,
3503 NULL_TREE, &args_size, &argvec[count].locate);
3c0fca12 3504
366de0ce 3505 gcc_assert (!argvec[count].locate.size.var);
3c0fca12 3506
3c0fca12
RH
3507 if (argvec[count].reg == 0 || argvec[count].partial != 0
3508 || reg_parm_stack_space > 0)
e7949876 3509 args_size.constant += argvec[count].locate.size.constant;
3c0fca12
RH
3510
3511 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3512 }
3c0fca12 3513
3c0fca12
RH
3514 /* If this machine requires an external definition for library
3515 functions, write one out. */
3516 assemble_external_libcall (fun);
3517
3518 original_args_size = args_size;
1503a7ec
JH
3519 args_size.constant = (((args_size.constant
3520 + stack_pointer_delta
3521 + STACK_BYTES - 1)
3522 / STACK_BYTES
3523 * STACK_BYTES)
3524 - stack_pointer_delta);
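/* Worked example of the rounding above (hypothetical numbers,
   STACK_BYTES == 16): with stack_pointer_delta == 8 and 20 bytes of
   arguments so far, (20 + 8 + 15) / 16 * 16 - 8 == 24, i.e. the
   block is padded so that the running stack offset ends up 16-byte
   aligned. */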
3c0fca12
RH
3525
3526 args_size.constant = MAX (args_size.constant,
3527 reg_parm_stack_space);
3528
ac294f0b
KT
3529 if (!OUTGOING_REG_PARM_STACK_SPACE)
3530 args_size.constant -= reg_parm_stack_space;
3c0fca12
RH
3531
3532 if (args_size.constant > current_function_outgoing_args_size)
3533 current_function_outgoing_args_size = args_size.constant;
3534
f73ad30e
JH
3535 if (ACCUMULATE_OUTGOING_ARGS)
3536 {
3537 /* Since the stack pointer will never be pushed, it is possible for
3538 the evaluation of a parm to clobber something we have already
3539 written to the stack. Since most function calls on RISC machines
3540 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 3541
f73ad30e
JH
3542 Therefore, we save any area of the stack that was already written
3543 and that we are using. Here we set up to do this by making a new
3544 stack usage map from the old one.
3c0fca12 3545
f73ad30e
JH
3546 Another approach might be to try to reorder the argument
3547 evaluations to avoid this conflicting stack usage. */
3c0fca12 3548
f73ad30e 3549 needed = args_size.constant;
3c0fca12 3550
f73ad30e
JH
3551 /* Since we will be writing into the entire argument area, the
3552 map must be allocated for its entire size, not just the part that
3553 is the responsibility of the caller. */
ac294f0b
KT
3554 if (!OUTGOING_REG_PARM_STACK_SPACE)
3555 needed += reg_parm_stack_space;
3c0fca12
RH
3556
3557#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3558 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3559 needed + 1);
3c0fca12 3560#else
f73ad30e
JH
3561 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3562 needed);
3c0fca12 3563#endif
5ed6ace5 3564 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 3565 stack_usage_map = stack_usage_map_buf;
3c0fca12 3566
f73ad30e 3567 if (initial_highest_arg_in_use)
2e09e75a
JM
3568 memcpy (stack_usage_map, initial_stack_usage_map,
3569 initial_highest_arg_in_use);
3c0fca12 3570
f73ad30e 3571 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 3572 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
3573 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3574 needed = 0;
3c0fca12 3575
c39ada04 3576 /* We must be careful to use virtual regs before they're instantiated,
c22cacf3 3577 and real regs afterwards. Loop optimization, for example, can create
c39ada04
DD
3578 new libcalls after we've instantiated the virtual regs, and if we
3579 use virtuals anyway, they won't match the rtl patterns. */
3c0fca12 3580
c39ada04
DD
3581 if (virtuals_instantiated)
3582 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3583 else
3584 argblock = virtual_outgoing_args_rtx;
f73ad30e
JH
3585 }
3586 else
3587 {
3588 if (!PUSH_ARGS)
3589 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3590 }
3c0fca12 3591
3c0fca12
RH
3592 /* If we push args individually in reverse order, perform stack alignment
3593 before the first push (the last arg). */
f73ad30e 3594 if (argblock == 0 && PUSH_ARGS_REVERSED)
3c0fca12
RH
3595 anti_adjust_stack (GEN_INT (args_size.constant
3596 - original_args_size.constant));
3c0fca12 3597
f73ad30e
JH
3598 if (PUSH_ARGS_REVERSED)
3599 {
3600 inc = -1;
3601 argnum = nargs - 1;
3602 }
3603 else
3604 {
3605 inc = 1;
3606 argnum = 0;
3607 }
3c0fca12 3608
f73ad30e
JH
3609#ifdef REG_PARM_STACK_SPACE
3610 if (ACCUMULATE_OUTGOING_ARGS)
3611 {
3612 /* The argument list is the property of the called routine and it
3613 may clobber it. If the fixed area has been used for previous
b820d2b8
AM
3614 parameters, we must save and restore it. */
3615 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3616 &low_to_save, &high_to_save);
3c0fca12
RH
3617 }
3618#endif
f725a3ec 3619
3c0fca12
RH
3620 /* Push the args that need to be pushed. */
3621
3622 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3623 are to be pushed. */
3624 for (count = 0; count < nargs; count++, argnum += inc)
3625 {
b3694847
SS
3626 enum machine_mode mode = argvec[argnum].mode;
3627 rtx val = argvec[argnum].value;
3c0fca12
RH
3628 rtx reg = argvec[argnum].reg;
3629 int partial = argvec[argnum].partial;
f73ad30e 3630 int lower_bound = 0, upper_bound = 0, i;
3c0fca12
RH
3631
3632 if (! (reg != 0 && partial == 0))
3633 {
f73ad30e
JH
3634 if (ACCUMULATE_OUTGOING_ARGS)
3635 {
f8a097cd
JH
3636 /* If this is being stored into a pre-allocated, fixed-size
3637 stack area, save any previous data at that location. */
3c0fca12
RH
3638
3639#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3640 /* stack_slot is negative, but we want to index stack_usage_map
3641 with positive values. */
e7949876
AM
3642 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3643 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3c0fca12 3644#else
e7949876
AM
3645 lower_bound = argvec[argnum].locate.offset.constant;
3646 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3c0fca12
RH
3647#endif
3648
546ff777
AM
3649 i = lower_bound;
3650 /* Don't worry about things in the fixed argument area;
3651 it has already been saved. */
3652 if (i < reg_parm_stack_space)
3653 i = reg_parm_stack_space;
3654 while (i < upper_bound && stack_usage_map[i] == 0)
3655 i++;
3c0fca12 3656
546ff777 3657 if (i < upper_bound)
f73ad30e 3658 {
e7949876
AM
3659 /* We need to make a save area. */
3660 unsigned int size
3661 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
f73ad30e 3662 enum machine_mode save_mode
e7949876
AM
3663 = mode_for_size (size, MODE_INT, 1);
3664 rtx adr
3665 = plus_constant (argblock,
3666 argvec[argnum].locate.offset.constant);
f73ad30e 3667 rtx stack_area
e7949876 3668 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
f73ad30e 3669
9778f2f8
JH
3670 if (save_mode == BLKmode)
3671 {
3672 argvec[argnum].save_area
3673 = assign_stack_temp (BLKmode,
c22cacf3 3674 argvec[argnum].locate.size.constant,
9778f2f8
JH
3675 0);
3676
3677 emit_block_move (validize_mem (argvec[argnum].save_area),
c22cacf3 3678 stack_area,
9778f2f8
JH
3679 GEN_INT (argvec[argnum].locate.size.constant),
3680 BLOCK_OP_CALL_PARM);
3681 }
3682 else
3683 {
3684 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3685
3686 emit_move_insn (argvec[argnum].save_area, stack_area);
3687 }
f73ad30e 3688 }
3c0fca12 3689 }
19caa751 3690
44bb111a
RH
3691 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3692 partial, reg, 0, argblock,
e7949876
AM
3693 GEN_INT (argvec[argnum].locate.offset.constant),
3694 reg_parm_stack_space,
3695 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3c0fca12 3696
3c0fca12 3697 /* Now mark the segment we just used. */
f73ad30e
JH
3698 if (ACCUMULATE_OUTGOING_ARGS)
3699 for (i = lower_bound; i < upper_bound; i++)
3700 stack_usage_map[i] = 1;
3c0fca12
RH
3701
3702 NO_DEFER_POP;
475a3eef
R
3703
3704 if (flags & ECF_CONST)
3705 {
3706 rtx use;
3707
3708 /* Indicate argument access so that alias.c knows that these
3709 values are live. */
3710 if (argblock)
3711 use = plus_constant (argblock,
3712 argvec[argnum].locate.offset.constant);
3713 else
a4174ebf 3714 /* When arguments are pushed, trying to tell alias.c where
475a3eef
R
3715 exactly this argument is won't work, because the
3716 auto-increment causes confusion. So we merely indicate
3717 that we access something with a known mode somewhere on
3718 the stack. */
c22cacf3 3719 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
475a3eef
R
3720 gen_rtx_SCRATCH (Pmode));
3721 use = gen_rtx_MEM (argvec[argnum].mode, use);
3722 use = gen_rtx_USE (VOIDmode, use);
3723 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3724 }
3c0fca12
RH
3725 }
3726 }
3727
3c0fca12
RH
3728 /* If we pushed args in forward order, perform stack alignment
3729 after pushing the last arg. */
f73ad30e 3730 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3c0fca12
RH
3731 anti_adjust_stack (GEN_INT (args_size.constant
3732 - original_args_size.constant));
3c0fca12 3733
f73ad30e
JH
3734 if (PUSH_ARGS_REVERSED)
3735 argnum = nargs - 1;
3736 else
3737 argnum = 0;
3c0fca12 3738
6de9cd9a 3739 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3c0fca12
RH
3740
3741 /* Now load any reg parms into their regs. */
3742
3743 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3744 are to be pushed. */
3745 for (count = 0; count < nargs; count++, argnum += inc)
3746 {
ff15c351 3747 enum machine_mode mode = argvec[argnum].mode;
b3694847 3748 rtx val = argvec[argnum].value;
3c0fca12
RH
3749 rtx reg = argvec[argnum].reg;
3750 int partial = argvec[argnum].partial;
3751
3752 /* Handle calls that pass values in multiple non-contiguous
3753 locations. The PA64 has examples of this for library calls. */
3754 if (reg != 0 && GET_CODE (reg) == PARALLEL)
ff15c351 3755 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3c0fca12
RH
3756 else if (reg != 0 && partial == 0)
3757 emit_move_insn (reg, val);
3758
3759 NO_DEFER_POP;
3760 }
3761
3c0fca12
RH
3762 /* Any regs containing parms remain in use through the call. */
3763 for (count = 0; count < nargs; count++)
3764 {
3765 rtx reg = argvec[count].reg;
3766 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3767 use_group_regs (&call_fusage, reg);
3768 else if (reg != 0)
3b1bf459
BS
3769 {
3770 int partial = argvec[count].partial;
3771 if (partial)
3772 {
3773 int nregs;
3774 gcc_assert (partial % UNITS_PER_WORD == 0);
3775 nregs = partial / UNITS_PER_WORD;
3776 use_regs (&call_fusage, REGNO (reg), nregs);
3777 }
3778 else
3779 use_reg (&call_fusage, reg);
3780 }
3c0fca12
RH
3781 }
3782
3783 /* Pass the function the address in which to return a structure value. */
61f71b34 3784 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3c0fca12 3785 {
61f71b34 3786 emit_move_insn (struct_value,
3c0fca12
RH
3787 force_reg (Pmode,
3788 force_operand (XEXP (mem_value, 0),
3789 NULL_RTX)));
f8cfc6aa 3790 if (REG_P (struct_value))
61f71b34 3791 use_reg (&call_fusage, struct_value);
3c0fca12
RH
3792 }
3793
3794 /* Don't allow popping to be deferred, since then
3795 cse'ing of library calls could delete a call and leave the pop. */
3796 NO_DEFER_POP;
5591ee6f
JH
3797 valreg = (mem_value == 0 && outmode != VOIDmode
3798 ? hard_libcall_value (outmode) : NULL_RTX);
3c0fca12 3799
ce48579b 3800 /* Stack must be properly aligned now. */
366de0ce
NS
3801 gcc_assert (!(stack_pointer_delta
3802 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
ebcd0b57 3803
695ee791
RH
3804 before_call = get_last_insn ();
3805
3c0fca12
RH
3806 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3807 will set inhibit_defer_pop to that value. */
de76b467
JH
3808 /* The return type is needed to decide how many bytes the function pops.
3809 Signedness plays no role in that, so for simplicity, we pretend it's
3810 always signed. We also assume that the list of arguments passed has
3811 no impact, so we pretend it is unknown. */
3c0fca12 3812
6de9cd9a 3813 emit_call_1 (fun, NULL,
f725a3ec 3814 get_identifier (XSTR (orgfun, 0)),
b0c48229 3815 build_function_type (tfom, NULL_TREE),
f725a3ec 3816 original_args_size.constant, args_size.constant,
3c0fca12
RH
3817 struct_value_size,
3818 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
5591ee6f 3819 valreg,
fa5322fa 3820 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3c0fca12 3821
6fb5fa3c
DB
3822 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3823 that it should complain if nonvolatile values are live. For
3824 functions that cannot return, inform flow that control does not
3825 fall through. */
695ee791 3826
6e14af16 3827 if (flags & ECF_NORETURN)
695ee791 3828 {
570a98eb 3829 /* The barrier must be emitted
695ee791
RH
3830 immediately after the CALL_INSN. Some ports emit more than
3831 just a CALL_INSN above, so we must search for it here. */
3832
3833 rtx last = get_last_insn ();
4b4bf941 3834 while (!CALL_P (last))
695ee791
RH
3835 {
3836 last = PREV_INSN (last);
3837 /* There was no CALL_INSN? */
366de0ce 3838 gcc_assert (last != before_call);
695ee791
RH
3839 }
3840
570a98eb 3841 emit_barrier_after (last);
695ee791
RH
3842 }
3843
3c0fca12
RH
3844 /* Now restore inhibit_defer_pop to its actual original value. */
3845 OK_DEFER_POP;
3846
ebb1b59a
BS
3847 /* If the call is cse'able, make an appropriate pair of reg-notes around it.
3848 Test valreg so we don't crash; we may safely ignore `const'
3849 if the return type is void. Disable for PARALLEL return values, because
3850 we have no way to move such values into a pseudo register. */
53d4257f 3851 if (flags & ECF_LIBCALL_BLOCK)
ebb1b59a 3852 {
ebb1b59a 3853 rtx insns;
ebb1b59a 3854
c3297561 3855 if (valreg == 0)
e4abc3d5
RH
3856 {
3857 insns = get_insns ();
3858 end_sequence ();
2f937369 3859 emit_insn (insns);
e4abc3d5
RH
3860 }
3861 else
3862 {
3863 rtx note = 0;
c3297561 3864 rtx temp;
e4abc3d5 3865 int i;
ebb1b59a 3866
c3297561
AO
3867 if (GET_CODE (valreg) == PARALLEL)
3868 {
3869 temp = gen_reg_rtx (outmode);
7ae4ad28 3870 emit_group_store (temp, valreg, NULL_TREE,
643642eb 3871 GET_MODE_SIZE (outmode));
c3297561
AO
3872 valreg = temp;
3873 }
3874
3875 temp = gen_reg_rtx (GET_MODE (valreg));
3876
e4abc3d5
RH
3877 /* Construct an "equal form" for the value which mentions all the
3878 arguments in order as well as the function name. */
3879 for (i = 0; i < nargs; i++)
3880 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3881 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
ebb1b59a 3882
e4abc3d5
RH
3883 insns = get_insns ();
3884 end_sequence ();
ebb1b59a 3885
e4abc3d5
RH
3886 if (flags & ECF_PURE)
3887 note = gen_rtx_EXPR_LIST (VOIDmode,
3888 gen_rtx_USE (VOIDmode,
3889 gen_rtx_MEM (BLKmode,
3890 gen_rtx_SCRATCH (VOIDmode))),
3891 note);
3892
3893 emit_libcall_block (insns, temp, valreg, note);
ebb1b59a 3894
e4abc3d5
RH
3895 valreg = temp;
3896 }
ebb1b59a 3897 }
3c0fca12
RH
3898 pop_temp_slots ();
3899
3900 /* Copy the value to the right place. */
de76b467 3901 if (outmode != VOIDmode && retval)
3c0fca12
RH
3902 {
3903 if (mem_value)
3904 {
3905 if (value == 0)
3906 value = mem_value;
3907 if (value != mem_value)
3908 emit_move_insn (value, mem_value);
3909 }
c3297561
AO
3910 else if (GET_CODE (valreg) == PARALLEL)
3911 {
3912 if (value == 0)
3913 value = gen_reg_rtx (outmode);
643642eb 3914 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
c3297561 3915 }
3c0fca12 3916 else
7ab0aca2
RH
3917 {
3918 /* Convert to the proper mode if PROMOTE_MODE has been active. */
3919 if (GET_MODE (valreg) != outmode)
3920 {
3921 int unsignedp = TYPE_UNSIGNED (tfom);
3922
3923 gcc_assert (targetm.calls.promote_function_return (tfom));
3924 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
3925 == GET_MODE (valreg));
3926
3927 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3928 }
3929
3930 if (value != 0)
3931 emit_move_insn (value, valreg);
3932 else
3933 value = valreg;
3934 }
3c0fca12
RH
3935 }
3936
f73ad30e 3937 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 3938 {
f73ad30e
JH
3939#ifdef REG_PARM_STACK_SPACE
3940 if (save_area)
b820d2b8
AM
3941 restore_fixed_argument_area (save_area, argblock,
3942 high_to_save, low_to_save);
3c0fca12 3943#endif
f725a3ec 3944
f73ad30e
JH
3945 /* If we saved any argument areas, restore them. */
3946 for (count = 0; count < nargs; count++)
3947 if (argvec[count].save_area)
3948 {
3949 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
e7949876
AM
3950 rtx adr = plus_constant (argblock,
3951 argvec[count].locate.offset.constant);
3952 rtx stack_area = gen_rtx_MEM (save_mode,
3953 memory_address (save_mode, adr));
f73ad30e 3954
9778f2f8
JH
3955 if (save_mode == BLKmode)
3956 emit_block_move (stack_area,
c22cacf3 3957 validize_mem (argvec[count].save_area),
9778f2f8
JH
3958 GEN_INT (argvec[count].locate.size.constant),
3959 BLOCK_OP_CALL_PARM);
3960 else
3961 emit_move_insn (stack_area, argvec[count].save_area);
f73ad30e 3962 }
3c0fca12 3963
f73ad30e
JH
3964 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3965 stack_usage_map = initial_stack_usage_map;
3966 }
43bc5f13 3967
d9725c41
JJ
3968 if (stack_usage_map_buf)
3969 free (stack_usage_map_buf);
3970
de76b467
JH
3971 return value;
3972
3973}
3974\f
3975/* Output a library call to function FUN (a SYMBOL_REF rtx)
3976 with call semantics selected by FN_TYPE (see below),
3977 for a value of mode OUTMODE,
3978 with NARGS different arguments, passed as alternating rtx values
3979 and machine_modes to convert them to.
de76b467 3980
1258ee80
JJ
3981 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3982 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3983 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
3984 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
3985 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
3986 or other LCT_ value for other types of library calls. */
de76b467
JH
3987
3988void
e34d07f2
KG
3989emit_library_call (rtx orgfun, enum libcall_type fn_type,
3990 enum machine_mode outmode, int nargs, ...)
de76b467 3991{
e34d07f2 3992 va_list p;
d329e058 3993
e34d07f2 3994 va_start (p, nargs);
2a8f6b90 3995 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
e34d07f2 3996 va_end (p);
de76b467
JH
3997}
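/* Usage sketch (hedged; FUN, OP0 and OP1 are hypothetical): emit a
   call to a const helper taking two SImode operands whose result is
   not used:

     emit_library_call (fun, LCT_CONST, VOIDmode, 2,
                        op0, SImode, op1, SImode);  */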
3998\f
3999/* Like emit_library_call except that an extra argument, VALUE,
4000 comes second and says where to store the result.
4001 (If VALUE is zero, this function chooses a convenient way
4002 to return the value.)
4003
4004 This function returns an rtx for where the value is to be found.
4005 If VALUE is nonzero, VALUE is returned. */
4006
4007rtx
e34d07f2
KG
4008emit_library_call_value (rtx orgfun, rtx value,
4009 enum libcall_type fn_type,
4010 enum machine_mode outmode, int nargs, ...)
de76b467 4011{
6268b922 4012 rtx result;
e34d07f2 4013 va_list p;
d329e058 4014
e34d07f2 4015 va_start (p, nargs);
6268b922
KG
4016 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4017 nargs, p);
e34d07f2 4018 va_end (p);
de76b467 4019
6268b922 4020 return result;
322e3e34
RK
4021}
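/* Usage sketch (hedged; FUN and OP are hypothetical): call a pure
   DFmode helper and let this function choose where the value comes
   back:

     rtx res = emit_library_call_value (fun, NULL_RTX, LCT_PURE,
                                        DFmode, 1, op, DFmode);  */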
4022\f
51bbfa0c
RS
4023/* Store a single argument for a function call
4024 into the register or memory area where it must be passed.
4025 *ARG describes the argument value and where to pass it.
4026
4027 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 4028 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
4029
4030 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
f725a3ec 4031 so we must be careful about how the stack is used.
51bbfa0c
RS
4032
4033 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4034 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4035 that we need not worry about saving and restoring the stack.
4036
4c6b3b2a 4037 FNDECL is the declaration of the function we are calling.
f725a3ec 4038
da7d8304 4039 Return nonzero if this arg should cause sibcall failure,
4c6b3b2a 4040 zero otherwise. */
51bbfa0c 4041
4c6b3b2a 4042static int
d329e058
AJ
4043store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4044 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
51bbfa0c 4045{
b3694847 4046 tree pval = arg->tree_value;
51bbfa0c
RS
4047 rtx reg = 0;
4048 int partial = 0;
4049 int used = 0;
6a651371 4050 int i, lower_bound = 0, upper_bound = 0;
4c6b3b2a 4051 int sibcall_failure = 0;
51bbfa0c
RS
4052
4053 if (TREE_CODE (pval) == ERROR_MARK)
4c6b3b2a 4054 return 1;
51bbfa0c 4055
cc79451b
RK
4056 /* Push a new temporary level for any temporaries we make for
4057 this argument. */
4058 push_temp_slots ();
4059
f8a097cd 4060 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
51bbfa0c 4061 {
f73ad30e
JH
4062 /* If this is being stored into a pre-allocated, fixed-size stack area,
4063 save any previous data at that location. */
4064 if (argblock && ! variable_size && arg->stack)
4065 {
51bbfa0c 4066#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
4067 /* stack_slot is negative, but we want to index stack_usage_map
4068 with positive values. */
4069 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4070 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4071 else
4072 upper_bound = 0;
51bbfa0c 4073
e7949876 4074 lower_bound = upper_bound - arg->locate.size.constant;
51bbfa0c 4075#else
f73ad30e
JH
4076 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4077 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4078 else
4079 lower_bound = 0;
51bbfa0c 4080
e7949876 4081 upper_bound = lower_bound + arg->locate.size.constant;
51bbfa0c
RS
4082#endif
4083
546ff777
AM
4084 i = lower_bound;
4085 /* Don't worry about things in the fixed argument area;
4086 it has already been saved. */
4087 if (i < reg_parm_stack_space)
4088 i = reg_parm_stack_space;
4089 while (i < upper_bound && stack_usage_map[i] == 0)
4090 i++;
51bbfa0c 4091
546ff777 4092 if (i < upper_bound)
51bbfa0c 4093 {
e7949876
AM
4094 /* We need to make a save area. */
4095 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4096 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4097 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4098 rtx stack_area = gen_rtx_MEM (save_mode, adr);
f73ad30e
JH
4099
4100 if (save_mode == BLKmode)
4101 {
1da68f56
RK
4102 tree ot = TREE_TYPE (arg->tree_value);
4103 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4104 | TYPE_QUAL_CONST));
4105
4106 arg->save_area = assign_temp (nt, 0, 1, 1);
f73ad30e
JH
4107 preserve_temp_slots (arg->save_area);
4108 emit_block_move (validize_mem (arg->save_area), stack_area,
7816b87e 4109 GEN_INT (arg->locate.size.constant),
44bb111a 4110 BLOCK_OP_CALL_PARM);
f73ad30e
JH
4111 }
4112 else
4113 {
4114 arg->save_area = gen_reg_rtx (save_mode);
4115 emit_move_insn (arg->save_area, stack_area);
4116 }
51bbfa0c
RS
4117 }
4118 }
4119 }
b564df06 4120
51bbfa0c
RS
4121 /* If this isn't going to be placed on both the stack and in registers,
4122 set up the register and number of words. */
4123 if (! arg->pass_on_stack)
aa7634dd
DM
4124 {
4125 if (flags & ECF_SIBCALL)
4126 reg = arg->tail_call_reg;
4127 else
4128 reg = arg->reg;
4129 partial = arg->partial;
4130 }
51bbfa0c 4131
366de0ce
NS
4132 /* Being passed entirely in a register. We shouldn't be called in
4133 this case. */
4134 gcc_assert (reg == 0 || partial != 0);
c22cacf3 4135
4ab56118
RK
4136 /* If this arg needs special alignment, don't load the registers
4137 here. */
4138 if (arg->n_aligned_regs != 0)
4139 reg = 0;
f725a3ec 4140
4ab56118 4141 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
4142 it directly into its stack slot. Otherwise, we can. */
4143 if (arg->value == 0)
d64f5a78 4144 {
d64f5a78
RS
4145 /* stack_arg_under_construction is nonzero if a function argument is
4146 being evaluated directly into the outgoing argument list and
4147 expand_call must take special action to preserve the argument list
4148 if it is called recursively.
4149
4150 For scalar function arguments stack_usage_map is sufficient to
4151 determine which stack slots must be saved and restored. Scalar
4152 arguments in general have pass_on_stack == 0.
4153
4154 If this argument is initialized by a function which takes the
4155 address of the argument (a C++ constructor or a C function
4156 returning a BLKmode structure), then stack_usage_map is
4157 insufficient and expand_call must push the stack around the
4158 function call. Such arguments have pass_on_stack == 1.
4159
4160 Note that it is always safe to set stack_arg_under_construction,
4161 but this generates suboptimal code if set when not needed. */
4162
4163 if (arg->pass_on_stack)
4164 stack_arg_under_construction++;
f73ad30e 4165
3a08477a
RK
4166 arg->value = expand_expr (pval,
4167 (partial
4168 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4169 ? NULL_RTX : arg->stack,
8403445a 4170 VOIDmode, EXPAND_STACK_PARM);
1efe6448
RK
4171
4172 /* If we are promoting object (or for any other reason) the mode
4173 doesn't agree, convert the mode. */
4174
7373d92d
RK
4175 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4176 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4177 arg->value, arg->unsignedp);
1efe6448 4178
d64f5a78
RS
4179 if (arg->pass_on_stack)
4180 stack_arg_under_construction--;
d64f5a78 4181 }
51bbfa0c 4182
0dc42b03 4183 /* Check for overlap with already clobbered argument area. */
07eef816
KH
4184 if ((flags & ECF_SIBCALL)
4185 && MEM_P (arg->value)
4186 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4187 arg->locate.size.constant))
4188 sibcall_failure = 1;
0dc42b03 4189
51bbfa0c
RS
4190 /* Don't allow anything left on stack from computation
4191 of argument to alloca. */
f8a097cd 4192 if (flags & ECF_MAY_BE_ALLOCA)
51bbfa0c
RS
4193 do_pending_stack_adjust ();
4194
4195 if (arg->value == arg->stack)
37a08a29
RK
4196 /* If the value is already in the stack slot, we are done. */
4197 ;
1efe6448 4198 else if (arg->mode != BLKmode)
51bbfa0c 4199 {
b3694847 4200 int size;
46bd2bee 4201 unsigned int parm_align;
51bbfa0c
RS
4202
4203 /* Argument is a scalar, not entirely passed in registers.
4204 (If part is passed in registers, arg->partial says how much
4205 and emit_push_insn will take care of putting it there.)
f725a3ec 4206
51bbfa0c
RS
4207 Push it, and if its size is less than the
4208 amount of space allocated to it,
4209 also bump stack pointer by the additional space.
4210 Note that in C the default argument promotions
4211 will prevent such mismatches. */
4212
1efe6448 4213 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
4214 /* Compute how much space the push instruction will push.
4215 On many machines, pushing a byte will advance the stack
4216 pointer by a halfword. */
4217#ifdef PUSH_ROUNDING
4218 size = PUSH_ROUNDING (size);
4219#endif
4220 used = size;
4221
4222 /* Compute how much space the argument should get:
4223 round up to a multiple of the alignment for arguments. */
1efe6448 4224 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
4225 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4226 / (PARM_BOUNDARY / BITS_PER_UNIT))
4227 * (PARM_BOUNDARY / BITS_PER_UNIT));
4228
46bd2bee
JM
4229 /* Compute the alignment of the pushed argument. */
4230 parm_align = arg->locate.boundary;
4231 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4232 {
4233 int pad = used - size;
4234 if (pad)
4235 {
4236 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4237 parm_align = MIN (parm_align, pad_align);
4238 }
4239 }
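/* Worked example of the two computations above (hypothetical
   32-bit PARM_BOUNDARY): pushing an HImode scalar gives size == 2
   and used == (2 + 3) / 4 * 4 == 4; if the argument is padded
   downward, pad == 2 and pad_align == (2 & -2) * BITS_PER_UNIT
   == 16, so parm_align drops to 16 bits. */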
4240
51bbfa0c
RS
4241 /* This isn't already where we want it on the stack, so put it there.
4242 This can either be done with push or copy insns. */
d329e058 4243 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
46bd2bee 4244 parm_align, partial, reg, used - size, argblock,
e7949876
AM
4245 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4246 ARGS_SIZE_RTX (arg->locate.alignment_pad));
841404cd
AO
4247
4248 /* Unless this is a partially-in-register argument, the argument is now
4249 in the stack. */
4250 if (partial == 0)
4251 arg->value = arg->stack;
51bbfa0c
RS
4252 }
4253 else
4254 {
4255 /* BLKmode, at least partly to be pushed. */
4256
1b1f20ca 4257 unsigned int parm_align;
b3694847 4258 int excess;
51bbfa0c
RS
4259 rtx size_rtx;
4260
4261 /* Pushing a nonscalar.
4262 If part is passed in registers, PARTIAL says how much
4263 and emit_push_insn will take care of putting it there. */
4264
4265 /* Round its size up to a multiple
4266 of the allocation unit for arguments. */
4267
e7949876 4268 if (arg->locate.size.var != 0)
51bbfa0c
RS
4269 {
4270 excess = 0;
e7949876 4271 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
51bbfa0c
RS
4272 }
4273 else
4274 {
78a52f11
RH
4275 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4276 for BLKmode is careful to avoid it. */
4277 excess = (arg->locate.size.constant
4278 - int_size_in_bytes (TREE_TYPE (pval))
4279 + partial);
db4c55f6
JM
4280 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4281 NULL_RTX, TYPE_MODE (sizetype), 0);
51bbfa0c
RS
4282 }
4283
bfc45551 4284 parm_align = arg->locate.boundary;
1b1f20ca
RH
4285
4286 /* When an argument is padded down, the block is aligned to
4287 PARM_BOUNDARY, but the actual argument isn't. */
4288 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4289 {
e7949876 4290 if (arg->locate.size.var)
1b1f20ca
RH
4291 parm_align = BITS_PER_UNIT;
4292 else if (excess)
4293 {
97d05bfd 4294 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
1b1f20ca
RH
4295 parm_align = MIN (parm_align, excess_align);
4296 }
4297 }
4298
3c0cb5de 4299 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4c6b3b2a
JJ
4300 {
4301 /* emit_push_insn might not work properly if arg->value and
e7949876 4302 argblock + arg->locate.offset areas overlap. */
4c6b3b2a
JJ
4303 rtx x = arg->value;
4304 int i = 0;
4305
4306 if (XEXP (x, 0) == current_function_internal_arg_pointer
4307 || (GET_CODE (XEXP (x, 0)) == PLUS
4308 && XEXP (XEXP (x, 0), 0) ==
4309 current_function_internal_arg_pointer
4310 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4311 {
4312 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4313 i = INTVAL (XEXP (XEXP (x, 0), 1));
4314
e0a21ab9 4315 /* expand_call should ensure this. */
366de0ce
NS
4316 gcc_assert (!arg->locate.offset.var
4317 && GET_CODE (size_rtx) == CONST_INT);
4c6b3b2a 4318
e7949876 4319 if (arg->locate.offset.constant > i)
4c6b3b2a 4320 {
e7949876 4321 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4c6b3b2a
JJ
4322 sibcall_failure = 1;
4323 }
e7949876 4324 else if (arg->locate.offset.constant < i)
4c6b3b2a 4325 {
e7949876 4326 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4c6b3b2a
JJ
4327 sibcall_failure = 1;
4328 }
4329 }
4330 }
4331
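/* Worked example of the overlap test above (hypothetical offsets):
   if the value is the incoming argument occupying bytes [8,16) of
   the argument block and the outgoing slot starts at byte 12 with
   an 8-byte size, the ranges intersect, so emit_push_insn could
   overwrite bytes it still has to read and we record a sibcall
   failure. */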
1efe6448 4332 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
1b1f20ca 4333 parm_align, partial, reg, excess, argblock,
e7949876
AM
4334 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4335 ARGS_SIZE_RTX (arg->locate.alignment_pad));
51bbfa0c 4336
841404cd
AO
4337 /* Unless this is a partially-in-register argument, the argument is now
4338 in the stack.
51bbfa0c 4339
841404cd
AO
4340 ??? Unlike the case above, in which we want the actual
4341 address of the data, so that we can load it directly into a
4342 register, here we want the address of the stack slot, so that
4343 it's properly aligned for word-by-word copying or something
4344 like that. It's not clear that this is always correct. */
4345 if (partial == 0)
4346 arg->value = arg->stack_slot;
4347 }
8df3dbb7
RH
4348
4349 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4350 {
4351 tree type = TREE_TYPE (arg->tree_value);
4352 arg->parallel_value
4353 = emit_group_load_into_temps (arg->reg, arg->value, type,
4354 int_size_in_bytes (type));
4355 }
51bbfa0c 4356
8403445a
AM
4357 /* Mark all slots this store used. */
4358 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4359 && argblock && ! variable_size && arg->stack)
4360 for (i = lower_bound; i < upper_bound; i++)
4361 stack_usage_map[i] = 1;
4362
51bbfa0c
RS
4363 /* Once we have pushed something, pops can't safely
4364 be deferred during the rest of the arguments. */
4365 NO_DEFER_POP;
4366
db907e7b
RK
4367 /* Free any temporary slots made in processing this argument. Show
4368 that we might have taken the address of something and pushed that
4369 as an operand. */
4370 preserve_temp_slots (NULL_RTX);
51bbfa0c 4371 free_temp_slots ();
cc79451b 4372 pop_temp_slots ();
4c6b3b2a
JJ
4373
4374 return sibcall_failure;
51bbfa0c 4375}
a4b1b92a 4376
fe984136 4377/* Nonzero if we do not know how to pass TYPE solely in registers. */
a4b1b92a 4378
fe984136
RH
4379bool
4380must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4381 tree type)
4382{
4383 if (!type)
4384 return false;
4385
4386 /* If the type has variable size... */
4387 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4388 return true;
a4b1b92a 4389
fe984136
RH
4390 /* If the type is marked as addressable (it is required
4391 to be constructed into the stack)... */
4392 if (TREE_ADDRESSABLE (type))
4393 return true;
4394
4395 return false;
4396}
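/* Example (hedged): a parameter type such as `struct s { char c[n]; }'
   with non-constant N has a TYPE_SIZE that is not an INTEGER_CST,
   so the function above returns true and the value travels on the
   stack. */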
a4b1b92a 4397
7ae4ad28 4398/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
fe984136
RH
4399 takes trailing padding of a structure into account. */
4400/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
a4b1b92a
RH
4401
4402bool
fe984136 4403must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
a4b1b92a
RH
4404{
4405 if (!type)
40cdfd5a 4406 return false;
a4b1b92a
RH
4407
4408 /* If the type has variable size... */
4409 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4410 return true;
4411
4412 /* If the type is marked as addressable (it is required
4413 to be constructed into the stack)... */
4414 if (TREE_ADDRESSABLE (type))
4415 return true;
4416
4417 /* If the padding and mode of the type is such that a copy into
4418 a register would put it into the wrong part of the register. */
4419 if (mode == BLKmode
4420 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4421 && (FUNCTION_ARG_PADDING (mode, type)
4422 == (BYTES_BIG_ENDIAN ? upward : downward)))
4423 return true;
4424
4425 return false;
4426}
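/* Worked example of the padding clause above (hypothetical
   big-endian target with 32-bit PARM_BOUNDARY): a 6-byte BLKmode
   structure gives 6 % 4 == 2, and if FUNCTION_ARG_PADDING pads it
   upward the copy would land in the wrong part of the register, so
   we return true. */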