/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
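
/* For example, on a target where PREFERRED_STACK_BOUNDARY is 64 and
   BITS_PER_UNIT is 8, STACK_BYTES evaluates to 8, so outgoing argument
   blocks are sized in multiples of 8 bytes.  */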

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;

static int calls_function (tree, int);
static int calls_function_1 (tree, int);

static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      int);
static tree fix_unsafe_tree (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we need only return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (tree exp, int which)
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
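
/* For example, calls_function (exp, 1) asks whether evaluating EXP might
   call `alloca', while calls_function (exp, 0) conservatively asks whether
   it might call any function at all (and hence might need to push
   arguments on the stack).  */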

/* Recursive function to do the work of above function.  */

static int
calls_function_1 (tree exp, int which)
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
        return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                == FUNCTION_TYPE)
               && (TYPE_RETURNS_STACK_DEPRESSED
                   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == FUNCTION_DECL)
               && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                       0)
                   & ECF_MAY_BE_ALLOCA))
        return 1;

      break;

    case CONSTRUCTOR:
      {
        tree tem;

        for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
          if (calls_function_1 (TREE_VALUE (tem), which))
            return 1;
      }

      return 0;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return 0;
      if (value_member (exp, calls_function_save_exprs))
        return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
                                             calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
              && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
        tree local;
        tree subblock;

        for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
          if (DECL_INITIAL (local) != 0
              && calls_function_1 (DECL_INITIAL (local), which))
            return 1;

        for (subblock = BLOCK_SUBBLOCKS (exp);
             subblock;
             subblock = TREE_CHAIN (subblock))
          if (calls_function_1 (subblock, which))
            return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
        if (calls_function_1 (TREE_VALUE (exp), which))
          return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
        && calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
\f
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
                      int reg_parm_seen, int sibcallp)
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
        if (fndecl != current_function_decl)
#endif
          funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    note_eh_region_may_contain_throw ();

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp),
   then set RETURNS_TWICE to a nonzero value.

   Similarly, set LONGJMP if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (tree fndecl, int flags)
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_LONGJMP;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
                && ! strcmp (tname, "fork"))
               /* Linux specific: __clone.  Check NAME to insist on the
                  leading underscores, to avoid polluting the ISO / POSIX
                  namespace.  */
               || (name[0] == '_' && name[1] == '_'
                   && ! strcmp (tname, "clone"))
               || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
                   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
                   && (tname[5] == '\0'
                       || ((tname[5] == 'p' || tname[5] == 'e')
                           && tname[6] == '\0'))))
        flags |= ECF_FORK_OR_EXEC;
    }
  return flags;
}
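
/* As an illustration: for a file-scope `extern' declaration of vfork,
   special_function_p returns FLAGS | ECF_RETURNS_TWICE, and because the
   prefix stripping above discards leading underscores, "__vfork" is
   recognized the same way; "alloca" and "__builtin_alloca" likewise
   yield ECF_MAY_BE_ALLOCA.  */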

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                              0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
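
/* The shape matched above is a direct call: a CALL_EXPR whose operand 0
   is an ADDR_EXPR wrapping a FUNCTION_DECL.  A call through a function
   pointer therefore never reports as an alloca call.  */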

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
      type = TREE_TYPE (exp);

      if (i)
        {
          if (i->pure_function)
            flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
          if (i->const_function)
            flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
        }

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_LIBCALL_BLOCK;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (TREE_OPERAND (t, 0));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}
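
/* A typical use, sketched: after
     int flags = call_expr_flags (exp);
   a caller can test (flags & ECF_CONST) to decide whether the call can
   be treated as CSE-able, or (flags & ECF_NOTHROW) to decide whether EH
   notes are needed.  */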

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        if ((! (GET_CODE (args[i].value) == REG
                || (GET_CODE (args[i].value) == SUBREG
                    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
            && args[i].mode != BLKmode
            && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
            && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                || preserve_subexpressions_p ()))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
\f
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
        int endian_correction = 0;

        args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
        args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, BITS_PER_WORD);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word, BITS_PER_WORD);
          }
      }
}
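
/* Worked example: a 3-byte struct passed in one 32-bit register on a
   big-endian target (with no BLOCK_REG_PADDING) gets
   endian_correction = 32 - 3 * 8 = 8, skipping the empty high-order
   byte so the data stays aligned to the least significant byte of the
   word.  */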

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree actparms, tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many words are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
          || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
          || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
                                             type, argpos < n_named_args)
#endif
          )
        {
          /* If we're compiling a thunk, pass through invisible
             references instead of making a copy.  */
          if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
              || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
                                              type, argpos < n_named_args)
                  /* If it's in a register, we must make a copy of it too.  */
                  /* ??? Is this a sufficient test?  Is there a better one? */
                  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
                       && REG_P (DECL_RTL (args[i].tree_value)))
                  && ! TREE_ADDRESSABLE (type))
#endif
              )
            {
              /* C++ uses a TARGET_EXPR to indicate that we want to make a
                 new object from the argument.  If we are passing by
                 invisible reference, the callee will do that for us, so we
                 can strip off the TARGET_EXPR.  This is not always safe,
                 but it is safe in the only case where this is a useful
                 optimization; namely, when the argument is a plain object.
                 In that case, the frontend is just asking the backend to
                 make a bitwise copy of the argument.  */

              if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
                  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
                args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
            {
              /* In the V3 C++ ABI, parameters are destroyed in the caller.
                 We implement this by passing the address of the temporary
                 rather than expanding it into another allocated slot.  */
              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0);
              *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           make_tree (type, copy));
              type = build_pointer_type (type);
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
        args[i].partial
          = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
                                        argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}
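
/* For instance, passing a TREE_ADDRESSABLE aggregate S by invisible
   reference rewrites the argument from S to the equivalent of
   build1 (ADDR_EXPR, ptr_type, copy-of-S), so the callee sees a pointer;
   the copy is skipped on the thunk and callee-copies paths above.  */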

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          if (stack_pointer_delta & (preferred_stack_boundary - 1))
            abort ();
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
        args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
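
/* Worked example of the constant-size rounding above: with
   args_size->constant == 20, stack_pointer_delta == 4 and a preferred
   boundary of 16 bytes, ((20 + 4 + 15) / 16) * 16 - 4 == 28, so after
   pushing 28 bytes the total delta is 32, a multiple of the boundary.  */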

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
        || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
        enum machine_mode mode;

        /* If this is an addressable type, we cannot pre-evaluate it.  */
        if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
          abort ();

        args[i].value
          = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

        /* ANSI doesn't require a sequence point here,
           but PCC has one, so this will avoid some problems.  */
        emit_queue ();

        args[i].initial_value = args[i].value
          = protect_from_queue (args[i].value, 0);

        mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
        if (mode != args[i].mode)
          {
            args[i].value
              = convert_modes (args[i].mode, mode,
                               args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
            /* CSE will replace this only if it contains args[i].value
               pseudo, so convert it down to the declared mode using
               a SUBREG.  */
            if (GET_CODE (args[i].value) == REG
                && GET_MODE_CLASS (args[i].mode) == MODE_INT)
              {
                args[i].initial_value
                  = gen_lowpart_SUBREG (mode, args[i].value);
                SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
                SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                              args[i].unsignedp);
              }
#endif
          }
      }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack,
                              TREE_TYPE (args[i].tree_value), 1);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack_slot,
                              TREE_TYPE (args[i].tree_value), 1);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is operand 0 of the CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
			  rtx *call_fusage, int flags, int is_sibcall,
			  int *sibcall_failure)
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
	{
	  int partial = args[i].partial;
	  int nregs;
	  int size = 0;
	  rtx before_arg = get_last_insn ();
	  /* Set to non-negative if we must move a word at a time, even if
	     just one word (e.g., partial == 1 && mode == DFmode).  Set to
	     -1 if we just use a normal move insn.  This value can be zero
	     if the argument is a zero size structure with no fields.  */
	  nregs = -1;
	  if (partial)
	    nregs = partial;
	  else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
	    {
	      size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	      nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
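	      /* The division rounds up: e.g. a 10-byte struct with
		 4-byte words needs nregs == 3.  */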
	    }
	  else
	    size = GET_MODE_SIZE (args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    {
	      tree type = TREE_TYPE (args[i].tree_value);
	      emit_group_load (reg, args[i].value, type,
			       int_size_in_bytes (type));
	    }

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    {
	      emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
	      /* Handle the case where we have a value that needs shifting
		 up to the msb, e.g. a QImode value being padded upward on
		 a BYTES_BIG_ENDIAN machine.  */
	      if (size < UNITS_PER_WORD
		  && (args[i].locate.where_pad
		      == (BYTES_BIG_ENDIAN ? upward : downward)))
		{
		  rtx x;
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
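		  /* For instance, a 1-byte (QImode) value with 4-byte
		     words gives a shift of 24 bits, placing the value
		     in the most significant byte of the word.  */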

		  /* Assigning REG here rather than a temp makes CALL_FUSAGE
		     report the whole reg as used.  Strictly speaking, the
		     call only uses SIZE bytes at the msb end, but it doesn't
		     seem worth generating rtl to say that.  */
		  reg = gen_rtx_REG (word_mode, REGNO (reg));
		  x = expand_binop (word_mode, ashl_optab, reg,
				    GEN_INT (shift), reg, 1, OPTAB_WIDEN);
		  if (x != reg)
		    emit_move_insn (reg, x);
		}
#endif
	    }

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    {
	      rtx mem = validize_mem (args[i].value);

#ifdef BLOCK_REG_PADDING
	      /* Handle a BLKmode that needs shifting.  */
	      if (nregs == 1 && size < UNITS_PER_WORD
		  && args[i].locate.where_pad == downward)
		{
		  rtx tem = operand_subword_force (mem, 0, args[i].mode);
		  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
		  rtx x = gen_reg_rtx (word_mode);
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
		  optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;

		  emit_move_insn (x, tem);
		  x = expand_binop (word_mode, dir, x, GEN_INT (shift),
				    ri, 1, OPTAB_WIDEN);
		  if (x != ri)
		    emit_move_insn (ri, x);
		}
	      else
#endif
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	    }

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else
	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
	}
    }
}

/* Try to integrate the function.  See expand_inline_function for
   documentation about the parameters.  */

static rtx
try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
		  tree type, rtx structure_value_addr)
{
  rtx temp;
  rtx before_call;
  int i;
  rtx old_stack_level = 0;
  int reg_parm_stack_space = 0;

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

  before_call = get_last_insn ();

  timevar_push (TV_INTEGRATION);

  temp = expand_inline_function (fndecl, actparms, target,
				 ignore, type,
				 structure_value_addr);

  timevar_pop (TV_INTEGRATION);

  /* If inlining succeeded, return.  */
  if (temp != (rtx) (size_t) - 1)
    {
      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  i = reg_parm_stack_space;
	  if (i > highest_outgoing_arg_in_use)
	    i = highest_outgoing_arg_in_use;
	  while (--i >= 0 && stack_usage_map[i] == 0)
	    ;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn = NULL_RTX, seq;

	      /* Look for a call in the inline function code.
		 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      if (insn)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.

		     Add the stack space reserved for register arguments, if
		     any, in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */

		  int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
				+ reg_parm_stack_space);

		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  emit_insn_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
	}

      /* If the result is equivalent to TARGET, return TARGET to simplify
	 checks in store_expr.  They can be equivalent but not equal in the
	 case of a function that returns BLKmode.  */
      if (temp != target && rtx_equal_p (temp, target))
	return target;
      return temp;
    }

  /* If inlining failed, mark FNDECL as needing to be compiled
     separately after all.  If the function was declared inline,
     give a warning.  */
  if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
      && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
    {
      warning ("%Jinlining failed in call to '%F'", fndecl, fndecl);
      warning ("called from here");
    }
  (*lang_hooks.mark_addressable) (fndecl);
  return (rtx) (size_t) - 1;
}

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjustment to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
					   struct args_size *args_size,
					   int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */
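
  /* A worked example: with preferred_unit_stack_boundary == 16,
     stack_pointer_delta == 12, unadjusted_args_size == 12 and
     pending_stack_adjust == 20, the computation below pops only 8 bytes:
     the stack then sits at delta 4, and pushing the 12 bytes of arguments
     brings it to 16, a multiple of the boundary.  ARGS_SIZE->CONSTANT
     becomes 20 - 8 + 12 == 24.  */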

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
	adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
	adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
1898
c67846f2
JJ
1899/* Scan X expression if it does not dereference any argument slots
1900 we already clobbered by tail call arguments (as noted in stored_args_map
1901 bitmap).
da7d8304 1902 Return nonzero if X expression dereferences such argument slots,
c67846f2
JJ
1903 zero otherwise. */
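
/* For example, (mem:SI (plus (reg internal_arg_pointer) (const_int 8)))
   reads incoming-argument bytes 8 through 11; it overlaps whenever any
   of bits 8..11 of STORED_ARGS_MAP is set (before the ARGS_GROW_DOWNWARD
   correction).  */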

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  unsigned int k;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    {
      if (XEXP (x, 0) == current_function_internal_arg_pointer)
	i = 0;
      else if (GET_CODE (XEXP (x, 0)) == PLUS
	       && XEXP (XEXP (x, 0), 0) == current_function_internal_arg_pointer
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	i = INTVAL (XEXP (XEXP (x, 0), 1));
      else
	return 0;

#ifdef ARGS_GROW_DOWNWARD
      i = -i - GET_MODE_SIZE (GET_MODE (x));
#endif

      for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
	if (i + k < stored_args_map->n_bits
	    && TEST_BIT (stored_args_map, i + k))
	  return 1;

      return 0;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
	    return 1;
	}
      else if (*fmt == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
	      return 1;
	}
    }
  return 0;
}

/* Scan the sequence after INSN to see whether it dereferences any
   argument slots we already clobbered with tail call arguments (as
   noted in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is
   nonzero, add the stack slots for ARG to the stored_args_map bitmap
   afterwards (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).
   Return nonzero if the sequence after INSN dereferences such argument
   slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
	SET_BIT (stored_args_map, low);
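      /* E.g. an argument stored at slot offset 8 with size 4 sets bits
	 8..11; a later insn loading any of those bytes will then be
	 flagged by the scan above on a subsequent call.  */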
    }
  return insn != NULL_RTX;
}

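/* Return a variant of T that is safe to evaluate more than once.  Safe
   trees are returned unchanged, mildly unsafe ones are wrapped in an
   UNSAVE_EXPR, and wildly unsafe ones are expanded once now, with the
   result captured in a temporary VAR_DECL.  */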
static tree
fix_unsafe_tree (tree t)
{
  switch (unsafe_for_reeval (t))
    {
    case 0:			/* Safe.  */
      break;

    case 1:			/* Mildly unsafe.  */
      t = unsave_expr (t);
      break;

    case 2:			/* Wildly unsafe.  */
      {
	tree var = build_decl (VAR_DECL, NULL_TREE,
			       TREE_TYPE (t));
	SET_DECL_RTL (var,
		      expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
	t = var;
      }
      break;

    default:
      abort ();
    }
  return t;
}

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a tail recursive "call".  */
  rtx tail_recursion_insns = NULL_RTX;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail (sibling) "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  rtx insn;
  int try_tail_call = 1;
  int try_tail_recursion = 1;
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree p = TREE_OPERAND (exp, 0);
  tree addr = TREE_OPERAND (exp, 0);
  int i;
  /* The alignment of the stack, in bits.  */
  HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  HOST_WIDE_INT preferred_unit_stack_boundary;

  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      if (!flag_no_inline
	  && fndecl != current_function_decl
	  && DECL_INLINE (fndecl)
	  && DECL_SAVED_INSNS (fndecl)
	  && DECL_SAVED_INSNS (fndecl)->inlinable)
	is_integrable = 1;
      else if (! TREE_ADDRESSABLE (fndecl))
	{
	  /* In case this function later becomes inlinable,
	     record that there was already a non-inline call to it.

	     Use abstraction instead of setting TREE_ADDRESSABLE
	     directly.  */
	  if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
	      && optimize > 0)
	    {
	      warning ("%Jcan't inline call to '%F'", fndecl, fndecl);
	      warning ("called from here");
	    }
	  (*lang_hooks.mark_addressable) (fndecl);
	}

      if (ignore
	  && lookup_attribute ("warn_unused_result",
			       TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	warning ("ignoring return value of `%D', "
		 "declared with attribute warn_unused_result", fndecl);

      flags |= flags_from_decl_or_type (fndecl);
    }

  /* If we don't have a specific function to call, see if we have any
     attributes set in the type.  */
  else
    {
      if (ignore
	  && lookup_attribute ("warn_unused_result",
			       TYPE_ATTRIBUTES (TREE_TYPE (TREE_TYPE (p)))))
	warning ("ignoring return value of function "
		 "declared with attribute warn_unused_result");
      flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
    }

  struct_value = targetm.calls.struct_value_rtx (fndecl ? TREE_TYPE (fndecl) : 0, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning ("function call has aggregate value");

  /* If the result of a pure or const function call is ignored (or void),
     and none of its arguments are volatile, we can avoid expanding the
     call and just evaluate the arguments for side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (ignore || target == const0_rtx
	  || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;

      for (arg = actparms; arg; arg = TREE_CHAIN (arg))
	if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  for (arg = actparms; arg; arg = TREE_CHAIN (arg))
	    expand_expr (TREE_VALUE (arg), const0_rtx,
			 VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }
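
  /* E.g. a call to a function declared with __attribute__ ((const))
     whose result is discarded expands to nothing beyond evaluating its
     (non-volatile) operands for side-effects.  */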

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  if (reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;
#endif

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fndecl))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
	/* Easier than making that case work right.  */
	if (is_integrable)
	  {
	    /* In case this is a static function, note that it has been
	       used.  */
	    if (! TREE_ADDRESSABLE (fndecl))
	      (*lang_hooks.mark_addressable) (fndecl);
	    is_integrable = 0;
	  }
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

	if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
	  {
	    /* The structure value address arg is already in actparms.
	       Pull it out.  It might be nice to just leave it there, but
	       we need to set structure_value_addr.  */
	    tree return_arg = TREE_VALUE (actparms);
	    actparms = TREE_CHAIN (actparms);
	    structure_value_addr = expand_expr (return_arg, NULL_RTX,
						VOIDmode, EXPAND_NORMAL);
	  }
	else if (target && GET_CODE (target) == MEM)
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */
	    rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);

	    mark_temp_addr_taken (d);
	    structure_value_addr = XEXP (d, 0);
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* If the called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp = try_to_integrate (fndecl, actparms, target,
				   ignore, TREE_TYPE (exp),
				   structure_value_addr);
      if (temp != (rtx) (size_t) - 1)
	return temp;
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      if (i && i->preferred_incoming_stack_boundary)
	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  if (! POINTER_TYPE_P (funtype))
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Munge the tree to split complex arguments into their imaginary
     and real parts.  */
  if (SPLIT_COMPLEX_ARGS)
    {
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
      actparms = split_complex_values (actparms);
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  /* See if this is a call to a function that can return more than once
     or a call to longjmp or malloc.  */
  flags |= special_function_p (fndecl, flags);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (GET_CODE (structure_value_addr) != REG
		  || (ACCUMULATE_OUTGOING_ARGS
		      && stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
		  ? copy_addr_to_reg (structure_value_addr)
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
    num_actuals++;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The last argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
     zero, this machine will be able to place unnamed args that were
     passed in registers into the stack.  So treat all args as named.
     This allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
     reliable way to pass unnamed args in registers, so we must force
     them into memory.  */
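
  /* E.g. for `int f (int a, int b, ...)', type_arg_types has length 2;
     without strict argument naming the last named arg is not counted,
     giving n_named_args == 1 (plus one more if the struct value address
     is passed as a parm).  */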

  if ((targetm.calls.strict_argument_naming (&args_so_far)
       || ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
      && type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Don't include the last named arg.  */
	 - (targetm.calls.strict_argument_naming (&args_so_far) ? 0 : 1)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = alloca (num_actuals * sizeof (struct arg_data));
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
				   n_named_args, actparms, fndecl,
				   &args_so_far, reg_parm_stack_space,
				   &old_stack_level, &old_pending_adj,
				   &must_preallocate, &flags);

  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't
	 try to make a cse'able block for this call.  We may be able to
	 do this eventually, but it is too complicated to keep track of
	 what insns go in the cse'able block and which don't.  */

      flags &= ~ECF_LIBCALL_BLOCK;
      must_preallocate = 1;
    }

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
						num_actuals, args,
						&args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx,
			      structure_value_addr))
      && (args_size.var
	  || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.

     If rtx_equal_function_value_matters is false, that means we've
     finished with regular parsing.  Which means that some of the
     machinery we use to generate tail-calls is no longer in place.
     This is most often true of sjlj-exceptions, which we couldn't
     tail-call to anyway.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || !rtx_equal_function_value_matters
      || any_pending_cleanups ()
      || args_size.var)
    try_tail_call = try_tail_recursion = 0;

  /* Tail recursion fails when we are not dealing with recursive calls.  */
  if (!try_tail_recursion
      || TREE_CODE (addr) != ADDR_EXPR
      || TREE_OPERAND (addr, 0) != current_function_decl)
    try_tail_recursion = 0;

  /* The remaining reasons for tail call optimization to fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
	 structure_value_addr can be allocated on the stack.
	 It does not seem worth the effort since few optimizable
	 sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
      /* Check whether the target is able to optimize the call
	 into a sibcall.  */
      || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
	 optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
	 some of the caller's arguments, but could clobber them beforehand if
	 the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
	 function, we cannot change it into a sibling call.  */
      || args_size.constant > current_function_args_size
      /* If the callee pops its own arguments, then it must pop exactly
	 the same number of arguments as the current function.  */
      || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
	  != RETURN_POPS_ARGS (current_function_decl,
			       TREE_TYPE (current_function_decl),
			       current_function_args_size))
      || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
    try_tail_call = 0;

  if (try_tail_call || try_tail_recursion)
    {
      int end, inc;
      actparms = NULL_TREE;
      /* Ok, we're going to give the tail call the old college try.
	 This means we're going to evaluate the function arguments
	 up to three times.  There are two degrees of badness we can
	 encounter, those that can be unsaved and those that can't.
	 (See unsafe_for_reeval commentary for details.)

	 Generate a new argument list.  Pass safe arguments through
	 unchanged.  For the easy badness wrap them in UNSAVE_EXPRs.
	 For hard badness, evaluate them now and put their resulting
	 rtx in a temporary VAR_DECL.

	 initialize_argument_information has ordered the array for the
	 order to be pushed, and we must remember this when reconstructing
	 the original argument order.  */

      if (PUSH_ARGS_REVERSED)
	{
	  inc = 1;
	  i = 0;
	  end = num_actuals;
	}
      else
	{
	  inc = -1;
	  i = num_actuals - 1;
	  end = -1;
	}

      for (; i != end; i += inc)
	{
	  args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
	  /* We need to build actparms for optimize_tail_recursion.  We can
	     safely trash away TREE_PURPOSE, since it is unused by this
	     function.  */
	  if (try_tail_recursion)
	    actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
	}
      /* Do the same for the function address if it is an expression.  */
      if (!fndecl)
	addr = fix_unsafe_tree (addr);
      /* Expanding one of those dangerous arguments could have added
	 cleanups, but otherwise give it a whirl.  */
      if (any_pending_cleanups ())
	try_tail_call = try_tail_recursion = 0;
    }

  /* Generate a tail recursion sequence when calling ourselves.  */

  if (try_tail_recursion)
    {
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail recursion
	 call expansion.  */
      int save_pending_stack_adjust = pending_stack_adjust;
      int save_stack_pointer_delta = stack_pointer_delta;

      /* Emit any queued insns now; otherwise they would end up in
	 only one of the alternates.  */
      emit_queue ();

      /* Use a new sequence to hold any RTL we generate.  We do not even
	 know if we will use this RTL yet.  The final decision can not be
	 made until after RTL generation for the entire function is
	 complete.  */
      start_sequence ();
      /* If expanding any of the arguments creates cleanups, we can't
	 do a tailcall.  So, we'll need to pop the pending cleanups
	 list.  If, however, all goes well, and there are no cleanups
	 then the call to expand_start_target_temps will have no
	 effect.  */
      expand_start_target_temps ();
      if (optimize_tail_recursion (actparms, get_last_insn ()))
	{
	  if (any_pending_cleanups ())
	    try_tail_call = try_tail_recursion = 0;
	  else
	    tail_recursion_insns = get_insns ();
	}
      expand_end_target_temps ();
      end_sequence ();

      /* Restore the original pending stack adjustment for the sibling and
	 normal call cases below.  */
      pending_stack_adjust = save_pending_stack_adjust;
      stack_pointer_delta = save_stack_pointer_delta;
    }

  if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
    {
      /* A fork duplicates the profile information, and an exec discards
	 it.  We can't rely on fork/exec to be paired.  So write out the
	 profile information we have gathered so far, and clear it.  */
      /* ??? When Linux's __clone is called with CLONE_VM set, profiling
	 is subject to race conditions, just as with multithreaded
	 programs.  */

      emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
    }

  /* Ensure current function's preferred stack boundary is at least
     what we need.  We don't have to increase alignment for recursive
     functions.  */
  if (cfun->preferred_stack_boundary < preferred_stack_boundary
      && fndecl != current_function_decl)
    cfun->preferred_stack_boundary = preferred_stack_boundary;
  if (fndecl == current_function_decl)
    cfun->recursive_call_emit = true;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  function_call_count++;

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
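  /* Pass 0 builds the sibling-call ("tail call") sequence, with
     ECF_SIBCALL set; pass 1 builds the normal call sequence.  Setting
     SIBCALL_FAILURE during pass 0 marks the sibling-call sequence as
     unusable.  */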
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail recursion
	 call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg;

      if (pass == 0)
	{
	  /* Emit any queued insns now; otherwise they would end up in
	     only one of the alternates.  */
	  emit_queue ();

	  /* State variables we need to save and restore between
	     iterations.  */
	  save_pending_stack_adjust = pending_stack_adjust;
	  save_stack_pointer_delta = stack_pointer_delta;
	}
      if (pass)
	flags &= ~ECF_SIBCALL;
      else
	flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
	 through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

	 From this point on, if the sibling call fails, we want to set
	 sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      if (pass == 0)
	{
	  /* We know at this point that there are not currently any
	     pending cleanups.  If, however, in the process of evaluating
	     the arguments we were to create some, we'll need to be
	     able to get rid of them.  */
	  expand_start_target_temps ();
	}

      /* Don't let pending stack adjusts add up to too much.
	 Also, do all pending adjustments now if there is any chance
	 this might be a call to alloca or if we are expanding a sibling
	 call sequence or if we are calling a function that is to return
	 with stack pointer depressed.  */
      if (pending_stack_adjust >= 32
	  || (pending_stack_adjust > 0
	      && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
	  || pass == 0)
	do_pending_stack_adjust ();

      /* When calling a const function, we must pop the stack args right away,
	 so that the pop is deleted or moved with the call.  */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
	NO_DEFER_POP;

#ifdef FINAL_REG_PARM_STACK_SPACE
      reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
							 args_size.var);
#endif
      /* Precompute any arguments as needed.  */
      if (pass)
	precompute_arguments (flags, num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
	 if a libcall is deleted.  */
      if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
	start_sequence ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
	 and constant sizes must be combined, the size may have to be rounded,
	 and there may be a minimum required size.  When generating a sibcall
	 pattern, do not round up, since we'll be re-using whatever space our
	 caller provided.  */
      unadjusted_args_size
	= compute_argument_block_size (reg_parm_stack_space,
				       &adjusted_args_size,
				       (pass == 0 ? 0
					: preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
	 incoming argument block.  */
      if (pass == 0)
	{
	  argblock = virtual_incoming_args_rtx;
	  argblock
#ifdef STACK_GROWS_DOWNWARD
	    = plus_constant (argblock, current_function_pretend_args_size);
#else
	    = plus_constant (argblock, -current_function_pretend_args_size);
#endif
	  stored_args_map = sbitmap_alloc (args_size.constant);
	  sbitmap_zero (stored_args_map);
	}

      /* If we have no actual push instructions, or shouldn't use them,
	 make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
	{
	  if (old_stack_level == 0)
	    {
	      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	      old_stack_pointer_delta = stack_pointer_delta;
	      old_pending_adj = pending_stack_adjust;
	      pending_stack_adjust = 0;
	      /* stack_arg_under_construction says whether a stack arg is
		 being constructed at the old stack level.  Pushing the stack
		 gets a clean outgoing argument block.  */
	      old_stack_arg_under_construction = stack_arg_under_construction;
	      stack_arg_under_construction = 0;
	    }
	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
	}
      else
	{
	  /* Note that we must go through the motions of allocating an argument
	     block even if the size is zero because we may be storing args
	     in the area reserved for register arguments, which may be part of
	     the stack frame.  */

	  int needed = adjusted_args_size.constant;

	  /* Store the maximum argument space used.  It will be pushed by
	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	     checking).  */

	  if (needed > current_function_outgoing_args_size)
	    current_function_outgoing_args_size = needed;

	  if (must_preallocate)
	    {
	      if (ACCUMULATE_OUTGOING_ARGS)
		{
		  /* Since the stack pointer will never be pushed, it is
		     possible for the evaluation of a parm to clobber
		     something we have already written to the stack.
		     Since most function calls on RISC machines do not use
		     the stack, this is uncommon, but must work correctly.

		     Therefore, we save any area of the stack that was already
		     written and that we are using.  Here we set up to do this
		     by making a new stack usage map from the old one.  The
		     actual save will be done by store_one_arg.

		     Another approach might be to try to reorder the argument
		     evaluations to avoid this conflicting stack usage.  */

#ifndef OUTGOING_REG_PARM_STACK_SPACE
		  /* Since we will be writing into the entire argument area,
		     the map must be allocated for its entire size, not just
		     the part that is the responsibility of the caller.  */
		  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
		  highest_outgoing_arg_in_use
		    = MAX (initial_highest_arg_in_use, needed + 1);
#else
		  highest_outgoing_arg_in_use
		    = MAX (initial_highest_arg_in_use, needed);
#endif
		  stack_usage_map = alloca (highest_outgoing_arg_in_use);

		  if (initial_highest_arg_in_use)
		    memcpy (stack_usage_map, initial_stack_usage_map,
			    initial_highest_arg_in_use);

		  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
		    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
			    (highest_outgoing_arg_in_use
			     - initial_highest_arg_in_use));
		  needed = 0;

		  /* The address of the outgoing argument list must not be
		     copied to a register here, because argblock would be left
		     pointing to the wrong place after the call to
		     allocate_dynamic_stack_space below.  */

		  argblock = virtual_outgoing_args_rtx;
		}
	      else
		{
		  if (inhibit_defer_pop == 0)
		    {
		      /* Try to reuse some or all of the pending_stack_adjust
			 to get this space.  */
		      needed
			= (combine_pending_stack_adjustment_and_call
			   (unadjusted_args_size,
			    &adjusted_args_size,
			    preferred_unit_stack_boundary));

		      /* combine_pending_stack_adjustment_and_call computes
			 an adjustment before the arguments are allocated.
			 Account for them and see whether or not the stack
			 needs to go up or down.  */
		      needed = unadjusted_args_size - needed;

		      if (needed < 0)
			{
			  /* We're releasing stack space.  */
			  /* ??? We can avoid any adjustment at all if we're
			     already aligned.  FIXME.  */
			  pending_stack_adjust = -needed;
			  do_pending_stack_adjust ();
			  needed = 0;
			}
		      else
			/* We need to allocate space.  We'll do that in
			   push_block below.  */
			pending_stack_adjust = 0;
		    }

		  /* Special case this because overhead of `push_block' in
		     this case is non-trivial.  */
		  if (needed == 0)
		    argblock = virtual_outgoing_args_rtx;
		  else
		    {
		      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
		      argblock = plus_constant (argblock, needed);
#endif
		    }

		  /* We only really need to call `copy_to_reg' in the case
		     where push insns are going to be used to pass ARGBLOCK
		     to a function call in ARGS.  In that case, the stack
		     pointer changes value from the allocation point to the
		     call point, and hence the value of
		     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
		     as well always do it.  */
		  argblock = copy_to_reg (argblock);
		}
	    }
	}

      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* The save/restore code in store_one_arg handles all
	     cases except one: a constructor call (including a C
	     function returning a BLKmode struct) to initialize
	     an argument.  */
	  if (stack_arg_under_construction)
	    {
#ifndef OUTGOING_REG_PARM_STACK_SPACE
	      rtx push_size = GEN_INT (reg_parm_stack_space
				       + adjusted_args_size.constant);
#else
	      rtx push_size = GEN_INT (adjusted_args_size.constant);
#endif
	      if (old_stack_level == 0)
		{
		  emit_stack_save (SAVE_BLOCK, &old_stack_level,
				   NULL_RTX);
		  old_stack_pointer_delta = stack_pointer_delta;
		  old_pending_adj = pending_stack_adjust;
		  pending_stack_adjust = 0;
		  /* stack_arg_under_construction says whether a stack
		     arg is being constructed at the old stack level.
		     Pushing the stack gets a clean outgoing argument
		     block.  */
		  old_stack_arg_under_construction
		    = stack_arg_under_construction;
		  stack_arg_under_construction = 0;
		  /* Make a new map for the new argument list.  */
		  stack_usage_map = alloca (highest_outgoing_arg_in_use);
		  memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
		  highest_outgoing_arg_in_use = 0;
		}
	      allocate_dynamic_stack_space (push_size, NULL_RTX,
					    BITS_PER_UNIT);
	    }

	  /* If argument evaluation might modify the stack pointer,
	     copy the address of the argument list to a register.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].pass_on_stack)
	      {
		argblock = copy_addr_to_reg (argblock);
		break;
	      }
	}

      compute_argument_addresses (args, argblock, num_actuals);

      /* If we push args individually in reverse order, perform stack alignment
	 before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
	  && adjusted_args_size.constant != unadjusted_args_size)
	{
	  /* When the stack adjustment is pending, we get better code
	     by combining the adjustments.  */
	  if (pending_stack_adjust
	      && ! (flags & ECF_LIBCALL_BLOCK)
	      && ! inhibit_defer_pop)
	    {
	      pending_stack_adjust
		= (combine_pending_stack_adjustment_and_call
		   (unadjusted_args_size,
		    &adjusted_args_size,
		    preferred_unit_stack_boundary));
	      do_pending_stack_adjust ();
	    }
	  else if (argblock == 0)
	    anti_adjust_stack (GEN_INT (adjusted_args_size.constant
					- unadjusted_args_size));
	}
      /* Now that the stack is properly aligned, pops can't safely
	 be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      funexp = rtx_for_function_call (fndecl, addr);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
	  && ! structure_value_addr)
	{
	  if (pcc_struct_value)
	    valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
					  fndecl, (pass == 0));
	  else
	    valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
	}

      /* Precompute all register parameters.  It isn't safe to compute anything
	 once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
	 is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
	 These come before register parms, since they can require block-moves,
	 which could clobber the registers used for register parms.
	 Parms which have partial registers are not stored here,
	 but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
	if (args[i].reg == 0 || args[i].pass_on_stack)
	  {
	    rtx before_arg = get_last_insn ();

	    if (store_one_arg (&args[i], argblock, flags,
			       adjusted_args_size.var != 0,
			       reg_parm_stack_space)
		|| (pass == 0
		    && check_sibcall_argument_overlap (before_arg,
						       &args[i], 1)))
	      sibcall_failure = 1;
	  }

      /* If we have a parm that is passed in registers but not in memory
	 and whose alignment does not permit a direct copy into registers,
	 make a group of pseudos that correspond to each register that we
	 will later fill.  */
      if (STRICT_ALIGNMENT)
	store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
	 This is the last place a block-move can happen.  */
      if (reg_parm_seen)
	for (i = 0; i < num_actuals; i++)
	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
	    {
	      rtx before_arg = get_last_insn ();

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }

      /* If we pushed args in forward order, perform stack alignment
	 after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
	anti_adjust_stack (GEN_INT (adjusted_args_size.constant
				    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
	 was not preallocated, allocate stack space here for arguments
	 passed in registers.  */
#ifdef OUTGOING_REG_PARM_STACK_SPACE
      if (!ACCUMULATE_OUTGOING_ARGS
	  && must_preallocate == 0 && reg_parm_stack_space > 0)
	anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif

      /* Pass the function the address in which to return a
	 structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
	{
	  structure_value_addr
	    = convert_memory_address (Pmode, structure_value_addr);
	  emit_move_insn (struct_value,
			  force_reg (Pmode,
				     force_operand (structure_value_addr,
3046 NULL_RTX)));
3047
61f71b34
DD
3048 if (GET_CODE (struct_value) == REG)
3049 use_reg (&call_fusage, struct_value);
0a1c58a2 3050 }
c2939b57 3051
0a1c58a2 3052 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3affaf29 3053 reg_parm_seen, pass == 0);
51bbfa0c 3054
0cdca92b
DJ
3055 load_register_parameters (args, num_actuals, &call_fusage, flags,
3056 pass == 0, &sibcall_failure);
f725a3ec 3057
0a1c58a2
JL
3058 /* Perform postincrements before actually calling the function. */
3059 emit_queue ();
51bbfa0c 3060
0a1c58a2
JL
3061 /* Save a pointer to the last insn before the call, so that we can
3062 later safely search backwards to find the CALL_INSN. */
3063 before_call = get_last_insn ();
51bbfa0c 3064
7d167afd
JJ
3065 /* Set up next argument register. For sibling calls on machines
3066 with register windows this should be the incoming register. */
3067#ifdef FUNCTION_INCOMING_ARG
3068 if (pass == 0)
3069 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3070 void_type_node, 1);
3071 else
3072#endif
3073 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3074 void_type_node, 1);
3075
0a1c58a2
JL
3076 /* All arguments and registers used for the call must be set up by
3077 now! */
3078
ce48579b
RH
3079 /* Stack must be properly aligned now. */
3080 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
d9a7d592 3081 abort ();
ebcd0b57 3082
0a1c58a2
JL
3083 /* Generate the actual call instruction. */
3084 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
099e9712 3085 adjusted_args_size.constant, struct_value_size,
7d167afd 3086 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
fa5322fa 3087 flags, & args_so_far);
0a1c58a2
JL
3088
3089 /* If call is cse'able, make appropriate pair of reg-notes around it.
3090 Test valreg so we don't crash; may safely ignore `const'
3091 if return type is void. Disable for PARALLEL return values, because
3092 we have no way to move such values into a pseudo register. */
53d4257f 3093 if (pass && (flags & ECF_LIBCALL_BLOCK))
9ae8ffe7 3094 {
0a1c58a2 3095 rtx insns;
9778f2f8
JH
3096 rtx insn;
3097 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
9ae8ffe7 3098
9778f2f8
JH
3099 insns = get_insns ();
3100
 3101 /* Expansion of block moves may have introduced a loop, which
 3102 must not appear inside a libcall block. */
3103 for (insn = insns; insn; insn = NEXT_INSN (insn))
3104 if (GET_CODE (insn) == JUMP_INSN)
3105 failed = true;
3106
3107 if (failed)
e4abc3d5 3108 {
e4abc3d5 3109 end_sequence ();
2f937369 3110 emit_insn (insns);
e4abc3d5
RH
3111 }
3112 else
3113 {
3114 rtx note = 0;
3115 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3116
3117 /* Mark the return value as a pointer if needed. */
3118 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3119 mark_reg_pointer (temp,
3120 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3121
3122 /* Construct an "equal form" for the value which mentions all the
3123 arguments in order as well as the function name. */
3124 for (i = 0; i < num_actuals; i++)
3125 note = gen_rtx_EXPR_LIST (VOIDmode,
3126 args[i].initial_value, note);
3127 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
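	    /* For a two-argument call, for example, the note built above
	       has the shape
		 (expr_list FUNEXP (expr_list ARG1 (expr_list ARG0 nil)))
	       i.e. the function expression outermost, then the arguments
	       with the first argument innermost.  */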
3128
e4abc3d5
RH
3129 end_sequence ();
3130
3131 if (flags & ECF_PURE)
3132 note = gen_rtx_EXPR_LIST (VOIDmode,
3133 gen_rtx_USE (VOIDmode,
3134 gen_rtx_MEM (BLKmode,
3135 gen_rtx_SCRATCH (VOIDmode))),
3136 note);
3137
3138 emit_libcall_block (insns, temp, valreg, note);
3139
3140 valreg = temp;
3141 }
0a1c58a2 3142 }
53d4257f 3143 else if (pass && (flags & ECF_MALLOC))
0a1c58a2
JL
3144 {
3145 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3146 rtx last, insns;
3147
f725a3ec 3148 /* The return value from a malloc-like function is a pointer. */
0a1c58a2 3149 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
bdb429a5 3150 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
0a1c58a2
JL
3151
3152 emit_move_insn (temp, valreg);
3153
 3154 /* The return value from a malloc-like function cannot alias
3155 anything else. */
3156 last = get_last_insn ();
f725a3ec 3157 REG_NOTES (last) =
0a1c58a2
JL
3158 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3159
3160 /* Write out the sequence. */
3161 insns = get_insns ();
3162 end_sequence ();
2f937369 3163 emit_insn (insns);
0a1c58a2
JL
3164 valreg = temp;
3165 }
51bbfa0c 3166
0a1c58a2
JL
3167 /* For calls to `setjmp', etc., inform flow.c it should complain
3168 if nonvolatile values are live. For functions that cannot return,
3169 inform flow that control does not fall through. */
51bbfa0c 3170
570a98eb 3171 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
c2939b57 3172 {
570a98eb 3173 /* The barrier must be emitted
0a1c58a2
JL
3174 immediately after the CALL_INSN. Some ports emit more
3175 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 3176
0a1c58a2
JL
3177 rtx last = get_last_insn ();
3178 while (GET_CODE (last) != CALL_INSN)
3179 {
3180 last = PREV_INSN (last);
3181 /* There was no CALL_INSN? */
3182 if (last == before_call)
3183 abort ();
3184 }
51bbfa0c 3185
570a98eb 3186 emit_barrier_after (last);
8af61113 3187
f451eeef
JS
3188 /* Stack adjustments after a noreturn call are dead code.
3189 However when NO_DEFER_POP is in effect, we must preserve
3190 stack_pointer_delta. */
3191 if (inhibit_defer_pop == 0)
3192 {
3193 stack_pointer_delta = old_stack_allocated;
3194 pending_stack_adjust = 0;
3195 }
0a1c58a2 3196 }
51bbfa0c 3197
f2d33f13 3198 if (flags & ECF_LONGJMP)
099e9712 3199 current_function_calls_longjmp = 1;
51bbfa0c 3200
0a1c58a2 3201 /* If value type not void, return an rtx for the value. */
51bbfa0c 3202
0a1c58a2
JL
3203 /* If there are cleanups to be called, don't use a hard reg as target.
3204 We need to double check this and see if it matters anymore. */
de1f5659 3205 if (any_pending_cleanups ())
194c7c45
RH
3206 {
3207 if (target && REG_P (target)
3208 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3209 target = 0;
3210 sibcall_failure = 1;
3211 }
51bbfa0c 3212
0a1c58a2
JL
3213 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3214 || ignore)
b5cd4ed4 3215 target = const0_rtx;
0a1c58a2
JL
3216 else if (structure_value_addr)
3217 {
3218 if (target == 0 || GET_CODE (target) != MEM)
3219 {
3bdf5ad1
RK
3220 target
3221 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3222 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3223 structure_value_addr));
3224 set_mem_attributes (target, exp, 1);
0a1c58a2
JL
3225 }
3226 }
3227 else if (pcc_struct_value)
cacbd532 3228 {
0a1c58a2
JL
3229 /* This is the special C++ case where we need to
3230 know what the true target was. We take care to
3231 never use this value more than once in one expression. */
3232 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3233 copy_to_reg (valreg));
3bdf5ad1 3234 set_mem_attributes (target, exp, 1);
cacbd532 3235 }
0a1c58a2
JL
3236 /* Handle calls that return values in multiple non-contiguous locations.
3237 The Irix 6 ABI has examples of this. */
3238 else if (GET_CODE (valreg) == PARALLEL)
3239 {
0a1c58a2
JL
3240 if (target == 0)
3241 {
1da68f56
RK
3242 /* This will only be assigned once, so it can be readonly. */
3243 tree nt = build_qualified_type (TREE_TYPE (exp),
3244 (TYPE_QUALS (TREE_TYPE (exp))
3245 | TYPE_QUAL_CONST));
3246
3247 target = assign_temp (nt, 0, 1, 1);
0a1c58a2
JL
3248 preserve_temp_slots (target);
3249 }
3250
3251 if (! rtx_equal_p (target, valreg))
6e985040 3252 emit_group_store (target, valreg, TREE_TYPE (exp),
04050c69 3253 int_size_in_bytes (TREE_TYPE (exp)));
19caa751 3254
0a1c58a2
JL
 3255 /* We cannot support sibling calls for this case. */
3256 sibcall_failure = 1;
3257 }
3258 else if (target
3259 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3260 && GET_MODE (target) == GET_MODE (valreg))
3261 {
3262 /* TARGET and VALREG cannot be equal at this point because the
3263 latter would not have REG_FUNCTION_VALUE_P true, while the
3264 former would if it were referring to the same register.
3265
3266 If they refer to the same register, this move will be a no-op,
3267 except when function inlining is being done. */
3268 emit_move_insn (target, valreg);
0219237c
OH
3269
3270 /* If we are setting a MEM, this code must be executed. Since it is
3271 emitted after the call insn, sibcall optimization cannot be
3272 performed in that case. */
3273 if (GET_CODE (target) == MEM)
3274 sibcall_failure = 1;
0a1c58a2
JL
3275 }
3276 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
8eb99146
RH
3277 {
3278 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3279
 3280 /* We cannot support sibling calls for this case. */
3281 sibcall_failure = 1;
3282 }
0a1c58a2
JL
3283 else
3284 target = copy_to_reg (valreg);
51bbfa0c 3285
61f71b34
DD
3286 if (targetm.calls.promote_function_return(funtype))
3287 {
0a1c58a2
JL
3288 /* If we promoted this return value, make the proper SUBREG. TARGET
3289 might be const0_rtx here, so be careful. */
3290 if (GET_CODE (target) == REG
3291 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3292 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3293 {
3294 tree type = TREE_TYPE (exp);
3295 int unsignedp = TREE_UNSIGNED (type);
ddef6bc7 3296 int offset = 0;
84b55618 3297
0a1c58a2
JL
3298 /* If we don't promote as expected, something is wrong. */
3299 if (GET_MODE (target)
3300 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3301 abort ();
5d2ac65e 3302
ddef6bc7
JJ
3303 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3304 && GET_MODE_SIZE (GET_MODE (target))
3305 > GET_MODE_SIZE (TYPE_MODE (type)))
3306 {
3307 offset = GET_MODE_SIZE (GET_MODE (target))
3308 - GET_MODE_SIZE (TYPE_MODE (type));
3309 if (! BYTES_BIG_ENDIAN)
3310 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3311 else if (! WORDS_BIG_ENDIAN)
3312 offset %= UNITS_PER_WORD;
3313 }
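		  /* A worked example, assuming a target where both
		     WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN are set and
		     UNITS_PER_WORD == 8: an HImode value promoted into a
		     DImode register gives OFFSET = 8 - 2 = 6, so the
		     SUBREG built below picks out the low-order two bytes
		     of the register.  */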
3314 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
0a1c58a2 3315 SUBREG_PROMOTED_VAR_P (target) = 1;
7879b81e 3316 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
0a1c58a2 3317 }
61f71b34 3318 }
84b55618 3319
0a1c58a2
JL
3320 /* If size of args is variable or this was a constructor call for a stack
3321 argument, restore saved stack-pointer value. */
51bbfa0c 3322
7393c642 3323 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
0a1c58a2
JL
3324 {
3325 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
38afb23f 3326 stack_pointer_delta = old_stack_pointer_delta;
0a1c58a2 3327 pending_stack_adjust = old_pending_adj;
0a1c58a2
JL
3328 stack_arg_under_construction = old_stack_arg_under_construction;
3329 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3330 stack_usage_map = initial_stack_usage_map;
0a1c58a2
JL
3331 sibcall_failure = 1;
3332 }
f8a097cd 3333 else if (ACCUMULATE_OUTGOING_ARGS && pass)
0a1c58a2 3334 {
51bbfa0c 3335#ifdef REG_PARM_STACK_SPACE
0a1c58a2 3336 if (save_area)
b820d2b8
AM
3337 restore_fixed_argument_area (save_area, argblock,
3338 high_to_save, low_to_save);
b94301c2 3339#endif
51bbfa0c 3340
0a1c58a2
JL
3341 /* If we saved any argument areas, restore them. */
3342 for (i = 0; i < num_actuals; i++)
3343 if (args[i].save_area)
3344 {
3345 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3346 rtx stack_area
3347 = gen_rtx_MEM (save_mode,
3348 memory_address (save_mode,
3349 XEXP (args[i].stack_slot, 0)));
3350
3351 if (save_mode != BLKmode)
3352 emit_move_insn (stack_area, args[i].save_area);
3353 else
44bb111a 3354 emit_block_move (stack_area, args[i].save_area,
e7949876 3355 GEN_INT (args[i].locate.size.constant),
44bb111a 3356 BLOCK_OP_CALL_PARM);
0a1c58a2 3357 }
51bbfa0c 3358
0a1c58a2
JL
3359 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3360 stack_usage_map = initial_stack_usage_map;
3361 }
51bbfa0c 3362
f725a3ec 3363 /* If this was alloca, record the new stack level for nonlocal gotos.
0a1c58a2
JL
3364 Check for the handler slots since we might not have a save area
3365 for non-local gotos. */
59257ff7 3366
f2d33f13 3367 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
0a1c58a2 3368 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c 3369
0a1c58a2
JL
3370 /* Free up storage we no longer need. */
3371 for (i = 0; i < num_actuals; ++i)
3372 if (args[i].aligned_regs)
3373 free (args[i].aligned_regs);
3374
e245d3af
RH
3375 if (pass == 0)
3376 {
3377 /* Undo the fake expand_start_target_temps we did earlier. If
3378 there had been any cleanups created, we've already set
3379 sibcall_failure. */
3380 expand_end_target_temps ();
3381 }
3382
ee960939
OH
3383 /* If this function is returning into a memory location marked as
3384 readonly, it means it is initializing that location. We normally treat
3385 functions as not clobbering such locations, so we need to specify that
3386 this one does. We do this by adding the appropriate CLOBBER to the
3387 CALL_INSN function usage list. This cannot be done by emitting a
3388 standalone CLOBBER after the call because the latter would be ignored
3389 by at least the delay slot scheduling pass. We do this now instead of
3390 adding to call_fusage before the call to emit_call_1 because TARGET
3391 may be modified in the meantime. */
d329e058 3392 if (structure_value_addr != 0 && target != 0
ee960939
OH
3393 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3394 add_function_usage_to
3395 (last_call_insn (),
3396 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3397 NULL_RTX));
d329e058 3398
0a1c58a2
JL
3399 insns = get_insns ();
3400 end_sequence ();
3401
3402 if (pass == 0)
3403 {
3404 tail_call_insns = insns;
3405
0a1c58a2
JL
3406 /* Restore the pending stack adjustment now that we have
3407 finished generating the sibling call sequence. */
1503a7ec 3408
0a1c58a2 3409 pending_stack_adjust = save_pending_stack_adjust;
1503a7ec 3410 stack_pointer_delta = save_stack_pointer_delta;
099e9712
JH
3411
3412 /* Prepare arg structure for next iteration. */
f725a3ec 3413 for (i = 0; i < num_actuals; i++)
099e9712
JH
3414 {
3415 args[i].value = 0;
3416 args[i].aligned_regs = 0;
3417 args[i].stack = 0;
3418 }
c67846f2
JJ
3419
3420 sbitmap_free (stored_args_map);
0a1c58a2
JL
3421 }
3422 else
38afb23f
OH
3423 {
3424 normal_call_insns = insns;
3425
3426 /* Verify that we've deallocated all the stack we used. */
8af61113
RS
3427 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3428 && old_stack_allocated != stack_pointer_delta
3429 - pending_stack_adjust)
38afb23f
OH
3430 abort ();
3431 }
fadb729c
JJ
3432
3433 /* If something prevents making this a sibling call,
3434 zero out the sequence. */
3435 if (sibcall_failure)
3436 tail_call_insns = NULL_RTX;
0a1c58a2
JL
3437 }
3438
3439 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3440 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3441 can happen if the arguments to this function call an inline
 3442 function whose expansion contains another CALL_PLACEHOLDER.
3443
 3444 If there are any CALL_PLACEHOLDERs in any of these sequences, replace them
f725a3ec 3445 with their normal call. */
0a1c58a2
JL
3446
3447 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3448 if (GET_CODE (insn) == CALL_INSN
3449 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3450 replace_call_placeholder (insn, sibcall_use_normal);
3451
3452 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3453 if (GET_CODE (insn) == CALL_INSN
3454 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3455 replace_call_placeholder (insn, sibcall_use_normal);
3456
3457 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3458 if (GET_CODE (insn) == CALL_INSN
3459 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3460 replace_call_placeholder (insn, sibcall_use_normal);
3461
3462 /* If this was a potential tail recursion site, then emit a
3463 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3464 One of them will be selected later. */
3465 if (tail_recursion_insns || tail_call_insns)
3466 {
3467 /* The tail recursion label must be kept around. We could expose
3468 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3469 and makes determining true tail recursion sites difficult.
3470
3471 So we set LABEL_PRESERVE_P here, then clear it when we select
3472 one of the call sequences after rtl generation is complete. */
3473 if (tail_recursion_insns)
3474 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3475 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3476 tail_call_insns,
3477 tail_recursion_insns,
3478 tail_recursion_label));
3479 }
3480 else
2f937369 3481 emit_insn (normal_call_insns);
51bbfa0c 3482
0a1c58a2 3483 currently_expanding_call--;
8e6a59fe 3484
7393c642
RK
3485 /* If this function returns with the stack pointer depressed, ensure
3486 this block saves and restores the stack pointer, show it was
3487 changed, and adjust for any outgoing arg space. */
3488 if (flags & ECF_SP_DEPRESSED)
3489 {
3490 clear_pending_stack_adjust ();
3491 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3492 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3493 save_stack_pointer ();
3494 }
3495
51bbfa0c
RS
3496 return target;
3497}
ded9bf77
AH
3498
3499/* Traverse an argument list in VALUES and expand all complex
3500 arguments into their components. */
3501tree
3502split_complex_values (tree values)
3503{
3504 tree p;
3505
3506 values = copy_list (values);
3507
3508 for (p = values; p; p = TREE_CHAIN (p))
3509 {
3510 tree complex_value = TREE_VALUE (p);
3511 tree complex_type;
3512
3513 complex_type = TREE_TYPE (complex_value);
3514 if (!complex_type)
3515 continue;
3516
3517 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3518 {
3519 tree subtype;
3520 tree real, imag, next;
3521
3522 subtype = TREE_TYPE (complex_type);
3523 complex_value = save_expr (complex_value);
3524 real = build1 (REALPART_EXPR, subtype, complex_value);
3525 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3526
3527 TREE_VALUE (p) = real;
3528 next = TREE_CHAIN (p);
3529 imag = build_tree_list (NULL_TREE, imag);
3530 TREE_CHAIN (p) = imag;
3531 TREE_CHAIN (imag) = next;
3532
3533 /* Skip the newly created node. */
3534 p = TREE_CHAIN (p);
3535 }
3536 }
3537
3538 return values;
3539}
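
/* For example, an argument list
     (a: complex double, b: int)
   becomes
     (real part of a: double, imaginary part of a: double, b: int)
   with A wrapped in a SAVE_EXPR so it is evaluated only once.  */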
3540
3541/* Traverse a list of TYPES and expand all complex types into their
3542 components. */
3543tree
3544split_complex_types (tree types)
3545{
3546 tree p;
3547
3548 types = copy_list (types);
3549
3550 for (p = types; p; p = TREE_CHAIN (p))
3551 {
3552 tree complex_type = TREE_VALUE (p);
3553
3554 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3555 {
3556 tree next, imag;
3557
3558 /* Rewrite complex type with component type. */
3559 TREE_VALUE (p) = TREE_TYPE (complex_type);
3560 next = TREE_CHAIN (p);
3561
3562 /* Add another component type for the imaginary part. */
3563 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3564 TREE_CHAIN (p) = imag;
3565 TREE_CHAIN (imag) = next;
3566
3567 /* Skip the newly created node. */
3568 p = TREE_CHAIN (p);
3569 }
3570 }
3571
3572 return types;
3573}
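
/* For example, a type list (complex float, int) becomes
   (float, float, int), where the second float stands for the
   imaginary part.  */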
51bbfa0c 3574\f
de76b467 3575/* Output a library call to function FUN (a SYMBOL_REF rtx).
f725a3ec 3576 The RETVAL parameter specifies whether the return value needs to be saved; the other
0407c02b 3577 parameters are documented in the emit_library_call function below. */
8ac61af7 3578
de76b467 3579static rtx
d329e058
AJ
3580emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3581 enum libcall_type fn_type,
3582 enum machine_mode outmode, int nargs, va_list p)
43bc5f13 3583{
3c0fca12
RH
3584 /* Total size in bytes of all the stack-parms scanned so far. */
3585 struct args_size args_size;
3586 /* Size of arguments before any adjustments (such as rounding). */
3587 struct args_size original_args_size;
b3694847 3588 int argnum;
3c0fca12
RH
3589 rtx fun;
3590 int inc;
3591 int count;
3c0fca12
RH
3592 rtx argblock = 0;
3593 CUMULATIVE_ARGS args_so_far;
f725a3ec
KH
3594 struct arg
3595 {
3596 rtx value;
3597 enum machine_mode mode;
3598 rtx reg;
3599 int partial;
e7949876 3600 struct locate_and_pad_arg_data locate;
f725a3ec
KH
3601 rtx save_area;
3602 };
3c0fca12
RH
3603 struct arg *argvec;
3604 int old_inhibit_defer_pop = inhibit_defer_pop;
3605 rtx call_fusage = 0;
3606 rtx mem_value = 0;
5591ee6f 3607 rtx valreg;
3c0fca12
RH
3608 int pcc_struct_value = 0;
3609 int struct_value_size = 0;
52a11cbf 3610 int flags;
3c0fca12 3611 int reg_parm_stack_space = 0;
3c0fca12 3612 int needed;
695ee791 3613 rtx before_call;
b0c48229 3614 tree tfom; /* type_for_mode (outmode, 0) */
3c0fca12 3615
f73ad30e 3616#ifdef REG_PARM_STACK_SPACE
3c0fca12
RH
3617 /* Define the boundary of the register parm stack space that needs to be
 3618 saved, if any. */
b820d2b8 3619 int low_to_save, high_to_save;
f725a3ec 3620 rtx save_area = 0; /* Place that it is saved. */
3c0fca12
RH
3621#endif
3622
3c0fca12
RH
3623 /* Size of the stack reserved for parameter registers. */
3624 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3625 char *initial_stack_usage_map = stack_usage_map;
3c0fca12 3626
61f71b34
DD
3627 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3628
3c0fca12
RH
3629#ifdef REG_PARM_STACK_SPACE
3630#ifdef MAYBE_REG_PARM_STACK_SPACE
3631 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3632#else
3633 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3634#endif
3635#endif
3636
9555a122 3637 /* By default, library functions can not throw. */
52a11cbf
RH
3638 flags = ECF_NOTHROW;
3639
9555a122
RH
3640 switch (fn_type)
3641 {
3642 case LCT_NORMAL:
53d4257f 3643 break;
9555a122 3644 case LCT_CONST:
53d4257f
JH
3645 flags |= ECF_CONST;
3646 break;
9555a122 3647 case LCT_PURE:
53d4257f 3648 flags |= ECF_PURE;
9555a122
RH
3649 break;
3650 case LCT_CONST_MAKE_BLOCK:
53d4257f 3651 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
9555a122
RH
3652 break;
3653 case LCT_PURE_MAKE_BLOCK:
53d4257f 3654 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
9555a122
RH
3655 break;
3656 case LCT_NORETURN:
3657 flags |= ECF_NORETURN;
3658 break;
3659 case LCT_THROW:
3660 flags = ECF_NORETURN;
3661 break;
9d98f8f9
JH
3662 case LCT_ALWAYS_RETURN:
3663 flags = ECF_ALWAYS_RETURN;
3664 break;
9defc9b7
RH
3665 case LCT_RETURNS_TWICE:
3666 flags = ECF_RETURNS_TWICE;
3667 break;
9555a122 3668 }
3c0fca12
RH
3669 fun = orgfun;
3670
3c0fca12
RH
3671 /* Ensure current function's preferred stack boundary is at least
3672 what we need. */
3673 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3674 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3c0fca12
RH
3675
3676 /* If this kind of value comes back in memory,
3677 decide where in memory it should come back. */
b0c48229 3678 if (outmode != VOIDmode)
3c0fca12 3679 {
b0c48229 3680 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
61f71b34 3681 if (aggregate_value_p (tfom, 0))
b0c48229 3682 {
3c0fca12 3683#ifdef PCC_STATIC_STRUCT_RETURN
b0c48229
NB
3684 rtx pointer_reg
3685 = hard_function_value (build_pointer_type (tfom), 0, 0);
3686 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3687 pcc_struct_value = 1;
3688 if (value == 0)
3689 value = gen_reg_rtx (outmode);
3c0fca12 3690#else /* not PCC_STATIC_STRUCT_RETURN */
b0c48229
NB
3691 struct_value_size = GET_MODE_SIZE (outmode);
3692 if (value != 0 && GET_CODE (value) == MEM)
3693 mem_value = value;
3694 else
3695 mem_value = assign_temp (tfom, 0, 1, 1);
3c0fca12 3696#endif
b0c48229
NB
3697 /* This call returns a big structure. */
3698 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3699 }
3c0fca12 3700 }
b0c48229
NB
3701 else
3702 tfom = void_type_node;
3c0fca12
RH
3703
3704 /* ??? Unfinished: must pass the memory address as an argument. */
3705
3706 /* Copy all the libcall-arguments out of the varargs data
3707 and into a vector ARGVEC.
3708
3709 Compute how to pass each argument. We only support a very small subset
3710 of the full argument passing conventions to limit complexity here since
3711 library functions shouldn't have many args. */
3712
703ad42b
KG
3713 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3714 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3c0fca12 3715
97fc4caf
AO
3716#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3717 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3718#else
3c0fca12 3719 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
97fc4caf 3720#endif
3c0fca12
RH
3721
3722 args_size.constant = 0;
3723 args_size.var = 0;
3724
3725 count = 0;
3726
ebb1b59a
BS
3727 /* Now we are about to start emitting insns that can be deleted
3728 if a libcall is deleted. */
53d4257f 3729 if (flags & ECF_LIBCALL_BLOCK)
ebb1b59a
BS
3730 start_sequence ();
3731
3c0fca12
RH
3732 push_temp_slots ();
3733
3734 /* If there's a structure value address to be passed,
3735 either pass it in the special place, or pass it as an extra argument. */
61f71b34 3736 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3c0fca12
RH
3737 {
3738 rtx addr = XEXP (mem_value, 0);
3739 nargs++;
3740
3741 /* Make sure it is a reasonable operand for a move or push insn. */
3742 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3743 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3744 addr = force_operand (addr, NULL_RTX);
3745
3746 argvec[count].value = addr;
3747 argvec[count].mode = Pmode;
3748 argvec[count].partial = 0;
3749
3750 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3751#ifdef FUNCTION_ARG_PARTIAL_NREGS
3752 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3753 abort ();
3754#endif
3755
3756 locate_and_pad_parm (Pmode, NULL_TREE,
a4d5044f
CM
3757#ifdef STACK_PARMS_IN_REG_PARM_AREA
3758 1,
3759#else
3760 argvec[count].reg != 0,
3761#endif
e7949876 3762 0, NULL_TREE, &args_size, &argvec[count].locate);
3c0fca12 3763
3c0fca12
RH
3764 if (argvec[count].reg == 0 || argvec[count].partial != 0
3765 || reg_parm_stack_space > 0)
e7949876 3766 args_size.constant += argvec[count].locate.size.constant;
3c0fca12
RH
3767
3768 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3769
3770 count++;
3771 }
3772
3773 for (; count < nargs; count++)
3774 {
3775 rtx val = va_arg (p, rtx);
3776 enum machine_mode mode = va_arg (p, enum machine_mode);
3777
3778 /* We cannot convert the arg value to the mode the library wants here;
 3779 we must do it earlier, where we know the signedness of the arg. */
3780 if (mode == BLKmode
3781 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3782 abort ();
3783
3c0fca12
RH
3784 /* There's no need to call protect_from_queue, because
3785 either emit_move_insn or emit_push_insn will do that. */
3786
3787 /* Make sure it is a reasonable operand for a move or push insn. */
3788 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3789 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3790 val = force_operand (val, NULL_RTX);
3791
3792#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3793 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3794 {
f474c6f8
AO
3795 rtx slot;
3796 int must_copy = 1
d329e058 3797#ifdef FUNCTION_ARG_CALLEE_COPIES
f474c6f8
AO
3798 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3799 NULL_TREE, 1)
3800#endif
3801 ;
3802
a0dc500c
R
3803 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3804 functions, so we have to pretend this isn't such a function. */
3805 if (flags & ECF_LIBCALL_BLOCK)
3806 {
3807 rtx insns = get_insns ();
3808 end_sequence ();
3809 emit_insn (insns);
3810 }
3811 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3812
99a32567
DM
3813 /* If this was a CONST function, it is now PURE since
3814 it now reads memory. */
3815 if (flags & ECF_CONST)
3816 {
3817 flags &= ~ECF_CONST;
3818 flags |= ECF_PURE;
3819 }
3820
f474c6f8
AO
3821 if (GET_MODE (val) == MEM && ! must_copy)
3822 slot = val;
3823 else if (must_copy)
3824 {
b0c48229
NB
3825 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3826 0, 1, 1);
f474c6f8
AO
3827 emit_move_insn (slot, val);
3828 }
3829 else
3830 {
b0c48229 3831 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
f474c6f8 3832
546ff777
AM
3833 slot
3834 = gen_rtx_MEM (mode,
3835 expand_expr (build1 (ADDR_EXPR,
3836 build_pointer_type (type),
3837 make_tree (type, val)),
3838 NULL_RTX, VOIDmode, 0));
f474c6f8 3839 }
1da68f56 3840
6b5273c3
AO
3841 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3842 gen_rtx_USE (VOIDmode, slot),
3843 call_fusage);
f474c6f8
AO
3844 if (must_copy)
3845 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3846 gen_rtx_CLOBBER (VOIDmode,
3847 slot),
3848 call_fusage);
3849
3c0fca12 3850 mode = Pmode;
f474c6f8 3851 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12
RH
3852 }
3853#endif
3854
3855 argvec[count].value = val;
3856 argvec[count].mode = mode;
3857
3858 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3859
3860#ifdef FUNCTION_ARG_PARTIAL_NREGS
3861 argvec[count].partial
3862 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3863#else
3864 argvec[count].partial = 0;
3865#endif
3866
3867 locate_and_pad_parm (mode, NULL_TREE,
a4d5044f 3868#ifdef STACK_PARMS_IN_REG_PARM_AREA
f725a3ec 3869 1,
a4d5044f
CM
3870#else
3871 argvec[count].reg != 0,
3872#endif
e7949876
AM
3873 argvec[count].partial,
3874 NULL_TREE, &args_size, &argvec[count].locate);
3c0fca12 3875
e7949876 3876 if (argvec[count].locate.size.var)
3c0fca12
RH
3877 abort ();
3878
3c0fca12
RH
3879 if (argvec[count].reg == 0 || argvec[count].partial != 0
3880 || reg_parm_stack_space > 0)
e7949876 3881 args_size.constant += argvec[count].locate.size.constant;
3c0fca12
RH
3882
3883 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3884 }
3c0fca12
RH
3885
3886#ifdef FINAL_REG_PARM_STACK_SPACE
3887 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3888 args_size.var);
3889#endif
3890 /* If this machine requires an external definition for library
3891 functions, write one out. */
3892 assemble_external_libcall (fun);
3893
3894 original_args_size = args_size;
1503a7ec
JH
3895 args_size.constant = (((args_size.constant
3896 + stack_pointer_delta
3897 + STACK_BYTES - 1)
3898 / STACK_BYTES
3899 * STACK_BYTES)
3900 - stack_pointer_delta);
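  /* A worked example, assuming STACK_BYTES == 16: with 20 bytes of
     arguments and STACK_POINTER_DELTA == 8, the sum 28 rounds up to 32,
     so ARGS_SIZE.CONSTANT becomes 32 - 8 = 24 and the stack pointer is
     16-byte aligned once the arguments have been pushed.  */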
3c0fca12
RH
3901
3902 args_size.constant = MAX (args_size.constant,
3903 reg_parm_stack_space);
3904
3905#ifndef OUTGOING_REG_PARM_STACK_SPACE
3906 args_size.constant -= reg_parm_stack_space;
3907#endif
3908
3909 if (args_size.constant > current_function_outgoing_args_size)
3910 current_function_outgoing_args_size = args_size.constant;
3911
f73ad30e
JH
3912 if (ACCUMULATE_OUTGOING_ARGS)
3913 {
3914 /* Since the stack pointer will never be pushed, it is possible for
3915 the evaluation of a parm to clobber something we have already
3916 written to the stack. Since most function calls on RISC machines
3917 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 3918
f73ad30e
JH
3919 Therefore, we save any area of the stack that was already written
3920 and that we are using. Here we set up to do this by making a new
3921 stack usage map from the old one.
3c0fca12 3922
f73ad30e
JH
3923 Another approach might be to try to reorder the argument
3924 evaluations to avoid this conflicting stack usage. */
3c0fca12 3925
f73ad30e 3926 needed = args_size.constant;
3c0fca12
RH
3927
3928#ifndef OUTGOING_REG_PARM_STACK_SPACE
f73ad30e
JH
3929 /* Since we will be writing into the entire argument area, the
3930 map must be allocated for its entire size, not just the part that
3931 is the responsibility of the caller. */
3932 needed += reg_parm_stack_space;
3c0fca12
RH
3933#endif
3934
3935#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
3936 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3937 needed + 1);
3c0fca12 3938#else
f73ad30e
JH
3939 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3940 needed);
3c0fca12 3941#endif
703ad42b 3942 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3c0fca12 3943
f73ad30e 3944 if (initial_highest_arg_in_use)
2e09e75a
JM
3945 memcpy (stack_usage_map, initial_stack_usage_map,
3946 initial_highest_arg_in_use);
3c0fca12 3947
f73ad30e 3948 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 3949 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
3950 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3951 needed = 0;
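
      /* To illustrate the map layout: STACK_USAGE_MAP has one char per
	 byte of the outgoing argument area, nonzero when that byte is
	 already in use.  If 16 bytes were in use before this call and 24
	 are needed now, the first 16 entries are copied from the old map
	 and entries 16..23 start out clear.  */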
3c0fca12 3952
c39ada04
DD
3953 /* We must be careful to use virtual regs before they're instantiated,
3954 and real regs afterwards. Loop optimization, for example, can create
3955 new libcalls after we've instantiated the virtual regs, and if we
3956 use virtuals anyway, they won't match the rtl patterns. */
3c0fca12 3957
c39ada04
DD
3958 if (virtuals_instantiated)
3959 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3960 else
3961 argblock = virtual_outgoing_args_rtx;
f73ad30e
JH
3962 }
3963 else
3964 {
3965 if (!PUSH_ARGS)
3966 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3967 }
3c0fca12 3968
3c0fca12
RH
3969 /* If we push args individually in reverse order, perform stack alignment
3970 before the first push (the last arg). */
f73ad30e 3971 if (argblock == 0 && PUSH_ARGS_REVERSED)
3c0fca12
RH
3972 anti_adjust_stack (GEN_INT (args_size.constant
3973 - original_args_size.constant));
3c0fca12 3974
f73ad30e
JH
3975 if (PUSH_ARGS_REVERSED)
3976 {
3977 inc = -1;
3978 argnum = nargs - 1;
3979 }
3980 else
3981 {
3982 inc = 1;
3983 argnum = 0;
3984 }
3c0fca12 3985
f73ad30e
JH
3986#ifdef REG_PARM_STACK_SPACE
3987 if (ACCUMULATE_OUTGOING_ARGS)
3988 {
3989 /* The argument list is the property of the called routine and it
3990 may clobber it. If the fixed area has been used for previous
b820d2b8
AM
3991 parameters, we must save and restore it. */
3992 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3993 &low_to_save, &high_to_save);
3c0fca12
RH
3994 }
3995#endif
f725a3ec 3996
3c0fca12
RH
3997 /* Push the args that need to be pushed. */
3998
3999 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4000 are to be pushed. */
4001 for (count = 0; count < nargs; count++, argnum += inc)
4002 {
b3694847
SS
4003 enum machine_mode mode = argvec[argnum].mode;
4004 rtx val = argvec[argnum].value;
3c0fca12
RH
4005 rtx reg = argvec[argnum].reg;
4006 int partial = argvec[argnum].partial;
f73ad30e 4007 int lower_bound = 0, upper_bound = 0, i;
3c0fca12
RH
4008
4009 if (! (reg != 0 && partial == 0))
4010 {
f73ad30e
JH
4011 if (ACCUMULATE_OUTGOING_ARGS)
4012 {
f8a097cd
JH
 4013 /* If this is being stored into a pre-allocated, fixed-size
4014 stack area, save any previous data at that location. */
3c0fca12
RH
4015
4016#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
4017 /* stack_slot is negative, but we want to index stack_usage_map
4018 with positive values. */
e7949876
AM
4019 upper_bound = -argvec[argnum].locate.offset.constant + 1;
4020 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3c0fca12 4021#else
e7949876
AM
4022 lower_bound = argvec[argnum].locate.offset.constant;
4023 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3c0fca12
RH
4024#endif
4025
546ff777
AM
4026 i = lower_bound;
4027 /* Don't worry about things in the fixed argument area;
4028 it has already been saved. */
4029 if (i < reg_parm_stack_space)
4030 i = reg_parm_stack_space;
4031 while (i < upper_bound && stack_usage_map[i] == 0)
4032 i++;
3c0fca12 4033
546ff777 4034 if (i < upper_bound)
f73ad30e 4035 {
e7949876
AM
4036 /* We need to make a save area. */
4037 unsigned int size
4038 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
f73ad30e 4039 enum machine_mode save_mode
e7949876
AM
4040 = mode_for_size (size, MODE_INT, 1);
4041 rtx adr
4042 = plus_constant (argblock,
4043 argvec[argnum].locate.offset.constant);
f73ad30e 4044 rtx stack_area
e7949876 4045 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
f73ad30e 4046
9778f2f8
JH
4047 if (save_mode == BLKmode)
4048 {
4049 argvec[argnum].save_area
4050 = assign_stack_temp (BLKmode,
4051 argvec[argnum].locate.size.constant,
4052 0);
4053
4054 emit_block_move (validize_mem (argvec[argnum].save_area),
4055 stack_area,
4056 GEN_INT (argvec[argnum].locate.size.constant),
4057 BLOCK_OP_CALL_PARM);
4058 }
4059 else
4060 {
4061 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4062
4063 emit_move_insn (argvec[argnum].save_area, stack_area);
4064 }
f73ad30e 4065 }
3c0fca12 4066 }
19caa751 4067
44bb111a
RH
4068 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4069 partial, reg, 0, argblock,
e7949876
AM
4070 GEN_INT (argvec[argnum].locate.offset.constant),
4071 reg_parm_stack_space,
4072 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3c0fca12 4073
3c0fca12 4074 /* Now mark the segment we just used. */
f73ad30e
JH
4075 if (ACCUMULATE_OUTGOING_ARGS)
4076 for (i = lower_bound; i < upper_bound; i++)
4077 stack_usage_map[i] = 1;
3c0fca12
RH
4078
4079 NO_DEFER_POP;
4080 }
4081 }
4082
3c0fca12
RH
4083 /* If we pushed args in forward order, perform stack alignment
4084 after pushing the last arg. */
f73ad30e 4085 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3c0fca12
RH
4086 anti_adjust_stack (GEN_INT (args_size.constant
4087 - original_args_size.constant));
3c0fca12 4088
f73ad30e
JH
4089 if (PUSH_ARGS_REVERSED)
4090 argnum = nargs - 1;
4091 else
4092 argnum = 0;
3c0fca12 4093
3affaf29 4094 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
3c0fca12
RH
4095
4096 /* Now load any reg parms into their regs. */
4097
4098 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4099 are to be pushed. */
4100 for (count = 0; count < nargs; count++, argnum += inc)
4101 {
b3694847 4102 rtx val = argvec[argnum].value;
3c0fca12
RH
4103 rtx reg = argvec[argnum].reg;
4104 int partial = argvec[argnum].partial;
4105
4106 /* Handle calls that pass values in multiple non-contiguous
4107 locations. The PA64 has examples of this for library calls. */
4108 if (reg != 0 && GET_CODE (reg) == PARALLEL)
6e985040 4109 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
3c0fca12
RH
4110 else if (reg != 0 && partial == 0)
4111 emit_move_insn (reg, val);
4112
4113 NO_DEFER_POP;
4114 }
4115
3c0fca12
RH
4116 /* Any regs containing parms remain in use through the call. */
4117 for (count = 0; count < nargs; count++)
4118 {
4119 rtx reg = argvec[count].reg;
4120 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4121 use_group_regs (&call_fusage, reg);
4122 else if (reg != 0)
4123 use_reg (&call_fusage, reg);
4124 }
4125
4126 /* Pass the function the address in which to return a structure value. */
61f71b34 4127 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3c0fca12 4128 {
61f71b34 4129 emit_move_insn (struct_value,
3c0fca12
RH
4130 force_reg (Pmode,
4131 force_operand (XEXP (mem_value, 0),
4132 NULL_RTX)));
61f71b34
DD
4133 if (GET_CODE (struct_value) == REG)
4134 use_reg (&call_fusage, struct_value);
3c0fca12
RH
4135 }
4136
4137 /* Don't allow popping to be deferred, since then
4138 cse'ing of library calls could delete a call and leave the pop. */
4139 NO_DEFER_POP;
5591ee6f
JH
4140 valreg = (mem_value == 0 && outmode != VOIDmode
4141 ? hard_libcall_value (outmode) : NULL_RTX);
3c0fca12 4142
ce48579b 4143 /* Stack must be properly aligned now. */
ebcd0b57 4144 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
f725a3ec 4145 abort ();
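  /* For example, with PREFERRED_STACK_BOUNDARY == 128 the unit boundary
     is 16 bytes, so the mask above is 15 and any nonzero low bits in
     STACK_POINTER_DELTA indicate a misaligned argument block.  */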
ebcd0b57 4146
695ee791
RH
4147 before_call = get_last_insn ();
4148
3c0fca12
RH
4149 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4150 will set inhibit_defer_pop to that value. */
de76b467
JH
4151 /* The return type is needed to decide how many bytes the function pops.
4152 Signedness plays no role in that, so for simplicity, we pretend it's
4153 always signed. We also assume that the list of arguments passed has
4154 no impact, so we pretend it is unknown. */
3c0fca12 4155
f725a3ec
KH
4156 emit_call_1 (fun,
4157 get_identifier (XSTR (orgfun, 0)),
b0c48229 4158 build_function_type (tfom, NULL_TREE),
f725a3ec 4159 original_args_size.constant, args_size.constant,
3c0fca12
RH
4160 struct_value_size,
4161 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
5591ee6f 4162 valreg,
fa5322fa 4163 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3c0fca12 4164
695ee791
RH
4165 /* For calls to `setjmp', etc., inform flow.c it should complain
4166 if nonvolatile values are live. For functions that cannot return,
4167 inform flow that control does not fall through. */
4168
570a98eb 4169 if (flags & (ECF_NORETURN | ECF_LONGJMP))
695ee791 4170 {
570a98eb 4171 /* The barrier note must be emitted
695ee791
RH
4172 immediately after the CALL_INSN. Some ports emit more than
4173 just a CALL_INSN above, so we must search for it here. */
4174
4175 rtx last = get_last_insn ();
4176 while (GET_CODE (last) != CALL_INSN)
4177 {
4178 last = PREV_INSN (last);
4179 /* There was no CALL_INSN? */
4180 if (last == before_call)
4181 abort ();
4182 }
4183
570a98eb 4184 emit_barrier_after (last);
695ee791
RH
4185 }
4186
3c0fca12
RH
4187 /* Now restore inhibit_defer_pop to its actual original value. */
4188 OK_DEFER_POP;
4189
ebb1b59a
BS
4190 /* If call is cse'able, make appropriate pair of reg-notes around it.
4191 Test valreg so we don't crash; may safely ignore `const'
4192 if return type is void. Disable for PARALLEL return values, because
4193 we have no way to move such values into a pseudo register. */
53d4257f 4194 if (flags & ECF_LIBCALL_BLOCK)
ebb1b59a 4195 {
ebb1b59a 4196 rtx insns;
ebb1b59a 4197
c3297561 4198 if (valreg == 0)
e4abc3d5
RH
4199 {
4200 insns = get_insns ();
4201 end_sequence ();
2f937369 4202 emit_insn (insns);
e4abc3d5
RH
4203 }
4204 else
4205 {
4206 rtx note = 0;
c3297561 4207 rtx temp;
e4abc3d5 4208 int i;
ebb1b59a 4209
c3297561
AO
4210 if (GET_CODE (valreg) == PARALLEL)
4211 {
4212 temp = gen_reg_rtx (outmode);
643642eb
DJ
4213 emit_group_store (temp, valreg, NULL_TREE,
4214 GET_MODE_SIZE (outmode));
c3297561
AO
4215 valreg = temp;
4216 }
4217
4218 temp = gen_reg_rtx (GET_MODE (valreg));
4219
e4abc3d5
RH
4220 /* Construct an "equal form" for the value which mentions all the
4221 arguments in order as well as the function name. */
4222 for (i = 0; i < nargs; i++)
4223 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4224 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
ebb1b59a 4225
e4abc3d5
RH
4226 insns = get_insns ();
4227 end_sequence ();
ebb1b59a 4228
e4abc3d5
RH
4229 if (flags & ECF_PURE)
4230 note = gen_rtx_EXPR_LIST (VOIDmode,
4231 gen_rtx_USE (VOIDmode,
4232 gen_rtx_MEM (BLKmode,
4233 gen_rtx_SCRATCH (VOIDmode))),
4234 note);
4235
4236 emit_libcall_block (insns, temp, valreg, note);
ebb1b59a 4237
e4abc3d5
RH
4238 valreg = temp;
4239 }
ebb1b59a 4240 }
3c0fca12
RH
4241 pop_temp_slots ();
4242
4243 /* Copy the value to the right place. */
de76b467 4244 if (outmode != VOIDmode && retval)
3c0fca12
RH
4245 {
4246 if (mem_value)
4247 {
4248 if (value == 0)
4249 value = mem_value;
4250 if (value != mem_value)
4251 emit_move_insn (value, mem_value);
4252 }
c3297561
AO
4253 else if (GET_CODE (valreg) == PARALLEL)
4254 {
4255 if (value == 0)
4256 value = gen_reg_rtx (outmode);
643642eb 4257 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
c3297561 4258 }
3c0fca12 4259 else if (value != 0)
d57551c7 4260 emit_move_insn (value, valreg);
3c0fca12 4261 else
d57551c7 4262 value = valreg;
3c0fca12
RH
4263 }
4264
f73ad30e 4265 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 4266 {
f73ad30e
JH
4267#ifdef REG_PARM_STACK_SPACE
4268 if (save_area)
b820d2b8
AM
4269 restore_fixed_argument_area (save_area, argblock,
4270 high_to_save, low_to_save);
3c0fca12 4271#endif
f725a3ec 4272
f73ad30e
JH
4273 /* If we saved any argument areas, restore them. */
4274 for (count = 0; count < nargs; count++)
4275 if (argvec[count].save_area)
4276 {
4277 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
e7949876
AM
4278 rtx adr = plus_constant (argblock,
4279 argvec[count].locate.offset.constant);
4280 rtx stack_area = gen_rtx_MEM (save_mode,
4281 memory_address (save_mode, adr));
f73ad30e 4282
9778f2f8
JH
4283 if (save_mode == BLKmode)
4284 emit_block_move (stack_area,
4285 validize_mem (argvec[count].save_area),
4286 GEN_INT (argvec[count].locate.size.constant),
4287 BLOCK_OP_CALL_PARM);
4288 else
4289 emit_move_insn (stack_area, argvec[count].save_area);
f73ad30e 4290 }
3c0fca12 4291
f73ad30e
JH
4292 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4293 stack_usage_map = initial_stack_usage_map;
4294 }
43bc5f13 4295
de76b467
JH
4296 return value;
4297
4298}
4299\f
4300/* Output a library call to function FUN (a SYMBOL_REF rtx)
4302 for a value of mode OUTMODE,
4303 with NARGS different arguments, passed as alternating rtx values
4304 and machine_modes to convert them to.
4305 The rtx values should have been passed through protect_from_queue already.
4306
1258ee80
JJ
4307 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4308 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4309 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
 4310 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
 4311 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4312 or other LCT_ value for other types of library calls. */
de76b467
JH
4313
4314void
e34d07f2
KG
4315emit_library_call (rtx orgfun, enum libcall_type fn_type,
4316 enum machine_mode outmode, int nargs, ...)
de76b467 4317{
e34d07f2 4318 va_list p;
d329e058 4319
e34d07f2 4320 va_start (p, nargs);
2a8f6b90 4321 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
e34d07f2 4322 va_end (p);
de76b467
JH
4323}
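
/* A minimal usage sketch, assuming a hypothetical helper "__helper2"
   that takes two SImode operands OP0 and OP1, already expanded to rtx,
   and returns nothing:

     rtx fun = gen_rtx_SYMBOL_REF (Pmode, "__helper2");
     emit_library_call (fun, LCT_NORMAL, VOIDmode, 2,
			op0, SImode, op1, SImode);

   For a helper that produces a value, emit_library_call_value below
   is used instead, with OUTMODE set to the mode of the result.  */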
4324\f
4325/* Like emit_library_call except that an extra argument, VALUE,
4326 comes second and says where to store the result.
4327 (If VALUE is zero, this function chooses a convenient way
 4328 to return the value.)
4329
4330 This function returns an rtx for where the value is to be found.
4331 If VALUE is nonzero, VALUE is returned. */
4332
4333rtx
e34d07f2
KG
4334emit_library_call_value (rtx orgfun, rtx value,
4335 enum libcall_type fn_type,
4336 enum machine_mode outmode, int nargs, ...)
de76b467 4337{
6268b922 4338 rtx result;
e34d07f2 4339 va_list p;
d329e058 4340
e34d07f2 4341 va_start (p, nargs);
6268b922
KG
4342 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4343 nargs, p);
e34d07f2 4344 va_end (p);
de76b467 4345
6268b922 4346 return result;
322e3e34
RK
4347}
4348\f
51bbfa0c
RS
4349/* Store a single argument for a function call
4350 into the register or memory area where it must be passed.
4351 *ARG describes the argument value and where to pass it.
4352
4353 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 4354 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
4355
4356 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
f725a3ec 4357 so we must be careful about how the stack is used.
51bbfa0c
RS
4358
4359 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 4360 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is set to indicate
4361 that we need not worry about saving and restoring the stack.
4362
4c6b3b2a 4363 FNDECL is the declaration of the function we are calling.
f725a3ec 4364
da7d8304 4365 Return nonzero if this arg should cause sibcall failure,
4c6b3b2a 4366 zero otherwise. */
51bbfa0c 4367
4c6b3b2a 4368static int
d329e058
AJ
4369store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4370 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
51bbfa0c 4371{
b3694847 4372 tree pval = arg->tree_value;
51bbfa0c
RS
4373 rtx reg = 0;
4374 int partial = 0;
4375 int used = 0;
6a651371 4376 int i, lower_bound = 0, upper_bound = 0;
4c6b3b2a 4377 int sibcall_failure = 0;
51bbfa0c
RS
4378
4379 if (TREE_CODE (pval) == ERROR_MARK)
4c6b3b2a 4380 return 1;
51bbfa0c 4381
cc79451b
RK
4382 /* Push a new temporary level for any temporaries we make for
4383 this argument. */
4384 push_temp_slots ();
4385
f8a097cd 4386 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
51bbfa0c 4387 {
f73ad30e
JH
 4388 /* If this is being stored into a pre-allocated, fixed-size stack area,
4389 save any previous data at that location. */
4390 if (argblock && ! variable_size && arg->stack)
4391 {
51bbfa0c 4392#ifdef ARGS_GROW_DOWNWARD
f73ad30e
JH
4393 /* stack_slot is negative, but we want to index stack_usage_map
4394 with positive values. */
4395 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4396 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4397 else
4398 upper_bound = 0;
51bbfa0c 4399
e7949876 4400 lower_bound = upper_bound - arg->locate.size.constant;
51bbfa0c 4401#else
f73ad30e
JH
4402 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4403 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4404 else
4405 lower_bound = 0;
51bbfa0c 4406
e7949876 4407 upper_bound = lower_bound + arg->locate.size.constant;
51bbfa0c
RS
4408#endif
4409
546ff777
AM
4410 i = lower_bound;
4411 /* Don't worry about things in the fixed argument area;
4412 it has already been saved. */
4413 if (i < reg_parm_stack_space)
4414 i = reg_parm_stack_space;
4415 while (i < upper_bound && stack_usage_map[i] == 0)
4416 i++;
51bbfa0c 4417
546ff777 4418 if (i < upper_bound)
51bbfa0c 4419 {
e7949876
AM
4420 /* We need to make a save area. */
4421 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4422 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4423 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4424 rtx stack_area = gen_rtx_MEM (save_mode, adr);
f73ad30e
JH
4425
4426 if (save_mode == BLKmode)
4427 {
1da68f56
RK
4428 tree ot = TREE_TYPE (arg->tree_value);
4429 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4430 | TYPE_QUAL_CONST));
4431
4432 arg->save_area = assign_temp (nt, 0, 1, 1);
f73ad30e
JH
4433 preserve_temp_slots (arg->save_area);
4434 emit_block_move (validize_mem (arg->save_area), stack_area,
44bb111a
RH
4435 expr_size (arg->tree_value),
4436 BLOCK_OP_CALL_PARM);
f73ad30e
JH
4437 }
4438 else
4439 {
4440 arg->save_area = gen_reg_rtx (save_mode);
4441 emit_move_insn (arg->save_area, stack_area);
4442 }
51bbfa0c
RS
4443 }
4444 }
4445 }
b564df06 4446
51bbfa0c
RS
4447 /* If this isn't going to be placed on both the stack and in registers,
4448 set up the register and number of words. */
4449 if (! arg->pass_on_stack)
aa7634dd
DM
4450 {
4451 if (flags & ECF_SIBCALL)
4452 reg = arg->tail_call_reg;
4453 else
4454 reg = arg->reg;
4455 partial = arg->partial;
4456 }
51bbfa0c
RS
4457
4458 if (reg != 0 && partial == 0)
4459 /* Being passed entirely in a register. We shouldn't be called in
6d2f8887 4460 this case. */
51bbfa0c
RS
4461 abort ();
4462
4ab56118
RK
4463 /* If this arg needs special alignment, don't load the registers
4464 here. */
4465 if (arg->n_aligned_regs != 0)
4466 reg = 0;
f725a3ec 4467
4ab56118 4468 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
4469 it directly into its stack slot. Otherwise, we can. */
4470 if (arg->value == 0)
d64f5a78 4471 {
d64f5a78
RS
4472 /* stack_arg_under_construction is nonzero if a function argument is
4473 being evaluated directly into the outgoing argument list and
4474 expand_call must take special action to preserve the argument list
4475 if it is called recursively.
4476
4477 For scalar function arguments stack_usage_map is sufficient to
4478 determine which stack slots must be saved and restored. Scalar
4479 arguments in general have pass_on_stack == 0.
4480
4481 If this argument is initialized by a function which takes the
4482 address of the argument (a C++ constructor or a C function
4483 returning a BLKmode structure), then stack_usage_map is
4484 insufficient and expand_call must push the stack around the
4485 function call. Such arguments have pass_on_stack == 1.
4486
4487 Note that it is always safe to set stack_arg_under_construction,
4488 but this generates suboptimal code if set when not needed. */
4489
4490 if (arg->pass_on_stack)
4491 stack_arg_under_construction++;
f73ad30e 4492
3a08477a
RK
4493 arg->value = expand_expr (pval,
4494 (partial
4495 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4496 ? NULL_RTX : arg->stack,
8403445a 4497 VOIDmode, EXPAND_STACK_PARM);
1efe6448
RK
4498
4499 /* If we are promoting object (or for any other reason) the mode
4500 doesn't agree, convert the mode. */
4501
7373d92d
RK
4502 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4503 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4504 arg->value, arg->unsignedp);
1efe6448 4505
d64f5a78
RS
4506 if (arg->pass_on_stack)
4507 stack_arg_under_construction--;
d64f5a78 4508 }
51bbfa0c
RS
4509
4510 /* Don't allow anything left on stack from computation
4511 of argument to alloca. */
f8a097cd 4512 if (flags & ECF_MAY_BE_ALLOCA)
51bbfa0c
RS
4513 do_pending_stack_adjust ();
4514
4515 if (arg->value == arg->stack)
37a08a29
RK
4516 /* If the value is already in the stack slot, we are done. */
4517 ;
1efe6448 4518 else if (arg->mode != BLKmode)
51bbfa0c 4519 {
b3694847 4520 int size;
51bbfa0c
RS
4521
4522 /* Argument is a scalar, not entirely passed in registers.
4523 (If part is passed in registers, arg->partial says how much
4524 and emit_push_insn will take care of putting it there.)
f725a3ec 4525
51bbfa0c
RS
4526 Push it, and if its size is less than the
4527 amount of space allocated to it,
 4528 also bump the stack pointer by the additional space.
4529 Note that in C the default argument promotions
4530 will prevent such mismatches. */
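
      /* (Example: in a call to an unprototyped or variadic function,
         a char argument is promoted to int, so the value pushed is
         already int-sized and fills its slot.)  */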

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
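
      /* (Worked example, assuming PARM_BOUNDARY == 32 and
         BITS_PER_UNIT == 8: a 2-byte argument that needs padding gives
         used = ((2 + 4 - 1) / 4) * 4 == 4, so emit_push_insn below is
         asked to pad by used - size == 2 extra bytes.)  */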

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because
             emit_push_insn for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial * UNITS_PER_WORD);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }
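
      /* (Worked example: for a 5-byte struct whose stack slot was
         rounded up to 8 bytes, with no part passed in registers,
         excess == 8 - 5 + 0 == 3, the padding beyond the data
         itself.)  */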

      /* Some types will require stricter alignment, which will be
         provided for elsewhere in argument layout.  */
      parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }
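
      /* (Note: EXCESS & -EXCESS isolates the lowest set bit of EXCESS,
         i.e. the largest power of two dividing it; e.g. 6 & -6 == 2.
         Since downward-padded data starts EXCESS bytes past the
         PARM_BOUNDARY-aligned block, that power of two bounds the
         alignment we may assume for the data itself.)  */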

      if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == current_function_internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     current_function_internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != current_function_internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
                abort ();

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
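
      /* (Reading of the test above: the source bytes sit at offset I
         from the incoming argument pointer and the destination slot at
         arg->locate.offset.constant; the two branches flag any overlap
         of the two INTVAL (size_rtx)-byte regions.  For instance, a
         source at 8 and a destination at 12 overlap when the size is
         8, so the sibling call must be abandoned.)  */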

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}

/* Nonzero if we do not know how to pass TYPE solely in registers.
   We cannot do so in the following cases:

   - if the type has variable size
   - if the type is marked as addressable (it is required to be constructed
     into the stack)
   - if the padding and mode of the type are such that a copy into a register
     would put it into the wrong part of the register.

   Which padding can't be supported depends on the byte endianness.

   A value in a register is implicitly padded at the most significant end.
   On a big-endian machine, that is the lower end in memory.
   So a value padded in memory at the upper end can't go in a register.
   For a little-endian machine, the reverse is true.  */

bool
default_must_pass_in_stack (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
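
/* (Illustrative example, assuming a big-endian target with
   PARM_BOUNDARY == 32 where a 3-byte aggregate such as

       struct rgb { unsigned char r, g, b; };

   has BLKmode and is padded upward: its size, 3, is not a multiple of
   PARM_BOUNDARY / BITS_PER_UNIT == 4, and the upward padding would
   leave the bytes at the wrong end of a register, so the function
   above returns true for it.)  */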