/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "insn-flags.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif
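/* Since `defined' evaluates to 1 or 0, the `!=' above acts as an
   exclusive or: PUSH_ARGS_REVERSED is defined exactly when the stack
   and the argument area grow in opposite directions.  */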

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif

static int calls_function PROTO((tree, int));
static int calls_function_1 PROTO((tree, int));
static void emit_call_1 PROTO((rtx, tree, tree, HOST_WIDE_INT,
			       HOST_WIDE_INT, rtx, rtx,
			       int, rtx, int));
static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
				  tree, int));

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;
  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
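/* CALLS_FUNCTION_SAVE_EXPRS is cleared both before and after the walk:
   before so that SAVE_EXPRs noted by an earlier, unrelated call are not
   mistaken for already-visited ones, and after so the list is not kept
   live once the answer has been computed.  */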

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL))
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

	  if ((DECL_BUILT_IN (fndecl)
	       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
	      || (DECL_SAVED_INSNS (fndecl)
		  && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
		      & FUNCTION_FLAGS_CALLS_ALLOCA)))
	    return 1;
	}

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	register tree local;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;
      }
      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;

    default:
      break;
    }

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
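/* The address returned above is what emit_call_1 wraps in a MEM for the
   call pattern; any static chain register loaded here is recorded in
   *CALL_FUSAGE so that the call insn shows its use of that register.  */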

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   rounded up to STACK_BOUNDARY; zero if the size is variable.
   This is both to put into the call insn and
   to generate explicit popping code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.

   IS_CONST is true if this is a `const' call.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
	     next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
	     is_const)
     rtx funexp;
     tree fndecl;
     tree funtype;
     HOST_WIDE_INT stack_size;
     HOST_WIDE_INT struct_value_size;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const;
{
  rtx stack_size_rtx = GEN_INT (stack_size);
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
  rtx call_insn;
#ifndef ACCUMULATE_OUTGOING_ARGS
  int already_popped = 0;
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  if (HAVE_call_pop && HAVE_call_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
	  || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = gen_call_value_pop (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (gen_call_value (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
      stack_size_rtx = GEN_INT (stack_size);
    }

  if (stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
	pending_stack_adjust += stack_size;
      else
	adjust_stack (stack_size_rtx);
    }
#endif
}

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (exp, target, ignore)
     tree exp;
     rtx target;
     int ignore;
{
  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Data type of the function.  */
  tree funtype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  char *name = 0;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Count arg position in order args appear.  */
  int argpos;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

#ifdef PUSH_ROUNDING
  int must_preallocate = 0;
#else
  int must_preallocate = 1;
#endif

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;
  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Nonzero if it is plausible that this is a call to alloca.  */
  int may_be_alloca;
  /* Nonzero if this is a call to malloc or a related function.  */
  int is_malloc;
  /* Nonzero if this is a call to setjmp or a related function.  */
  int returns_twice;
  /* Nonzero if this is a call to `longjmp'.  */
  int is_longjmp;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
  /* Nonzero if this is a call to a `const' function.
     Note that only explicitly named functions are handled as `const' here.  */
  int is_const = 0;
  /* Nonzero if this is a call to a `volatile' function.  */
  int is_volatile = 0;
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  int old_stack_arg_under_construction;
#endif

  rtx old_stack_level = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  register tree p;
  register int i, j;

  /* The value of the function call can be put in a hard register.  But
     if -fcheck-memory-usage, code which invokes functions (and thus
     damages some hard registers) can be inserted before using the value.
     So, target is always a pseudo-register in that case.  */
  if (flag_check_memory_usage)
    target = 0;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  p = TREE_OPERAND (exp, 0);
  if (TREE_CODE (p) == ADDR_EXPR)
    {
      fndecl = TREE_OPERAND (p, 0);
      if (TREE_CODE (fndecl) != FUNCTION_DECL)
	fndecl = 0;
      else
	{
	  if (!flag_no_inline
	      && fndecl != current_function_decl
	      && DECL_INLINE (fndecl)
	      && DECL_SAVED_INSNS (fndecl)
	      && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
	    is_integrable = 1;
	  else if (! TREE_ADDRESSABLE (fndecl))
	    {
	      /* In case this function later becomes inlinable,
		 record that there was already a non-inline call to it.

		 Use abstraction instead of setting TREE_ADDRESSABLE
		 directly.  */
	      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
		  && optimize > 0)
		{
		  warning_with_decl (fndecl, "can't inline call to `%s'");
		  warning ("called from here");
		}
	      mark_addressable (fndecl);
	    }

	  if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
	      && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
	    is_const = 1;

	  if (TREE_THIS_VOLATILE (fndecl))
	    is_volatile = 1;
	}
    }

  /* If we don't have a specific function to call, see if we have a
     constant or `noreturn' function from the type.  */
  if (fndecl == 0)
    {
      is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
      is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
    }

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
  if (reg_parm_stack_space > 0)
    must_preallocate = 1;
#endif

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning ("function call has aggregate value");

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp))
    {
      /* This call returns a big structure.  */
      is_const = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
	/* Easier than making that case work right.  */
	if (is_integrable)
	  {
	    /* In case this is a static function, note that it has been
	       used.  */
	    if (! TREE_ADDRESSABLE (fndecl))
	      mark_addressable (fndecl);
	    is_integrable = 0;
	  }
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

	if (target && GET_CODE (target) == MEM)
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* Assign a temporary to hold the value.  */
	    tree d;

	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */

	    if (struct_value_size < 0)
	      abort ();

	    /* This DECL is just something to feed to mark_addressable;
	       it doesn't get pushed.  */
	    d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
	    DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
	    mark_addressable (d);
	    structure_value_addr = XEXP (DECL_RTL (d), 0);
	    TREE_USED (d) = 1;
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* If called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp;
#ifdef ACCUMULATE_OUTGOING_ARGS
      rtx before_call = get_last_insn ();
#endif

      temp = expand_inline_function (fndecl, actparms, target,
				     ignore, TREE_TYPE (exp),
				     structure_value_addr);

      /* If inlining succeeded, return.  */
      if (temp != (rtx) (HOST_WIDE_INT) -1)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  for (i = reg_parm_stack_space - 1; i >= 0; i--)
	    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
	      break;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn, seq;

	      /* Look for a call in the inline function code.
		 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      if (insn)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.

		     Add the stack space reserved for register arguments, if
		     any, in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */

		  int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
				+ reg_parm_stack_space);

		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  emit_insns_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
#endif

	  /* If the result is equivalent to TARGET, return TARGET to simplify
	     checks in store_expr.  They can be equivalent but not equal in the
	     case of a function that returns BLKmode.  */
	  if (temp != target && rtx_equal_p (temp, target))
	    return target;
	  return temp;
	}

      /* If inlining failed, mark FNDECL as needing to be compiled
	 separately after all.  If function was declared inline,
	 give a warning.  */
      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
	  && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
	{
	  warning_with_decl (fndecl, "inlining failed in call to `%s'");
	  warning ("called from here");
	}
      mark_addressable (fndecl);
    }

  /* When calling a const function, we must pop the stack args right away,
     so that the pop is deleted or moved with the call.  */
  if (is_const)
    NO_DEFER_POP;

  function_call_count++;

  if (fndecl && DECL_NAME (fndecl))
    name = IDENTIFIER_POINTER (DECL_NAME (fndecl));

#if 0
  /* Unless it's a call to a specific function that isn't alloca,
     if it has one argument, we must assume it might be alloca.  */

  may_be_alloca
    = (!(fndecl != 0 && strcmp (name, "alloca"))
       && actparms != 0
       && TREE_CHAIN (actparms) == 0);
#else
  /* We assume that alloca will always be called by name.  It
     makes no sense to pass it as a pointer-to-function to
     anything that does not understand its behavior.  */
  may_be_alloca
    = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
		 && name[0] == 'a'
		 && ! strcmp (name, "alloca"))
		|| (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
		    && name[0] == '_'
		    && ! strcmp (name, "__builtin_alloca"))));
#endif
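  /* MAY_BE_ALLOCA is used for bookkeeping below: it sets
     current_function_calls_alloca and forces any pending stack
     adjustment to be flushed before the call (see the two uses that
     follow).  */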

  /* See if this is a call to a function that can return more than once
     or a call to longjmp.  */

  returns_twice = 0;
  is_longjmp = 0;
  is_malloc = 0;

  if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      char *tname = name;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  returns_twice
	    = ((tname[1] == 'e'
		&& (! strcmp (tname, "setjmp")
		    || ! strcmp (tname, "setjmp_syscall")))
	       || (tname[1] == 'i'
		   && ! strcmp (tname, "sigsetjmp"))
	       || (tname[1] == 'a'
		   && ! strcmp (tname, "savectx")));
	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    is_longjmp = 1;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	is_longjmp = 1;
      /* XXX should have "malloc" attribute on functions instead
	 of recognizing them by name.  */
      else if (! strcmp (tname, "malloc")
	       || ! strcmp (tname, "calloc")
	       || ! strcmp (tname, "realloc")
	       || ! strcmp (tname, "__builtin_new")
	       || ! strcmp (tname, "__builtin_vec_new"))
	is_malloc = 1;
    }

  if (may_be_alloca)
    current_function_calls_alloca = 1;

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now
     if there is any chance this might be a call to alloca.  */

  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0 && may_be_alloca))
    do_pending_stack_adjust ();

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (TREE_CODE (funtype) != POINTER_TYPE)
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Push the temporary stack slot level so that we can free any temporaries
     we make.  */
  push_temp_slots ();

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The last argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value_rtx == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (GET_CODE (structure_value_addr) != REG
#ifdef ACCUMULATE_OUTGOING_ARGS
		  || (stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
#endif
		  ? copy_addr_to_reg (structure_value_addr)
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
  num_actuals = i;

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is zero,
     this machine will be able to place unnamed args that were passed in
     registers into the stack.  So treat all args as named.  This allows the
     insns emitted for a specific argument list to be independent of the
     function declaration.

     If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
     way to pass unnamed args in registers, so we must force them into
     memory.  */

  if ((STRICT_ARGUMENT_NAMING
#ifndef SETUP_INCOMING_VARARGS
       || 1
#endif
       )
      && TYPE_ARG_TYPES (funtype) != 0)
    n_named_args
      = (list_length (TYPE_ARG_TYPES (funtype))
	 /* Don't include the last named arg.  */
	 - (STRICT_ARGUMENT_NAMING ? 0 : -1)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Make a vector to hold all the information about each arg.  */
  args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
  bzero ((char *) args, num_actuals * sizeof (struct arg_data));

  args_size.constant = 0;
  args_size.var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif
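  /* For example, with num_actuals == 3 and PUSH_ARGS_REVERSED defined,
     the loop below visits I = 2, 1, 0, so the first argument as written
     ends up in args[2] and is the last one pushed.  */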

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one?  */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
			  || (TREE_INT_CST_LOW (TYPE_SIZE (type))
			      > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		      old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space (size_rtx,
								    NULL_RTX,
								    TYPE_ALIGN (type)));
		}
	      else
		{
		  int size = int_size_in_bytes (type);
		  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
		}

	      MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);

	      store_expr (args[i].tree_value, copy, 0);
	      is_const = 0;

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, &args_size, &args[i].offset,
			     &args[i].size);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;
#endif

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size.constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;

      args[i].slot_offset.constant = -args_size.constant;
      if (args_size.var)
	{
	  SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
	}
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
						     args_size.var);
#endif

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  original_args_size = args_size;
  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't try to
	 make a cse'able block for this call.  We may be able to do this
	 eventually, but it is too complicated to keep track of what insns go
	 in the cse'able block and which don't.  */

      is_const = 0;
      must_preallocate = 1;

      args_size.var = ARGS_SIZE_TREE (args_size);
      args_size.constant = 0;

#ifdef STACK_BOUNDARY
      if (STACK_BOUNDARY != BITS_PER_UNIT)
	args_size.var = round_up (args_size.var, STACK_BYTES);
#endif

      if (reg_parm_stack_space > 0)
	{
	  args_size.var
	    = size_binop (MAX_EXPR, args_size.var,
			  size_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size.var
	    = size_binop (MINUS_EXPR, args_size.var,
			  size_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
#ifdef STACK_BOUNDARY
      args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			     / STACK_BYTES) * STACK_BYTES);
#endif
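      /* The expression above rounds ARGS_SIZE.CONSTANT up to a multiple of
	 STACK_BYTES; e.g. with STACK_BYTES == 8 a 20-byte argument block
	 becomes 24 bytes.  */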

      args_size.constant = MAX (args_size.constant,
				reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size.constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size.constant -= reg_parm_stack_space;
#endif
    }

  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size.constant
	  && args_size.constant > 0)
	must_preallocate = 1;
    }

  /* If the structure value address will reference the stack pointer, we must
     stabilize it.  We don't need to do this if we know that we are not going
     to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
      && (args_size.var
#ifndef ACCUMULATE_OUTGOING_ARGS
	  || args_size.constant
#endif
	  ))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
	|| ((args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 1))
	|| (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 0)))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].initial_value = args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].initial_value, 0);

	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);
      }

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (is_const || is_malloc)
    start_sequence ();

  /* If we have no actual push instructions, or shouldn't use them,
     make space for all args right now.  */

  if (args_size.var != 0)
    {
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
#endif
	}
      argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
    }
  else
    {
      /* Note that we must go through the motions of allocating an argument
	 block even if the size is zero because we may be storing args
	 in the area reserved for register arguments, which may be part of
	 the stack frame.  */

      int needed = args_size.constant;

      /* Store the maximum argument space used.  It will be pushed by
	 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	 checking).  */

      if (needed > current_function_outgoing_args_size)
	current_function_outgoing_args_size = needed;

      if (must_preallocate)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* Since the stack pointer will never be pushed, it is possible for
	     the evaluation of a parm to clobber something we have already
	     written to the stack.  Since most function calls on RISC machines
	     do not use the stack, this is uncommon, but must work correctly.

	     Therefore, we save any area of the stack that was already written
	     and that we are using.  Here we set up to do this by making a new
	     stack usage map from the old one.  The actual save will be done
	     by store_one_arg.

	     Another approach might be to try to reorder the argument
	     evaluations to avoid this conflicting stack usage.  */

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* Since we will be writing into the entire argument area, the
	     map must be allocated for its entire size, not just the part that
	     is the responsibility of the caller.  */
	  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed + 1);
#else
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed);
#endif
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

	  if (initial_highest_arg_in_use)
	    bcopy (initial_stack_usage_map, stack_usage_map,
		   initial_highest_arg_in_use);

	  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	    bzero (&stack_usage_map[initial_highest_arg_in_use],
		   highest_outgoing_arg_in_use - initial_highest_arg_in_use);
	  needed = 0;

	  /* The address of the outgoing argument list must not be copied to a
	     register here, because argblock would be left pointing to the
	     wrong place after the call to allocate_dynamic_stack_space
	     below.  */

	  argblock = virtual_outgoing_args_rtx;

#else /* not ACCUMULATE_OUTGOING_ARGS */
	  if (inhibit_defer_pop == 0)
	    {
	      /* Try to reuse some or all of the pending_stack_adjust
		 to get this space.  Maybe we can avoid any pushing.  */
	      if (needed > pending_stack_adjust)
		{
		  needed -= pending_stack_adjust;
		  pending_stack_adjust = 0;
		}
	      else
		{
		  pending_stack_adjust -= needed;
		  needed = 0;
		}
	    }
	  /* Special case this because overhead of `push_block' in this
	     case is non-trivial.  */
	  if (needed == 0)
	    argblock = virtual_outgoing_args_rtx;
	  else
	    argblock = push_block (GEN_INT (needed), 0, 0);

	  /* We only really need to call `copy_to_reg' in the case where push
	     insns are going to be used to pass ARGBLOCK to a function
	     call in ARGS.  In that case, the stack pointer changes value
	     from the allocation point to the call point, and hence
	     the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
	     But might as well always do it.  */
	  argblock = copy_to_reg (argblock);
#endif /* not ACCUMULATE_OUTGOING_ARGS */
	}
    }

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* The save/restore code in store_one_arg handles all cases except one:
     a constructor call (including a C function returning a BLKmode struct)
     to initialize an argument.  */
  if (stack_arg_under_construction)
    {
#ifndef OUTGOING_REG_PARM_STACK_SPACE
      rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
#else
      rtx push_size = GEN_INT (args_size.constant);
#endif
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
	  /* Make a new map for the new argument list.  */
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
	  bzero (stack_usage_map, highest_outgoing_arg_in_use);
	  highest_outgoing_arg_in_use = 0;
	}
      allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
    }
  /* If argument evaluation might modify the stack pointer, copy the
     address of the argument list to a register.  */
  for (i = 0; i < num_actuals; i++)
    if (args[i].pass_on_stack)
      {
	argblock = copy_addr_to_reg (argblock);
	break;
      }
#endif


  /* If we preallocated stack space, compute the address of each argument.
     We need not ensure it is a valid memory address here; it will be
     validized when it is used.  */
  if (argblock)
    {
      rtx arg_reg = argblock;
      int arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  MEM_IN_STRUCT_P (args[i].stack)
	    = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	}
    }
1589
1590#ifdef PUSH_ARGS_REVERSED
1591#ifdef STACK_BOUNDARY
1592 /* If we push args individually in reverse order, perform stack alignment
1593 before the first push (the last arg). */
1594 if (argblock == 0)
e5d70561
RK
1595 anti_adjust_stack (GEN_INT (args_size.constant
1596 - original_args_size.constant));
51bbfa0c
RS
1597#endif
1598#endif
1599
1600 /* Don't try to defer pops if preallocating, not even from the first arg,
1601 since ARGBLOCK probably refers to the SP. */
1602 if (argblock)
1603 NO_DEFER_POP;
1604
1605 /* Get the function to call, in the form of RTL. */
1606 if (fndecl)
ef5d30c9
RK
1607 {
1608 /* If this is the first use of the function, see if we need to
1609 make an external definition for it. */
1610 if (! TREE_USED (fndecl))
1611 {
1612 assemble_external (fndecl);
1613 TREE_USED (fndecl) = 1;
1614 }
1615
1616 /* Get a SYMBOL_REF rtx for the function address. */
1617 funexp = XEXP (DECL_RTL (fndecl), 0);
1618 }
51bbfa0c
RS
1619 else
1620 /* Generate an rtx (probably a pseudo-register) for the address. */
1621 {
cc79451b 1622 push_temp_slots ();
e5d70561 1623 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
cc79451b 1624 pop_temp_slots (); /* FUNEXP can't be BLKmode */
7815214e
RK
1625
1626 /* Check the function is executable. */
1627 if (flag_check_memory_usage)
1628 emit_library_call (chkr_check_exec_libfunc, 1,
1629 VOIDmode, 1,
1630 funexp, ptr_mode);
51bbfa0c
RS
1631 emit_queue ();
1632 }
1633
1634 /* Figure out the register where the value, if any, will come back. */
1635 valreg = 0;
1636 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1637 && ! structure_value_addr)
1638 {
1639 if (pcc_struct_value)
1640 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1641 fndecl);
1642 else
1643 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1644 }
1645
1646 /* Precompute all register parameters. It isn't safe to compute anything
0f41302f 1647 once we have started filling any specific hard regs. */
51bbfa0c
RS
1648 reg_parm_seen = 0;
1649 for (i = 0; i < num_actuals; i++)
1650 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1651 {
1652 reg_parm_seen = 1;
1653
1654 if (args[i].value == 0)
1655 {
cc79451b 1656 push_temp_slots ();
e5d70561
RK
1657 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1658 VOIDmode, 0);
51bbfa0c 1659 preserve_temp_slots (args[i].value);
cc79451b 1660 pop_temp_slots ();
51bbfa0c
RS
1661
1662 /* ANSI doesn't require a sequence point here,
1663 but PCC has one, so this will avoid some problems. */
1664 emit_queue ();
1665 }
84b55618
RK
1666
1667 /* If we are to promote the function arg to a wider mode,
1668 do it now. */
84b55618 1669
843fec55
RK
1670 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1671 args[i].value
1672 = convert_modes (args[i].mode,
1673 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1674 args[i].value, args[i].unsignedp);
ebef2728
RK
1675
1676 /* If the value is expensive, and we are inside an appropriately
1677 short loop, put the value into a pseudo and then put the pseudo
01368078
RK
1678 into the hard reg.
1679
1680 For small register classes, also do this if this call uses
1681 register parameters. This is to avoid reload conflicts while
1682 loading the parameter registers. */
ebef2728
RK
1683
1684 if ((! (GET_CODE (args[i].value) == REG
1685 || (GET_CODE (args[i].value) == SUBREG
1686 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1687 && args[i].mode != BLKmode
1688 && rtx_cost (args[i].value, SET) > 2
f95182a4 1689 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
e9a25f70 1690 || preserve_subexpressions_p ()))
ebef2728 1691 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
51bbfa0c
RS
1692 }
1693
1694#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
e5e809f4 1695
51bbfa0c
RS
1696 /* The argument list is the property of the called routine and it
1697 may clobber it. If the fixed area has been used for previous
1698 parameters, we must save and restore it.
1699
1700 Here we compute the boundary of the area that needs to be saved, if any. */
1701
b94301c2
RS
1702#ifdef ARGS_GROW_DOWNWARD
1703 for (i = 0; i < reg_parm_stack_space + 1; i++)
1704#else
6f90e075 1705 for (i = 0; i < reg_parm_stack_space; i++)
b94301c2 1706#endif
51bbfa0c
RS
1707 {
1708 if (i >= highest_outgoing_arg_in_use
1709 || stack_usage_map[i] == 0)
1710 continue;
1711
1712 if (low_to_save == -1)
1713 low_to_save = i;
1714
1715 high_to_save = i;
1716 }
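/* Illustrative: if only bytes 4 through 11 of the fixed register-parm
   area are marked in stack_usage_map, the loop above leaves
   low_to_save == 4 and high_to_save == 11, and those 8 bytes are
   saved below.  */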
1717
1718 if (low_to_save >= 0)
1719 {
1720 int num_to_save = high_to_save - low_to_save + 1;
1721 enum machine_mode save_mode
1722 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1723 rtx stack_area;
1724
1725 /* If we don't have the required alignment, must do this in BLKmode. */
1726 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1727 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1728 save_mode = BLKmode;
1729
ceb83206 1730#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
1731 stack_area = gen_rtx_MEM (save_mode,
1732 memory_address (save_mode,
38a448ca 1733 plus_constant (argblock,
ceb83206 1734 - high_to_save)));
b94301c2 1735#else
ceb83206
JL
1736 stack_area = gen_rtx_MEM (save_mode,
1737 memory_address (save_mode,
38a448ca 1738 plus_constant (argblock,
ceb83206 1739 low_to_save)));
b94301c2 1740#endif
51bbfa0c
RS
1741 if (save_mode == BLKmode)
1742 {
6fa51029 1743 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3668e76e 1744 MEM_IN_STRUCT_P (save_area) = 0;
51bbfa0c 1745 emit_block_move (validize_mem (save_area), stack_area,
e5d70561 1746 GEN_INT (num_to_save),
51bbfa0c
RS
1747 PARM_BOUNDARY / BITS_PER_UNIT);
1748 }
1749 else
1750 {
1751 save_area = gen_reg_rtx (save_mode);
1752 emit_move_insn (save_area, stack_area);
1753 }
1754 }
1755#endif
1756
1757
1758 /* Now store (and compute if necessary) all non-register parms.
1759 These come before register parms, since they can require block-moves,
1760 which could clobber the registers used for register parms.
1761 Parms which have partial registers are not stored here,
1762 but we do preallocate space here if they want that. */
1763
1764 for (i = 0; i < num_actuals; i++)
1765 if (args[i].reg == 0 || args[i].pass_on_stack)
1766 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1767 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c 1768
4ab56118
RK
1769 /* If we have a parm that is passed in registers but not in memory
1770 and whose alignment does not permit a direct copy into registers,
1771 make a group of pseudos that correspond to each register that we
1772 will later fill. */
1773
45d44c98
RK
1774 if (STRICT_ALIGNMENT)
1775 for (i = 0; i < num_actuals; i++)
1776 if (args[i].reg != 0 && ! args[i].pass_on_stack
4ab56118 1777 && args[i].mode == BLKmode
45d44c98
RK
1778 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1779 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1780 {
1781 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1782 int big_endian_correction = 0;
4ab56118 1783
45d44c98
RK
1784 args[i].n_aligned_regs
1785 = args[i].partial ? args[i].partial
1786 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
4ab56118 1787
45d44c98
RK
1788 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1789 * args[i].n_aligned_regs);
4ab56118 1790
45d44c98
RK
1791 /* Structures smaller than a word are aligned to the least
1792 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1793 this means we must skip the empty high order bytes when
1794 calculating the bit offset. */
1795 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1796 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
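/* Illustrative: with 32-bit words on a BYTES_BIG_ENDIAN target, a 3-byte
   structure occupies the low-order 24 bits of its word, so
   big_endian_correction = 32 - 3 * 8 = 8 bits of padding to skip.  */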
8498efd0 1797
45d44c98
RK
1798 for (j = 0; j < args[i].n_aligned_regs; j++)
1799 {
1800 rtx reg = gen_reg_rtx (word_mode);
1801 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1802 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1803 int bitpos;
1804
1805 args[i].aligned_regs[j] = reg;
1806
1807 /* Clobber REG and move each partword into it. Ensure we don't
1808 go past the end of the structure. Note that the loop below
1809 works because we've already verified that padding
a22ad972 1810 and endianness are compatible.
45d44c98 1811
a22ad972
DE
1812 We used to emit a clobber here, but that doesn't let later
1813 passes optimize the instructions we emit. By storing 0 into
1814 the register, later passes know that the first AND to zero out
1815 the bitfield being set in the register is unnecessary. The store
1816 of 0 will be deleted, as will at least the first AND. */
1817
1818 emit_move_insn (reg, const0_rtx);
45d44c98
RK
1819
1820 for (bitpos = 0;
1821 bitpos < BITS_PER_WORD && bytes > 0;
1822 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1823 {
1824 int xbitpos = bitpos + big_endian_correction;
1825
1826 store_bit_field (reg, bitsize, xbitpos, word_mode,
1827 extract_bit_field (word, bitsize, bitpos, 1,
1828 NULL_RTX, word_mode,
1829 word_mode,
1830 bitsize / BITS_PER_UNIT,
1831 BITS_PER_WORD),
1832 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1833 }
1834 }
1835 }
4ab56118 1836
51bbfa0c
RS
1837 /* Now store any partially-in-registers parm.
1838 This is the last place a block-move can happen. */
1839 if (reg_parm_seen)
1840 for (i = 0; i < num_actuals; i++)
1841 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1842 store_one_arg (&args[i], argblock, may_be_alloca,
6f90e075 1843 args_size.var != 0, fndecl, reg_parm_stack_space);
51bbfa0c
RS
1844
1845#ifndef PUSH_ARGS_REVERSED
1846#ifdef STACK_BOUNDARY
1847 /* If we pushed args in forward order, perform stack alignment
1848 after pushing the last arg. */
1849 if (argblock == 0)
e5d70561
RK
1850 anti_adjust_stack (GEN_INT (args_size.constant
1851 - original_args_size.constant));
51bbfa0c
RS
1852#endif
1853#endif
1854
756e0e12
RS
1855 /* If register arguments require space on the stack and stack space
1856 was not preallocated, allocate stack space here for arguments
1857 passed in registers. */
6e716e89 1858#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
756e0e12 1859 if (must_preallocate == 0 && reg_parm_stack_space > 0)
e5d70561 1860 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12
RS
1861#endif
1862
51bbfa0c
RS
1863 /* Pass the function the address in which to return a structure value. */
1864 if (structure_value_addr && ! structure_value_addr_parm)
1865 {
1866 emit_move_insn (struct_value_rtx,
1867 force_reg (Pmode,
e5d70561
RK
1868 force_operand (structure_value_addr,
1869 NULL_RTX)));
7815214e
RK
1870
1871 /* Mark the memory for the aggregate as write-only. */
1872 if (flag_check_memory_usage)
1873 emit_library_call (chkr_set_right_libfunc, 1,
1874 VOIDmode, 3,
1875 structure_value_addr, ptr_mode,
1876 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
956d6950
JL
1877 GEN_INT (MEMORY_USE_WO),
1878 TYPE_MODE (integer_type_node));
7815214e 1879
51bbfa0c 1880 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 1881 use_reg (&call_fusage, struct_value_rtx);
51bbfa0c
RS
1882 }
1883
77cac2f2 1884 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
8b0f9101 1885
51bbfa0c
RS
1886 /* Now do the register loads required for any wholly-register parms or any
1887 parms which are passed both on the stack and in a register. Their
1888 expressions were already evaluated.
1889
1890 Mark all register-parms as living through the call, putting these USE
77cac2f2 1891 insns in the CALL_INSN_FUNCTION_USAGE field. */
51bbfa0c 1892
bb1b857a
GK
1893#ifdef LOAD_ARGS_REVERSED
1894 for (i = num_actuals - 1; i >= 0; i--)
1895#else
51bbfa0c 1896 for (i = 0; i < num_actuals; i++)
bb1b857a 1897#endif
51bbfa0c 1898 {
cacbd532 1899 rtx reg = args[i].reg;
51bbfa0c 1900 int partial = args[i].partial;
cacbd532 1901 int nregs;
51bbfa0c 1902
cacbd532 1903 if (reg)
51bbfa0c 1904 {
6b972c4f
JW
1905 /* Set to non-negative if we must move a word at a time, even if just
1906 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1907 we just use a normal move insn. This value can be zero if the
1908 argument is a zero size structure with no fields. */
51bbfa0c
RS
1909 nregs = (partial ? partial
1910 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
6b972c4f
JW
1911 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1912 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1913 : -1));
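/* For example, a 10-byte BLKmode argument with 4-byte words gives
   nregs = 3, while a DFmode argument passed wholly in a register gives
   nregs = -1 and is handled by a single move insn below.  */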
51bbfa0c 1914
cacbd532
JW
1915 /* Handle calls that pass values in multiple non-contiguous
1916 locations. The Irix 6 ABI has examples of this. */
1917
1918 if (GET_CODE (reg) == PARALLEL)
1919 emit_group_load (reg, args[i].value);
1920
51bbfa0c
RS
1921 /* If simple case, just do move. If normal partial, store_one_arg
1922 has already loaded the register for us. In all other cases,
1923 load the register(s) from memory. */
1924
cacbd532 1925 else if (nregs == -1)
51bbfa0c 1926 emit_move_insn (reg, args[i].value);
4ab56118 1927
4ab56118
RK
1928 /* If we have pre-computed the values to put in the registers in
1929 the case of non-aligned structures, copy them in now. */
1930
1931 else if (args[i].n_aligned_regs != 0)
1932 for (j = 0; j < args[i].n_aligned_regs; j++)
38a448ca 1933 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
4ab56118 1934 args[i].aligned_regs[j]);
4ab56118 1935
cacbd532 1936 else if (partial == 0 || args[i].pass_on_stack)
6b972c4f
JW
1937 move_block_to_reg (REGNO (reg),
1938 validize_mem (args[i].value), nregs,
1939 args[i].mode);
0304dfbb 1940
cacbd532
JW
1941 /* Handle calls that pass values in multiple non-contiguous
1942 locations. The Irix 6 ABI has examples of this. */
1943 if (GET_CODE (reg) == PARALLEL)
1944 use_group_regs (&call_fusage, reg);
1945 else if (nregs == -1)
0304dfbb
DE
1946 use_reg (&call_fusage, reg);
1947 else
1948 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
51bbfa0c
RS
1949 }
1950 }
1951
1952 /* Perform postincrements before actually calling the function. */
1953 emit_queue ();
1954
1955 /* All arguments and registers used for the call must be set up by now! */
1956
51bbfa0c 1957 /* Generate the actual call instruction. */
2c8da025 1958 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
51bbfa0c 1959 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
77cac2f2 1960 valreg, old_inhibit_defer_pop, call_fusage, is_const);
51bbfa0c
RS
1961
1962 /* If call is cse'able, make appropriate pair of reg-notes around it.
1963 Test valreg so we don't crash; may safely ignore `const'
80a3ad45
JW
1964 if return type is void. Disable for PARALLEL return values, because
1965 we have no way to move such values into a pseudo register. */
1966 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
51bbfa0c
RS
1967 {
1968 rtx note = 0;
1969 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1970 rtx insns;
1971
9ae8ffe7
JL
1972 /* Mark the return value as a pointer if needed. */
1973 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
1974 {
1975 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
1976 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
1977 }
1978
51bbfa0c
RS
1979 /* Construct an "equal form" for the value which mentions all the
1980 arguments in order as well as the function name. */
1981#ifdef PUSH_ARGS_REVERSED
1982 for (i = 0; i < num_actuals; i++)
38a448ca 1983 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c
RS
1984#else
1985 for (i = num_actuals - 1; i >= 0; i--)
38a448ca 1986 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
51bbfa0c 1987#endif
38a448ca 1988 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
51bbfa0c
RS
1989
1990 insns = get_insns ();
1991 end_sequence ();
1992
1993 emit_libcall_block (insns, temp, valreg, note);
1994
1995 valreg = temp;
1996 }
4f48d56a
RK
1997 else if (is_const)
1998 {
1999 /* Otherwise, just write out the sequence without a note. */
2000 rtx insns = get_insns ();
2001
2002 end_sequence ();
2003 emit_insns (insns);
2004 }
9ae8ffe7
JL
2005 else if (is_malloc)
2006 {
2007 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2008 rtx last, insns;
2009
2010 /* The return value from a malloc-like function is a pointer. */
2011 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2012 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2013
2014 emit_move_insn (temp, valreg);
2015
2016 /* The return value from a malloc-like function can not alias
2017 anything else. */
2018 last = get_last_insn ();
2019 REG_NOTES (last) =
38a448ca 2020 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
9ae8ffe7
JL
2021
2022 /* Write out the sequence. */
2023 insns = get_insns ();
2024 end_sequence ();
2025 emit_insns (insns);
2026 valreg = temp;
2027 }
51bbfa0c
RS
2028
2029 /* For calls to `setjmp', etc., inform flow.c it should complain
2030 if nonvolatile values are live. */
2031
2032 if (returns_twice)
2033 {
2034 emit_note (name, NOTE_INSN_SETJMP);
2035 current_function_calls_setjmp = 1;
2036 }
2037
2038 if (is_longjmp)
2039 current_function_calls_longjmp = 1;
2040
2041 /* Notice functions that cannot return.
2042 If optimizing, insns emitted below will be dead.
2043 If not optimizing, they will exist, which is useful
2044 if the user uses the `return' command in the debugger. */
2045
2046 if (is_volatile || is_longjmp)
2047 emit_barrier ();
2048
51bbfa0c
RS
2049 /* If value type not void, return an rtx for the value. */
2050
e976b8b2
MS
2051 /* If there are cleanups to be called, don't use a hard reg as target.
2052 We need to double check this and see if it matters anymore. */
e9a25f70 2053 if (any_pending_cleanups (1)
51bbfa0c
RS
2054 && target && REG_P (target)
2055 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2056 target = 0;
2057
2058 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2059 || ignore)
2060 {
2061 target = const0_rtx;
2062 }
2063 else if (structure_value_addr)
2064 {
2065 if (target == 0 || GET_CODE (target) != MEM)
29008b51 2066 {
38a448ca
RH
2067 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2068 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2069 structure_value_addr));
05e3bdb9 2070 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
29008b51 2071 }
51bbfa0c
RS
2072 }
2073 else if (pcc_struct_value)
2074 {
f78b5ca1
JL
2075 /* This is the special C++ case where we need to
2076 know what the true target was. We take care to
2077 never use this value more than once in one expression. */
38a448ca
RH
2078 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2079 copy_to_reg (valreg));
f78b5ca1 2080 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
51bbfa0c 2081 }
cacbd532
JW
2082 /* Handle calls that return values in multiple non-contiguous locations.
2083 The Irix 6 ABI has examples of this. */
2084 else if (GET_CODE (valreg) == PARALLEL)
2085 {
2086 if (target == 0)
2087 {
2088 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2b4092f2 2089 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
cacbd532
JW
2090 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2091 preserve_temp_slots (target);
2092 }
2093
2094 emit_group_store (target, valreg);
2095 }
059c3d84
JW
2096 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2097 && GET_MODE (target) == GET_MODE (valreg))
2098 /* TARGET and VALREG cannot be equal at this point because the latter
2099 would not have REG_FUNCTION_VALUE_P true, while the former would if
2100 it were referring to the same register.
2101
2102 If they refer to the same register, this move will be a no-op, except
2103 when function inlining is being done. */
2104 emit_move_insn (target, valreg);
766b19fb
JL
2105 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2106 {
2107 /* Some machines (the PA for example) want to return all small
2108 structures in registers regardless of the structure's alignment.
2109
2110 Deal with them explicitly by copying from the return registers
2111 into the target MEM locations. */
2112 int bytes = int_size_in_bytes (TREE_TYPE (exp));
1b5c5873
RK
2113 rtx src, dst;
2114 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2115 int bitpos, xbitpos, big_endian_correction = 0;
766b19fb
JL
2116
2117 if (target == 0)
822e3422
RK
2118 {
2119 target = assign_stack_temp (BLKmode, bytes, 0);
2120 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2121 preserve_temp_slots (target);
2122 }
766b19fb 2123
e934eef9
RK
2124 /* This code assumes valreg is at least a full word. If it isn't,
2125 copy it into a new pseudo which is a full word. */
2126 if (GET_MODE (valreg) != BLKmode
2127 && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
144a3150 2128 valreg = convert_to_mode (word_mode, valreg,
e934eef9
RK
2129 TREE_UNSIGNED (TREE_TYPE (exp)));
2130
1b5c5873
RK
2131 /* Structures whose size is not a multiple of a word are aligned
2132 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2133 machine, this means we must skip the empty high order bytes when
2134 calculating the bit offset. */
2135 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2136 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2137 * BITS_PER_UNIT));
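/* Illustrative: a 6-byte structure with 32-bit words has 6 % 4 = 2
   significant bytes in its final word, so big_endian_correction
   = 32 - 2 * 8 = 16 bits of padding to skip.  */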
2138
2139 /* Copy the structure BITSIZE bits at a time.
2140
2141 We could probably emit more efficient code for machines
766b19fb
JL
2142 which do not use strict alignment, but it doesn't seem
2143 worth the effort at the current time. */
1b5c5873
RK
2144 for (bitpos = 0, xbitpos = big_endian_correction;
2145 bitpos < bytes * BITS_PER_UNIT;
2146 bitpos += bitsize, xbitpos += bitsize)
766b19fb 2147 {
1b5c5873
RK
2148
2149 /* We need a new source operand each time xbitpos is on a
2150 word boundary and when xbitpos == big_endian_correction
2151 (the first time through). */
2152 if (xbitpos % BITS_PER_WORD == 0
2153 || xbitpos == big_endian_correction)
2154 src = operand_subword_force (valreg,
2155 xbitpos / BITS_PER_WORD,
2156 BLKmode);
2157
2158 /* We need a new destination operand each time bitpos is on
2159 a word boundary. */
2160 if (bitpos % BITS_PER_WORD == 0)
2161 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
766b19fb 2162
1b5c5873
RK
2163 /* Use xbitpos for the source extraction (right justified) and
2164 bitpos for the destination store (left justified). */
2165 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2166 extract_bit_field (src, bitsize,
2167 xbitpos % BITS_PER_WORD, 1,
2168 NULL_RTX, word_mode,
2169 word_mode,
2170 bitsize / BITS_PER_UNIT,
2171 BITS_PER_WORD),
2172 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
766b19fb
JL
2173 }
2174 }
51bbfa0c
RS
2175 else
2176 target = copy_to_reg (valreg);
2177
84b55618 2178#ifdef PROMOTE_FUNCTION_RETURN
5d2ac65e
RK
2179 /* If we promoted this return value, make the proper SUBREG. TARGET
2180 might be const0_rtx here, so be careful. */
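/* Illustrative: if the call returns a HImode `short' that the target
   promotes to SImode, TARGET is an SImode reg here and the code below
   wraps it in a HImode SUBREG with SUBREG_PROMOTED_VAR_P set.  */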
2181 if (GET_CODE (target) == REG
766b19fb 2182 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
5d2ac65e 2183 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
84b55618 2184 {
321e0bba
RK
2185 tree type = TREE_TYPE (exp);
2186 int unsignedp = TREE_UNSIGNED (type);
84b55618 2187
321e0bba
RK
2188 /* If we don't promote as expected, something is wrong. */
2189 if (GET_MODE (target)
2190 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
5d2ac65e
RK
2191 abort ();
2192
38a448ca 2193 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
84b55618
RK
2194 SUBREG_PROMOTED_VAR_P (target) = 1;
2195 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2196 }
2197#endif
2198
2f4aa534
RS
2199 /* If size of args is variable or this was a constructor call for a stack
2200 argument, restore saved stack-pointer value. */
51bbfa0c
RS
2201
2202 if (old_stack_level)
2203 {
e5d70561 2204 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
51bbfa0c 2205 pending_stack_adjust = old_pending_adj;
d64f5a78 2206#ifdef ACCUMULATE_OUTGOING_ARGS
2f4aa534
RS
2207 stack_arg_under_construction = old_stack_arg_under_construction;
2208 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2209 stack_usage_map = initial_stack_usage_map;
d64f5a78 2210#endif
51bbfa0c 2211 }
51bbfa0c
RS
2212#ifdef ACCUMULATE_OUTGOING_ARGS
2213 else
2214 {
2215#ifdef REG_PARM_STACK_SPACE
2216 if (save_area)
2217 {
2218 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 2219#ifdef ARGS_GROW_DOWNWARD
51bbfa0c 2220 rtx stack_area
38a448ca
RH
2221 = gen_rtx_MEM (save_mode,
2222 memory_address (save_mode,
38a448ca 2223 plus_constant (argblock,
ceb83206 2224 - high_to_save)));
b94301c2 2225#else
ceb83206
JL
2226 rtx stack_area
2227 = gen_rtx_MEM (save_mode,
2228 memory_address (save_mode,
38a448ca 2229 plus_constant (argblock,
ceb83206 2230 low_to_save)));
b94301c2 2231#endif
51bbfa0c
RS
2232
2233 if (save_mode != BLKmode)
2234 emit_move_insn (stack_area, save_area);
2235 else
2236 emit_block_move (stack_area, validize_mem (save_area),
e5d70561
RK
2237 GEN_INT (high_to_save - low_to_save + 1),
2238 PARM_BOUNDARY / BITS_PER_UNIT);
51bbfa0c
RS
2239 }
2240#endif
2241
2242 /* If we saved any argument areas, restore them. */
2243 for (i = 0; i < num_actuals; i++)
2244 if (args[i].save_area)
2245 {
2246 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2247 rtx stack_area
38a448ca
RH
2248 = gen_rtx_MEM (save_mode,
2249 memory_address (save_mode,
2250 XEXP (args[i].stack_slot, 0)));
51bbfa0c
RS
2251
2252 if (save_mode != BLKmode)
2253 emit_move_insn (stack_area, args[i].save_area);
2254 else
2255 emit_block_move (stack_area, validize_mem (args[i].save_area),
e5d70561 2256 GEN_INT (args[i].size.constant),
51bbfa0c
RS
2257 PARM_BOUNDARY / BITS_PER_UNIT);
2258 }
2259
2260 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2261 stack_usage_map = initial_stack_usage_map;
2262 }
2263#endif
2264
59257ff7
RK
2265 /* If this was alloca, record the new stack level for nonlocal gotos.
2266 Check for the handler slots since we might not have a save area
0f41302f 2267 for non-local gotos. */
59257ff7
RK
2268
2269 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
e5d70561 2270 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
51bbfa0c
RS
2271
2272 pop_temp_slots ();
2273
2274 return target;
2275}
2276\f
322e3e34
RK
2277/* Output a library call to function FUN (a SYMBOL_REF rtx)
2278 (emitting the queue unless NO_QUEUE is nonzero),
2279 for a value of mode OUTMODE,
2280 with NARGS different arguments, passed as alternating rtx values
2281 and machine_modes to convert them to.
2282 The rtx values should have been passed through protect_from_queue already.
2283
2284 NO_QUEUE will be true if and only if the library call is a `const' call
2285 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2286 to the variable is_const in expand_call.
2287
2288 NO_QUEUE must be true for const calls, because if it isn't, then
2289 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2290 and will be lost if the libcall sequence is optimized away.
2291
2292 NO_QUEUE must be false for non-const calls, because if it isn't, the
2293 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2294 optimized. For instance, the instruction scheduler may incorrectly
2295 move memory references across the non-const call. */
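/* For example, the check emitted above when flag_check_memory_usage is set
   takes the form

	emit_library_call (chkr_check_exec_libfunc, 1,
			   VOIDmode, 1,
			   funexp, ptr_mode);

   i.e. FUN, NO_QUEUE, OUTMODE and NARGS, followed by NARGS (value, mode)
   pairs.  */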
2296
2297void
4f90e4a0
RK
2298emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2299 int nargs, ...))
322e3e34 2300{
4f90e4a0
RK
2301#ifndef __STDC__
2302 rtx orgfun;
2303 int no_queue;
2304 enum machine_mode outmode;
2305 int nargs;
2306#endif
322e3e34
RK
2307 va_list p;
2308 /* Total size in bytes of all the stack-parms scanned so far. */
2309 struct args_size args_size;
2310 /* Size of arguments before any adjustments (such as rounding). */
2311 struct args_size original_args_size;
2312 register int argnum;
322e3e34 2313 rtx fun;
322e3e34
RK
2314 int inc;
2315 int count;
2316 rtx argblock = 0;
2317 CUMULATIVE_ARGS args_so_far;
2318 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2319 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2320 struct arg *argvec;
2321 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2322 rtx call_fusage = 0;
e5e809f4 2323 int reg_parm_stack_space = 0;
f046b3cc
JL
2324#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2325 /* Define the boundary of the register parm stack space that needs to be
2326 saved, if any. */
2327 int low_to_save = -1, high_to_save;
2328 rtx save_area = 0; /* Place that it is saved */
2329#endif
2330
2331#ifdef ACCUMULATE_OUTGOING_ARGS
2332 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2333 char *initial_stack_usage_map = stack_usage_map;
2334 int needed;
2335#endif
2336
2337#ifdef REG_PARM_STACK_SPACE
69d4ca36 2338 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
2339#ifdef MAYBE_REG_PARM_STACK_SPACE
2340 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2341#else
2342 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2343#endif
2344#endif
322e3e34 2345
4f90e4a0
RK
2346 VA_START (p, nargs);
2347
2348#ifndef __STDC__
2349 orgfun = va_arg (p, rtx);
322e3e34
RK
2350 no_queue = va_arg (p, int);
2351 outmode = va_arg (p, enum machine_mode);
2352 nargs = va_arg (p, int);
4f90e4a0
RK
2353#endif
2354
2355 fun = orgfun;
322e3e34
RK
2356
2357 /* Copy all the libcall-arguments out of the varargs data
2358 and into a vector ARGVEC.
2359
2360 Compute how to pass each argument. We only support a very small subset
2361 of the full argument passing conventions to limit complexity here since
2362 library functions shouldn't have many args. */
2363
2364 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
f046b3cc
JL
2365 bzero ((char *) argvec, nargs * sizeof (struct arg));
2366
322e3e34 2367
eecb6f50 2368 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2369
2370 args_size.constant = 0;
2371 args_size.var = 0;
2372
888aa7a9
RS
2373 push_temp_slots ();
2374
322e3e34
RK
2375 for (count = 0; count < nargs; count++)
2376 {
2377 rtx val = va_arg (p, rtx);
2378 enum machine_mode mode = va_arg (p, enum machine_mode);
2379
2380 /* We cannot convert the arg value to the mode the library wants here;
2381 must do it earlier where we know the signedness of the arg. */
2382 if (mode == BLKmode
2383 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2384 abort ();
2385
2386 /* On some machines, there's no way to pass a float to a library fcn.
2387 Pass it as a double instead. */
2388#ifdef LIBGCC_NEEDS_DOUBLE
2389 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2390 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2391#endif
2392
2393 /* There's no need to call protect_from_queue, because
2394 either emit_move_insn or emit_push_insn will do that. */
2395
2396 /* Make sure it is a reasonable operand for a move or push insn. */
2397 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2398 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2399 val = force_operand (val, NULL_RTX);
2400
322e3e34
RK
2401#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2402 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2403 {
a44492f0
RK
2404 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2405 be viewed as just an efficiency improvement. */
888aa7a9
RS
2406 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2407 emit_move_insn (slot, val);
8301b6e2 2408 val = force_operand (XEXP (slot, 0), NULL_RTX);
a44492f0 2409 mode = Pmode;
888aa7a9 2410 }
322e3e34
RK
2411#endif
2412
888aa7a9
RS
2413 argvec[count].value = val;
2414 argvec[count].mode = mode;
2415
322e3e34 2416 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2417 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2418 abort ();
2419#ifdef FUNCTION_ARG_PARTIAL_NREGS
2420 argvec[count].partial
2421 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2422#else
2423 argvec[count].partial = 0;
2424#endif
2425
2426 locate_and_pad_parm (mode, NULL_TREE,
2427 argvec[count].reg && argvec[count].partial == 0,
2428 NULL_TREE, &args_size, &argvec[count].offset,
2429 &argvec[count].size);
2430
2431 if (argvec[count].size.var)
2432 abort ();
2433
e5e809f4 2434 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 2435 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
2436
2437 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2438 || reg_parm_stack_space > 0)
322e3e34
RK
2439 args_size.constant += argvec[count].size.constant;
2440
0f41302f 2441 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
2442 }
2443 va_end (p);
2444
f046b3cc
JL
2445#ifdef FINAL_REG_PARM_STACK_SPACE
2446 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2447 args_size.var);
2448#endif
2449
322e3e34
RK
2450 /* If this machine requires an external definition for library
2451 functions, write one out. */
2452 assemble_external_libcall (fun);
2453
2454 original_args_size = args_size;
2455#ifdef STACK_BOUNDARY
2456 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2457 / STACK_BYTES) * STACK_BYTES);
2458#endif
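/* E.g. (illustrative) with STACK_BYTES == 8, a 20-byte argument block is
   rounded up to 24 bytes here.  */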
2459
322e3e34 2460 args_size.constant = MAX (args_size.constant,
f046b3cc 2461 reg_parm_stack_space);
e5e809f4 2462
322e3e34 2463#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc 2464 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
2465#endif
2466
322e3e34
RK
2467 if (args_size.constant > current_function_outgoing_args_size)
2468 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
2469
2470#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
2471 /* Since the stack pointer will never be pushed, it is possible for
2472 the evaluation of a parm to clobber something we have already
2473 written to the stack. Since most function calls on RISC machines
2474 do not use the stack, this is uncommon, but must work correctly.
2475
2476 Therefore, we save any area of the stack that was already written
2477 and that we are using. Here we set up to do this by making a new
2478 stack usage map from the old one.
2479
2480 Another approach might be to try to reorder the argument
2481 evaluations to avoid this conflicting stack usage. */
2482
2483 needed = args_size.constant;
e5e809f4
JL
2484
2485#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
2486 /* Since we will be writing into the entire argument area, the
2487 map must be allocated for its entire size, not just the part that
2488 is the responsibility of the caller. */
2489 needed += reg_parm_stack_space;
2490#endif
2491
2492#ifdef ARGS_GROW_DOWNWARD
2493 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2494 needed + 1);
2495#else
2496 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2497 needed);
322e3e34 2498#endif
f046b3cc
JL
2499 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2500
2501 if (initial_highest_arg_in_use)
2502 bcopy (initial_stack_usage_map, stack_usage_map,
2503 initial_highest_arg_in_use);
2504
2505 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2506 bzero (&stack_usage_map[initial_highest_arg_in_use],
2507 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2508 needed = 0;
322e3e34 2509
f046b3cc
JL
2510 /* The address of the outgoing argument list must not be copied to a
2511 register here, because argblock would be left pointing to the
2512 wrong place after the call to allocate_dynamic_stack_space below.
2513 */
2514
2515 argblock = virtual_outgoing_args_rtx;
2516#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
2517#ifndef PUSH_ROUNDING
2518 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2519#endif
f046b3cc 2520#endif
322e3e34
RK
2521
2522#ifdef PUSH_ARGS_REVERSED
2523#ifdef STACK_BOUNDARY
2524 /* If we push args individually in reverse order, perform stack alignment
2525 before the first push (the last arg). */
2526 if (argblock == 0)
2527 anti_adjust_stack (GEN_INT (args_size.constant
2528 - original_args_size.constant));
2529#endif
2530#endif
2531
2532#ifdef PUSH_ARGS_REVERSED
2533 inc = -1;
2534 argnum = nargs - 1;
2535#else
2536 inc = 1;
2537 argnum = 0;
2538#endif
2539
f046b3cc
JL
2540#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2541 /* The argument list is the property of the called routine and it
2542 may clobber it. If the fixed area has been used for previous
2543 parameters, we must save and restore it.
2544
2545 Here we compute the boundary of the area that needs to be saved, if any. */
2546
2547#ifdef ARGS_GROW_DOWNWARD
2548 for (count = 0; count < reg_parm_stack_space + 1; count++)
2549#else
2550 for (count = 0; count < reg_parm_stack_space; count++)
2551#endif
2552 {
2553 if (count >= highest_outgoing_arg_in_use
2554 || stack_usage_map[count] == 0)
2555 continue;
2556
2557 if (low_to_save == -1)
2558 low_to_save = count;
2559
2560 high_to_save = count;
2561 }
2562
2563 if (low_to_save >= 0)
2564 {
2565 int num_to_save = high_to_save - low_to_save + 1;
2566 enum machine_mode save_mode
2567 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2568 rtx stack_area;
2569
2570 /* If we don't have the required alignment, must do this in BLKmode. */
2571 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2572 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2573 save_mode = BLKmode;
2574
ceb83206 2575#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
2576 stack_area = gen_rtx_MEM (save_mode,
2577 memory_address (save_mode,
38a448ca 2578 plus_constant (argblock,
ceb83206 2579 - high_to_save)));
f046b3cc 2580#else
ceb83206
JL
2581 stack_area = gen_rtx_MEM (save_mode,
2582 memory_address (save_mode,
38a448ca 2583 plus_constant (argblock,
ceb83206 2584 low_to_save)));
f046b3cc 2585#endif
f046b3cc
JL
2586 if (save_mode == BLKmode)
2587 {
2588 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2589 MEM_IN_STRUCT_P (save_area) = 0;
2590 emit_block_move (validize_mem (save_area), stack_area,
2591 GEN_INT (num_to_save),
2592 PARM_BOUNDARY / BITS_PER_UNIT);
2593 }
2594 else
2595 {
2596 save_area = gen_reg_rtx (save_mode);
2597 emit_move_insn (save_area, stack_area);
2598 }
2599 }
2600#endif
2601
322e3e34
RK
2602 /* Push the args that need to be pushed. */
2603
5e26979c
JL
2604 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2605 are to be pushed. */
322e3e34
RK
2606 for (count = 0; count < nargs; count++, argnum += inc)
2607 {
2608 register enum machine_mode mode = argvec[argnum].mode;
2609 register rtx val = argvec[argnum].value;
2610 rtx reg = argvec[argnum].reg;
2611 int partial = argvec[argnum].partial;
69d4ca36 2612#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 2613 int lower_bound, upper_bound, i;
69d4ca36 2614#endif
322e3e34
RK
2615
2616 if (! (reg != 0 && partial == 0))
f046b3cc
JL
2617 {
2618#ifdef ACCUMULATE_OUTGOING_ARGS
2619 /* If this is being stored into a pre-allocated, fixed-size stack
2620 area, save any previous data at that location. */
2621
2622#ifdef ARGS_GROW_DOWNWARD
2623 /* stack_slot is negative, but we want to index stack_usage_map
2624 with positive values. */
5e26979c
JL
2625 upper_bound = -argvec[argnum].offset.constant + 1;
2626 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 2627#else
5e26979c
JL
2628 lower_bound = argvec[argnum].offset.constant;
2629 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
2630#endif
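/* Illustrative: an argument at byte offset 8 with a 4-byte size occupies
   stack_usage_map entries 8 through 11 (lower_bound == 8,
   upper_bound == 12).  */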
2631
2632 for (i = lower_bound; i < upper_bound; i++)
2633 if (stack_usage_map[i]
f046b3cc
JL
2634 /* Don't store things in the fixed argument area at this point;
2635 it has already been saved. */
e5e809f4 2636 && i > reg_parm_stack_space)
f046b3cc
JL
2637 break;
2638
2639 if (i != upper_bound)
2640 {
e5e809f4 2641 /* We need to make a save area. See what mode we can make it. */
f046b3cc 2642 enum machine_mode save_mode
5e26979c 2643 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
2644 MODE_INT, 1);
2645 rtx stack_area
38a448ca
RH
2646 = gen_rtx_MEM (save_mode,
2647 memory_address (save_mode,
2648 plus_constant (argblock, argvec[argnum].offset.constant)));
5e26979c
JL
2649 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2650 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
2651 }
2652#endif
2653 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4
JL
2654 argblock, GEN_INT (argvec[argnum].offset.constant),
2655 reg_parm_stack_space);
f046b3cc
JL
2656
2657#ifdef ACCUMULATE_OUTGOING_ARGS
2658 /* Now mark the segment we just used. */
2659 for (i = lower_bound; i < upper_bound; i++)
2660 stack_usage_map[i] = 1;
2661#endif
2662
2663 NO_DEFER_POP;
2664 }
322e3e34
RK
2665 }
2666
2667#ifndef PUSH_ARGS_REVERSED
2668#ifdef STACK_BOUNDARY
2669 /* If we pushed args in forward order, perform stack alignment
2670 after pushing the last arg. */
2671 if (argblock == 0)
2672 anti_adjust_stack (GEN_INT (args_size.constant
2673 - original_args_size.constant));
2674#endif
2675#endif
2676
2677#ifdef PUSH_ARGS_REVERSED
2678 argnum = nargs - 1;
2679#else
2680 argnum = 0;
2681#endif
2682
77cac2f2 2683 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 2684
322e3e34
RK
2685 /* Now load any reg parms into their regs. */
2686
5e26979c
JL
2687 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2688 are to be pushed. */
322e3e34
RK
2689 for (count = 0; count < nargs; count++, argnum += inc)
2690 {
322e3e34
RK
2691 register rtx val = argvec[argnum].value;
2692 rtx reg = argvec[argnum].reg;
2693 int partial = argvec[argnum].partial;
2694
2695 if (reg != 0 && partial == 0)
2696 emit_move_insn (reg, val);
2697 NO_DEFER_POP;
2698 }
2699
2700 /* For version 1.37, try deleting this entirely. */
2701 if (! no_queue)
2702 emit_queue ();
2703
2704 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
2705 for (count = 0; count < nargs; count++)
2706 if (argvec[count].reg != 0)
77cac2f2 2707 use_reg (&call_fusage, argvec[count].reg);
322e3e34 2708
322e3e34
RK
2709 /* Don't allow popping to be deferred, since then
2710 cse'ing of library calls could delete a call and leave the pop. */
2711 NO_DEFER_POP;
2712
2713 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2714 will set inhibit_defer_pop to that value. */
2715
334c4f0f
RK
2716 /* The return type is needed to decide how many bytes the function pops.
2717 Signedness plays no role in that, so for simplicity, we pretend it's
2718 always signed. We also assume that the list of arguments passed has
2719 no impact, so we pretend it is unknown. */
2720
2c8da025
RK
2721 emit_call_1 (fun,
2722 get_identifier (XSTR (orgfun, 0)),
b3776927
RK
2723 build_function_type (outmode == VOIDmode ? void_type_node
2724 : type_for_mode (outmode, 0), NULL_TREE),
334c4f0f 2725 args_size.constant, 0,
322e3e34
RK
2726 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2727 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 2728 old_inhibit_defer_pop + 1, call_fusage, no_queue);
322e3e34 2729
888aa7a9
RS
2730 pop_temp_slots ();
2731
322e3e34
RK
2732 /* Now restore inhibit_defer_pop to its actual original value. */
2733 OK_DEFER_POP;
f046b3cc
JL
2734
2735#ifdef ACCUMULATE_OUTGOING_ARGS
2736#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
2737 if (save_area)
2738 {
2739 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 2740#ifdef ARGS_GROW_DOWNWARD
e9a25f70 2741 rtx stack_area
38a448ca
RH
2742 = gen_rtx_MEM (save_mode,
2743 memory_address (save_mode,
ceb83206
JL
2744 plus_constant (argblock,
2745 - high_to_save)));
f046b3cc 2746#else
ceb83206
JL
2747 rtx stack_area
2748 = gen_rtx_MEM (save_mode,
2749 memory_address (save_mode,
2750 plus_constant (argblock, low_to_save)));
f046b3cc 2751#endif
f046b3cc 2752
e9a25f70
JL
2753 if (save_mode != BLKmode)
2754 emit_move_insn (stack_area, save_area);
2755 else
2756 emit_block_move (stack_area, validize_mem (save_area),
2757 GEN_INT (high_to_save - low_to_save + 1),
2758 PARM_BOUNDARY / BITS_PER_UNIT);
2759 }
f046b3cc
JL
2760#endif
2761
2762 /* If we saved any argument areas, restore them. */
2763 for (count = 0; count < nargs; count++)
2764 if (argvec[count].save_area)
2765 {
2766 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2767 rtx stack_area
38a448ca
RH
2768 = gen_rtx_MEM (save_mode,
2769 memory_address (save_mode,
2770 plus_constant (argblock, argvec[count].offset.constant)));
f046b3cc
JL
2771
2772 emit_move_insn (stack_area, argvec[count].save_area);
2773 }
2774
2775 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2776 stack_usage_map = initial_stack_usage_map;
2777#endif
322e3e34
RK
2778}
2779\f
2780/* Like emit_library_call except that an extra argument, VALUE,
2781 comes second and says where to store the result.
fac0ad80
RS
2782 (If VALUE is zero, this function chooses a convenient way
2783 to return the value.)
322e3e34 2784
fac0ad80
RS
2785 This function returns an rtx for where the value is to be found.
2786 If VALUE is nonzero, VALUE is returned. */
2787
2788rtx
4f90e4a0
RK
2789emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2790 enum machine_mode outmode, int nargs, ...))
322e3e34 2791{
4f90e4a0
RK
2792#ifndef __STDC__
2793 rtx orgfun;
2794 rtx value;
2795 int no_queue;
2796 enum machine_mode outmode;
2797 int nargs;
2798#endif
322e3e34
RK
2799 va_list p;
2800 /* Total size in bytes of all the stack-parms scanned so far. */
2801 struct args_size args_size;
2802 /* Size of arguments before any adjustments (such as rounding). */
2803 struct args_size original_args_size;
2804 register int argnum;
322e3e34 2805 rtx fun;
322e3e34
RK
2806 int inc;
2807 int count;
2808 rtx argblock = 0;
2809 CUMULATIVE_ARGS args_so_far;
2810 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
f046b3cc 2811 struct args_size offset; struct args_size size; rtx save_area; };
322e3e34
RK
2812 struct arg *argvec;
2813 int old_inhibit_defer_pop = inhibit_defer_pop;
77cac2f2 2814 rtx call_fusage = 0;
322e3e34 2815 rtx mem_value = 0;
fac0ad80 2816 int pcc_struct_value = 0;
4f389214 2817 int struct_value_size = 0;
d61bee95 2818 int is_const;
e5e809f4 2819 int reg_parm_stack_space = 0;
69d4ca36 2820#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 2821 int needed;
69d4ca36 2822#endif
f046b3cc
JL
2823
2824#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2825 /* Define the boundary of the register parm stack space that needs to be
2826 saved, if any. */
2827 int low_to_save = -1, high_to_save;
2828 rtx save_area = 0; /* Place that it is saved */
2829#endif
2830
2831#ifdef ACCUMULATE_OUTGOING_ARGS
69d4ca36 2832 /* Size of the stack reserved for parameter registers. */
f046b3cc
JL
2833 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2834 char *initial_stack_usage_map = stack_usage_map;
2835#endif
2836
2837#ifdef REG_PARM_STACK_SPACE
2838#ifdef MAYBE_REG_PARM_STACK_SPACE
2839 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2840#else
2841 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2842#endif
2843#endif
322e3e34 2844
4f90e4a0
RK
2845 VA_START (p, nargs);
2846
2847#ifndef __STDC__
2848 orgfun = va_arg (p, rtx);
322e3e34
RK
2849 value = va_arg (p, rtx);
2850 no_queue = va_arg (p, int);
2851 outmode = va_arg (p, enum machine_mode);
2852 nargs = va_arg (p, int);
4f90e4a0
RK
2853#endif
2854
d61bee95 2855 is_const = no_queue;
4f90e4a0 2856 fun = orgfun;
322e3e34
RK
2857
2858 /* If this kind of value comes back in memory,
2859 decide where in memory it should come back. */
fac0ad80 2860 if (aggregate_value_p (type_for_mode (outmode, 0)))
322e3e34 2861 {
fac0ad80
RS
2862#ifdef PCC_STATIC_STRUCT_RETURN
2863 rtx pointer_reg
2864 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2865 0);
38a448ca 2866 mem_value = gen_rtx_MEM (outmode, pointer_reg);
fac0ad80
RS
2867 pcc_struct_value = 1;
2868 if (value == 0)
2869 value = gen_reg_rtx (outmode);
2870#else /* not PCC_STATIC_STRUCT_RETURN */
4f389214 2871 struct_value_size = GET_MODE_SIZE (outmode);
fac0ad80 2872 if (value != 0 && GET_CODE (value) == MEM)
322e3e34
RK
2873 mem_value = value;
2874 else
2875 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
fac0ad80 2876#endif
779c643a
JW
2877
2878 /* This call returns a big structure. */
2879 is_const = 0;
322e3e34
RK
2880 }
2881
2882 /* ??? Unfinished: must pass the memory address as an argument. */
2883
2884 /* Copy all the libcall-arguments out of the varargs data
2885 and into a vector ARGVEC.
2886
2887 Compute how to pass each argument. We only support a very small subset
2888 of the full argument passing conventions to limit complexity here since
2889 library functions shouldn't have many args. */
2890
2891 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
d3c4e2ab 2892 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
322e3e34 2893
eecb6f50 2894 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
322e3e34
RK
2895
2896 args_size.constant = 0;
2897 args_size.var = 0;
2898
2899 count = 0;
2900
888aa7a9
RS
2901 push_temp_slots ();
2902
322e3e34
RK
2903 /* If there's a structure value address to be passed,
2904 either pass it in the special place, or pass it as an extra argument. */
fac0ad80 2905 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
322e3e34
RK
2906 {
2907 rtx addr = XEXP (mem_value, 0);
fac0ad80 2908 nargs++;
322e3e34 2909
fac0ad80
RS
2910 /* Make sure it is a reasonable operand for a move or push insn. */
2911 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2912 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2913 addr = force_operand (addr, NULL_RTX);
322e3e34 2914
fac0ad80 2915 argvec[count].value = addr;
4fc3dcd5 2916 argvec[count].mode = Pmode;
fac0ad80 2917 argvec[count].partial = 0;
322e3e34 2918
4fc3dcd5 2919 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
322e3e34 2920#ifdef FUNCTION_ARG_PARTIAL_NREGS
4fc3dcd5 2921 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
fac0ad80 2922 abort ();
322e3e34
RK
2923#endif
2924
4fc3dcd5 2925 locate_and_pad_parm (Pmode, NULL_TREE,
fac0ad80
RS
2926 argvec[count].reg && argvec[count].partial == 0,
2927 NULL_TREE, &args_size, &argvec[count].offset,
2928 &argvec[count].size);
322e3e34
RK
2929
2930
fac0ad80 2931 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 2932 || reg_parm_stack_space > 0)
fac0ad80 2933 args_size.constant += argvec[count].size.constant;
322e3e34 2934
0f41302f 2935 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
fac0ad80
RS
2936
2937 count++;
322e3e34
RK
2938 }
2939
2940 for (; count < nargs; count++)
2941 {
2942 rtx val = va_arg (p, rtx);
2943 enum machine_mode mode = va_arg (p, enum machine_mode);
2944
2945 /* We cannot convert the arg value to the mode the library wants here;
2946 must do it earlier where we know the signedness of the arg. */
2947 if (mode == BLKmode
2948 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2949 abort ();
2950
2951 /* On some machines, there's no way to pass a float to a library fcn.
2952 Pass it as a double instead. */
2953#ifdef LIBGCC_NEEDS_DOUBLE
2954 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
7373d92d 2955 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
322e3e34
RK
2956#endif
2957
2958 /* There's no need to call protect_from_queue, because
2959 either emit_move_insn or emit_push_insn will do that. */
2960
2961 /* Make sure it is a reasonable operand for a move or push insn. */
2962 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2963 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2964 val = force_operand (val, NULL_RTX);
2965
322e3e34
RK
2966#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2967 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
888aa7a9 2968 {
a44492f0
RK
2969 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2970 be viewed as just an efficiency improvement. */
888aa7a9
RS
2971 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2972 emit_move_insn (slot, val);
2973 val = XEXP (slot, 0);
2974 mode = Pmode;
2975 }
322e3e34
RK
2976#endif
2977
888aa7a9
RS
2978 argvec[count].value = val;
2979 argvec[count].mode = mode;
2980
322e3e34 2981 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
cacbd532 2982 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
322e3e34
RK
2983 abort ();
2984#ifdef FUNCTION_ARG_PARTIAL_NREGS
2985 argvec[count].partial
2986 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2987#else
2988 argvec[count].partial = 0;
2989#endif
2990
2991 locate_and_pad_parm (mode, NULL_TREE,
2992 argvec[count].reg && argvec[count].partial == 0,
2993 NULL_TREE, &args_size, &argvec[count].offset,
2994 &argvec[count].size);
2995
2996 if (argvec[count].size.var)
2997 abort ();
2998
e5e809f4 2999 if (reg_parm_stack_space == 0 && argvec[count].partial)
322e3e34 3000 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
322e3e34
RK
3001
3002 if (argvec[count].reg == 0 || argvec[count].partial != 0
e5e809f4 3003 || reg_parm_stack_space > 0)
322e3e34
RK
3004 args_size.constant += argvec[count].size.constant;
3005
0f41302f 3006 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
322e3e34
RK
3007 }
3008 va_end (p);
3009
f046b3cc
JL
3010#ifdef FINAL_REG_PARM_STACK_SPACE
3011 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3012 args_size.var);
3013#endif
322e3e34
RK
3014 /* If this machine requires an external definition for library
3015 functions, write one out. */
3016 assemble_external_libcall (fun);
3017
3018 original_args_size = args_size;
3019#ifdef STACK_BOUNDARY
3020 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3021 / STACK_BYTES) * STACK_BYTES);
3022#endif
3023
322e3e34 3024 args_size.constant = MAX (args_size.constant,
f046b3cc 3025 reg_parm_stack_space);
e5e809f4 3026
322e3e34 3027#ifndef OUTGOING_REG_PARM_STACK_SPACE
fc990856 3028 args_size.constant -= reg_parm_stack_space;
322e3e34
RK
3029#endif
3030
322e3e34
RK
3031 if (args_size.constant > current_function_outgoing_args_size)
3032 current_function_outgoing_args_size = args_size.constant;
26a258fe
PB
3033
3034#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc
JL
3035 /* Since the stack pointer will never be pushed, it is possible for
3036 the evaluation of a parm to clobber something we have already
3037 written to the stack. Since most function calls on RISC machines
3038 do not use the stack, this is uncommon, but must work correctly.
3039
3040 Therefore, we save any area of the stack that was already written
3041 and that we are using. Here we set up to do this by making a new
3042 stack usage map from the old one.
3043
3044 Another approach might be to try to reorder the argument
3045 evaluations to avoid this conflicting stack usage. */
3046
3047 needed = args_size.constant;
e5e809f4
JL
3048
3049#ifndef OUTGOING_REG_PARM_STACK_SPACE
f046b3cc
JL
3050 /* Since we will be writing into the entire argument area, the
3051 map must be allocated for its entire size, not just the part that
3052 is the responsibility of the caller. */
3053 needed += reg_parm_stack_space;
3054#endif
3055
3056#ifdef ARGS_GROW_DOWNWARD
3057 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3058 needed + 1);
3059#else
3060 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3061 needed);
322e3e34 3062#endif
f046b3cc
JL
3063 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3064
3065 if (initial_highest_arg_in_use)
3066 bcopy (initial_stack_usage_map, stack_usage_map,
3067 initial_highest_arg_in_use);
3068
3069 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3070 bzero (&stack_usage_map[initial_highest_arg_in_use],
3071 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3072 needed = 0;
322e3e34 3073
f046b3cc
JL
3074 /* The address of the outgoing argument list must not be copied to a
3075 register here, because argblock would be left pointing to the
3076 wrong place after the call to allocate_dynamic_stack_space below.
3077 */
3078
3079 argblock = virtual_outgoing_args_rtx;
3080#else /* not ACCUMULATE_OUTGOING_ARGS */
322e3e34
RK
3081#ifndef PUSH_ROUNDING
3082 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3083#endif
f046b3cc 3084#endif
322e3e34
RK
3085
3086#ifdef PUSH_ARGS_REVERSED
3087#ifdef STACK_BOUNDARY
3088 /* If we push args individually in reverse order, perform stack alignment
3089 before the first push (the last arg). */
3090 if (argblock == 0)
3091 anti_adjust_stack (GEN_INT (args_size.constant
3092 - original_args_size.constant));
3093#endif
3094#endif
3095
3096#ifdef PUSH_ARGS_REVERSED
3097 inc = -1;
3098 argnum = nargs - 1;
3099#else
3100 inc = 1;
3101 argnum = 0;
3102#endif
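/* Illustrative sketch (standalone): the INC/ARGNUM scheme set up above.  One
   loop body visits the elements first-to-last or last-to-first depending on
   the starting index and increment, which is how the push loops below handle
   PUSH_ARGS_REVERSED without duplicating the body.  */
#include <stdio.h>

static void
visit_all (const int *vals, int nargs, int reversed)
{
  int inc = reversed ? -1 : 1;
  int argnum = reversed ? nargs - 1 : 0;
  int count;

  for (count = 0; count < nargs; count++, argnum += inc)
    printf ("%d ", vals[argnum]);
  printf ("\n");
}

int
main (void)
{
  int vals[] = { 10, 20, 30 };

  visit_all (vals, 3, 0);   /* prints: 10 20 30 */
  visit_all (vals, 3, 1);   /* prints: 30 20 10 */
  return 0;
}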
3103
f046b3cc
JL
3104#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3105 /* The argument list is the property of the called routine and it
3106 may clobber it. If the fixed area has been used for previous
3107 parameters, we must save and restore it.
3108
 3109	     Here we compute the boundary of the area that needs to be saved, if any.  */
3110
3111#ifdef ARGS_GROW_DOWNWARD
3112 for (count = 0; count < reg_parm_stack_space + 1; count++)
3113#else
3114 for (count = 0; count < reg_parm_stack_space; count++)
3115#endif
3116 {
3117 if (count >= highest_outgoing_arg_in_use
3118 || stack_usage_map[count] == 0)
3119 continue;
3120
3121 if (low_to_save == -1)
3122 low_to_save = count;
3123
3124 high_to_save = count;
3125 }
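/* Illustrative sketch (standalone, simplified): finding the bounds of the
   used part of a usage map, which is what the loop above computes for the
   fixed register-parameter area (the real loop additionally treats indexes at
   or beyond highest_outgoing_arg_in_use as unused).  Returns 0 and leaves
   *LOW and *HIGH untouched when nothing is in use.  */
#include <stdio.h>

static int
find_used_bounds (const char *map, int len, int *low, int *high)
{
  int i, found = 0;

  for (i = 0; i < len; i++)
    {
      if (map[i] == 0)
        continue;
      if (! found)
        *low = i, found = 1;
      *high = i;
    }
  return found;
}

int
main (void)
{
  char map[8] = { 0, 0, 1, 1, 0, 1, 0, 0 };
  int low, high;

  if (find_used_bounds (map, 8, &low, &high))
    printf ("save bytes %d..%d\n", low, high);   /* save bytes 2..5 */
  return 0;
}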
3126
3127 if (low_to_save >= 0)
3128 {
3129 int num_to_save = high_to_save - low_to_save + 1;
3130 enum machine_mode save_mode
3131 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3132 rtx stack_area;
3133
3134 /* If we don't have the required alignment, must do this in BLKmode. */
3135 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3136 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3137 save_mode = BLKmode;
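/* Illustrative sketch (standalone): the power-of-two alignment test used just
   above, (offset & (align - 1)), which is nonzero exactly when OFFSET is not
   a multiple of ALIGN.  ALIGN must be a power of two for the mask trick to
   work; the values below are assumptions for the example.  */
#include <stdio.h>

static int
is_aligned (unsigned long offset, unsigned long align)
{
  return (offset & (align - 1)) == 0;
}

int
main (void)
{
  printf ("%d %d %d\n",
          is_aligned (16, 8),    /* 1 */
          is_aligned (12, 8),    /* 0: must fall back to a block copy */
          is_aligned (12, 4));   /* 1 */
  return 0;
}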
3138
ceb83206 3139#ifdef ARGS_GROW_DOWNWARD
38a448ca
RH
3140 stack_area = gen_rtx_MEM (save_mode,
3141 memory_address (save_mode,
38a448ca 3142 plus_constant (argblock,
ceb83206 3143 - high_to_save)));
f046b3cc 3144#else
ceb83206
JL
3145 stack_area = gen_rtx_MEM (save_mode,
3146 memory_address (save_mode,
38a448ca 3147 plus_constant (argblock,
ceb83206 3148 low_to_save)));
f046b3cc 3149#endif
f046b3cc
JL
3150 if (save_mode == BLKmode)
3151 {
3152 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3153 MEM_IN_STRUCT_P (save_area) = 0;
3154 emit_block_move (validize_mem (save_area), stack_area,
3155 GEN_INT (num_to_save),
3156 PARM_BOUNDARY / BITS_PER_UNIT);
3157 }
3158 else
3159 {
3160 save_area = gen_reg_rtx (save_mode);
3161 emit_move_insn (save_area, stack_area);
3162 }
3163 }
3164#endif
3165
322e3e34
RK
3166 /* Push the args that need to be pushed. */
3167
5e26979c
JL
3168 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3169 are to be pushed. */
322e3e34
RK
3170 for (count = 0; count < nargs; count++, argnum += inc)
3171 {
3172 register enum machine_mode mode = argvec[argnum].mode;
3173 register rtx val = argvec[argnum].value;
3174 rtx reg = argvec[argnum].reg;
3175 int partial = argvec[argnum].partial;
69d4ca36 3176#ifdef ACCUMULATE_OUTGOING_ARGS
f046b3cc 3177 int lower_bound, upper_bound, i;
69d4ca36 3178#endif
322e3e34
RK
3179
3180 if (! (reg != 0 && partial == 0))
f046b3cc
JL
3181 {
3182#ifdef ACCUMULATE_OUTGOING_ARGS
3183 /* If this is being stored into a pre-allocated, fixed-size, stack
3184 area, save any previous data at that location. */
3185
3186#ifdef ARGS_GROW_DOWNWARD
3187 /* stack_slot is negative, but we want to index stack_usage_map
3188 with positive values. */
5e26979c
JL
3189 upper_bound = -argvec[argnum].offset.constant + 1;
3190 lower_bound = upper_bound - argvec[argnum].size.constant;
f046b3cc 3191#else
5e26979c
JL
3192 lower_bound = argvec[argnum].offset.constant;
3193 upper_bound = lower_bound + argvec[argnum].size.constant;
f046b3cc
JL
3194#endif
3195
3196 for (i = lower_bound; i < upper_bound; i++)
3197 if (stack_usage_map[i]
f046b3cc
JL
3198 /* Don't store things in the fixed argument area at this point;
3199 it has already been saved. */
e5e809f4 3200 && i > reg_parm_stack_space)
f046b3cc
JL
3201 break;
3202
3203 if (i != upper_bound)
3204 {
e5e809f4 3205 /* We need to make a save area. See what mode we can make it. */
f046b3cc 3206 enum machine_mode save_mode
5e26979c 3207 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
f046b3cc
JL
3208 MODE_INT, 1);
3209 rtx stack_area
38a448ca
RH
3210 = gen_rtx_MEM (save_mode,
3211 memory_address (save_mode,
3212 plus_constant (argblock,
3213 argvec[argnum].offset.constant)));
5e26979c
JL
3214 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3215 emit_move_insn (argvec[argnum].save_area, stack_area);
f046b3cc
JL
3216 }
3217#endif
3218 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
e5e809f4
JL
3219 argblock, GEN_INT (argvec[argnum].offset.constant),
3220 reg_parm_stack_space);
f046b3cc
JL
3221
3222#ifdef ACCUMULATE_OUTGOING_ARGS
3223 /* Now mark the segment we just used. */
3224 for (i = lower_bound; i < upper_bound; i++)
3225 stack_usage_map[i] = 1;
3226#endif
3227
3228 NO_DEFER_POP;
3229 }
322e3e34
RK
3230 }
3231
3232#ifndef PUSH_ARGS_REVERSED
3233#ifdef STACK_BOUNDARY
3234 /* If we pushed args in forward order, perform stack alignment
3235 after pushing the last arg. */
3236 if (argblock == 0)
3237 anti_adjust_stack (GEN_INT (args_size.constant
3238 - original_args_size.constant));
3239#endif
3240#endif
3241
3242#ifdef PUSH_ARGS_REVERSED
3243 argnum = nargs - 1;
3244#else
3245 argnum = 0;
3246#endif
3247
77cac2f2 3248 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
8b0f9101 3249
322e3e34
RK
3250 /* Now load any reg parms into their regs. */
3251
5e26979c
JL
3252 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3253 are to be pushed. */
322e3e34
RK
3254 for (count = 0; count < nargs; count++, argnum += inc)
3255 {
322e3e34
RK
3256 register rtx val = argvec[argnum].value;
3257 rtx reg = argvec[argnum].reg;
3258 int partial = argvec[argnum].partial;
3259
3260 if (reg != 0 && partial == 0)
3261 emit_move_insn (reg, val);
3262 NO_DEFER_POP;
3263 }
3264
3265#if 0
3266 /* For version 1.37, try deleting this entirely. */
3267 if (! no_queue)
3268 emit_queue ();
3269#endif
3270
3271 /* Any regs containing parms remain in use through the call. */
322e3e34
RK
3272 for (count = 0; count < nargs; count++)
3273 if (argvec[count].reg != 0)
77cac2f2 3274 use_reg (&call_fusage, argvec[count].reg);
322e3e34 3275
fac0ad80
RS
3276 /* Pass the function the address in which to return a structure value. */
3277 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3278 {
3279 emit_move_insn (struct_value_rtx,
3280 force_reg (Pmode,
3281 force_operand (XEXP (mem_value, 0),
3282 NULL_RTX)));
3283 if (GET_CODE (struct_value_rtx) == REG)
77cac2f2 3284 use_reg (&call_fusage, struct_value_rtx);
fac0ad80
RS
3285 }
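/* Illustrative sketch (standalone C, names invented): the source-level
   analogue of the struct-value setup above.  Returning an aggregate in memory
   behaves as if the callee took a hidden pointer to the caller's return slot,
   which is the address moved into struct_value_rtx here.  `pair' and
   `make_pair_into' exist only for this illustration.  */
#include <stdio.h>

struct pair { long a, b; };

/* The call with the hidden return-slot pointer made explicit.  */
static void
make_pair_into (struct pair *return_slot, long a, long b)
{
  return_slot->a = a;
  return_slot->b = b;
}

int
main (void)
{
  struct pair p;                  /* caller-allocated return slot */

  make_pair_into (&p, 1, 2);      /* hidden pointer passed explicitly */
  printf ("%ld %ld\n", p.a, p.b);
  return 0;
}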
3286
322e3e34
RK
3287 /* Don't allow popping to be deferred, since then
3288 cse'ing of library calls could delete a call and leave the pop. */
3289 NO_DEFER_POP;
3290
3291 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3292 will set inhibit_defer_pop to that value. */
334c4f0f
RK
3293 /* See the comment in emit_library_call about the function type we build
3294 and pass here. */
322e3e34 3295
2c8da025
RK
3296 emit_call_1 (fun,
3297 get_identifier (XSTR (orgfun, 0)),
334c4f0f
RK
3298 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3299 args_size.constant, struct_value_size,
322e3e34 3300 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4d6a19ff 3301 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
77cac2f2 3302 old_inhibit_defer_pop + 1, call_fusage, is_const);
322e3e34
RK
3303
3304 /* Now restore inhibit_defer_pop to its actual original value. */
3305 OK_DEFER_POP;
3306
888aa7a9
RS
3307 pop_temp_slots ();
3308
322e3e34
RK
3309 /* Copy the value to the right place. */
3310 if (outmode != VOIDmode)
3311 {
3312 if (mem_value)
3313 {
3314 if (value == 0)
fac0ad80 3315 value = mem_value;
322e3e34
RK
3316 if (value != mem_value)
3317 emit_move_insn (value, mem_value);
3318 }
3319 else if (value != 0)
3320 emit_move_insn (value, hard_libcall_value (outmode));
fac0ad80
RS
3321 else
3322 value = hard_libcall_value (outmode);
322e3e34 3323 }
fac0ad80 3324
f046b3cc
JL
3325#ifdef ACCUMULATE_OUTGOING_ARGS
3326#ifdef REG_PARM_STACK_SPACE
e9a25f70
JL
3327 if (save_area)
3328 {
3329 enum machine_mode save_mode = GET_MODE (save_area);
ceb83206 3330#ifdef ARGS_GROW_DOWNWARD
e9a25f70 3331 rtx stack_area
38a448ca
RH
3332 = gen_rtx_MEM (save_mode,
3333 memory_address (save_mode,
ceb83206
JL
3334 plus_constant (argblock,
3335 - high_to_save)));
f046b3cc 3336#else
ceb83206
JL
3337 rtx stack_area
3338 = gen_rtx_MEM (save_mode,
3339 memory_address (save_mode,
3340 plus_constant (argblock, low_to_save)));
f046b3cc 3341#endif
e9a25f70
JL
3342 if (save_mode != BLKmode)
3343 emit_move_insn (stack_area, save_area);
3344 else
3345 emit_block_move (stack_area, validize_mem (save_area),
3346 GEN_INT (high_to_save - low_to_save + 1),
f046b3cc 3347 PARM_BOUNDARY / BITS_PER_UNIT);
e9a25f70 3348 }
f046b3cc
JL
3349#endif
3350
3351 /* If we saved any argument areas, restore them. */
3352 for (count = 0; count < nargs; count++)
3353 if (argvec[count].save_area)
3354 {
3355 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3356 rtx stack_area
38a448ca 3357 = gen_rtx_MEM (save_mode,
f046b3cc
JL
3358 memory_address (save_mode, plus_constant (argblock,
3359 argvec[count].offset.constant)));
3360
3361 emit_move_insn (stack_area, argvec[count].save_area);
3362 }
3363
3364 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3365 stack_usage_map = initial_stack_usage_map;
3366#endif
3367
fac0ad80 3368 return value;
322e3e34
RK
3369}
3370\f
51bbfa0c
RS
3371#if 0
3372/* Return an rtx which represents a suitable home on the stack
3373 given TYPE, the type of the argument looking for a home.
3374 This is called only for BLKmode arguments.
3375
3376 SIZE is the size needed for this target.
3377 ARGS_ADDR is the address of the bottom of the argument block for this call.
3378 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3379 if this machine uses push insns. */
3380
3381static rtx
3382target_for_arg (type, size, args_addr, offset)
3383 tree type;
3384 rtx size;
3385 rtx args_addr;
3386 struct args_size offset;
3387{
3388 rtx target;
3389 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3390
 3391	/* If possible, we avoid calling memory_address,
 3392	   because we want the address to stay as close to the stack pointer
 3393	   as possible.  For non-variable-sized arguments,
 3394	   this will be stack-pointer-relative addressing. */
3395 if (GET_CODE (offset_rtx) == CONST_INT)
3396 target = plus_constant (args_addr, INTVAL (offset_rtx));
3397 else
3398 {
3399 /* I have no idea how to guarantee that this
3400 will work in the presence of register parameters. */
38a448ca 3401 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
51bbfa0c
RS
3402 target = memory_address (QImode, target);
3403 }
3404
38a448ca 3405 return gen_rtx_MEM (BLKmode, target);
51bbfa0c
RS
3406}
3407#endif
3408\f
3409/* Store a single argument for a function call
3410 into the register or memory area where it must be passed.
3411 *ARG describes the argument value and where to pass it.
3412
3413 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 3414 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
3415
3416 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
 3417	   so we must be careful about how the stack is used.
3418
3419 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 3420	   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is defined to indicate
3421 that we need not worry about saving and restoring the stack.
3422
3423 FNDECL is the declaration of the function we are calling. */
3424
3425static void
6f90e075
JW
3426store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
3427 reg_parm_stack_space)
51bbfa0c
RS
3428 struct arg_data *arg;
3429 rtx argblock;
3430 int may_be_alloca;
3431 int variable_size;
3432 tree fndecl;
6f90e075 3433 int reg_parm_stack_space;
51bbfa0c
RS
3434{
3435 register tree pval = arg->tree_value;
3436 rtx reg = 0;
3437 int partial = 0;
3438 int used = 0;
69d4ca36 3439#ifdef ACCUMULATE_OUTGOING_ARGS
51bbfa0c 3440 int i, lower_bound, upper_bound;
69d4ca36 3441#endif
51bbfa0c
RS
3442
3443 if (TREE_CODE (pval) == ERROR_MARK)
3444 return;
3445
cc79451b
RK
3446 /* Push a new temporary level for any temporaries we make for
3447 this argument. */
3448 push_temp_slots ();
3449
51bbfa0c
RS
3450#ifdef ACCUMULATE_OUTGOING_ARGS
3451 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3452 save any previous data at that location. */
3453 if (argblock && ! variable_size && arg->stack)
3454 {
3455#ifdef ARGS_GROW_DOWNWARD
0f41302f
MS
3456 /* stack_slot is negative, but we want to index stack_usage_map
3457 with positive values. */
51bbfa0c
RS
3458 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3459 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3460 else
50eb43ca 3461 upper_bound = 0;
51bbfa0c
RS
3462
3463 lower_bound = upper_bound - arg->size.constant;
3464#else
3465 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3466 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3467 else
3468 lower_bound = 0;
3469
3470 upper_bound = lower_bound + arg->size.constant;
3471#endif
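/* Illustrative sketch (standalone): mapping a stack-slot offset and size to
   usage-map bounds for both stack directions, as the #ifdef above does.  When
   the argument area grows downward the offset is negative, so it is negated
   to index stack_usage_map with nonnegative values.  The concrete offsets
   below are assumptions for the example.  */
#include <stdio.h>

static void
map_bounds (int offset, int size, int args_grow_downward,
            int *lower, int *upper)
{
  if (args_grow_downward)
    {
      *upper = -offset + 1;
      *lower = *upper - size;
    }
  else
    {
      *lower = offset;
      *upper = *lower + size;
    }
}

int
main (void)
{
  int lo, hi;

  map_bounds (8, 4, 0, &lo, &hi);
  printf ("upward:   [%d, %d)\n", lo, hi);   /* [8, 12) */

  map_bounds (-8, 4, 1, &lo, &hi);
  printf ("downward: [%d, %d)\n", lo, hi);   /* [5, 9) */
  return 0;
}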
3472
3473 for (i = lower_bound; i < upper_bound; i++)
3474 if (stack_usage_map[i]
51bbfa0c
RS
3475 /* Don't store things in the fixed argument area at this point;
3476 it has already been saved. */
e5e809f4 3477 && i > reg_parm_stack_space)
51bbfa0c
RS
3478 break;
3479
3480 if (i != upper_bound)
3481 {
3482 /* We need to make a save area. See what mode we can make it. */
3483 enum machine_mode save_mode
3484 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3485 rtx stack_area
38a448ca
RH
3486 = gen_rtx_MEM (save_mode,
3487 memory_address (save_mode,
3488 XEXP (arg->stack_slot, 0)));
51bbfa0c
RS
3489
3490 if (save_mode == BLKmode)
3491 {
3492 arg->save_area = assign_stack_temp (BLKmode,
6fa51029 3493 arg->size.constant, 0);
3668e76e
JL
3494 MEM_IN_STRUCT_P (arg->save_area)
3495 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
cc79451b 3496 preserve_temp_slots (arg->save_area);
51bbfa0c 3497 emit_block_move (validize_mem (arg->save_area), stack_area,
e5d70561 3498 GEN_INT (arg->size.constant),
51bbfa0c
RS
3499 PARM_BOUNDARY / BITS_PER_UNIT);
3500 }
3501 else
3502 {
3503 arg->save_area = gen_reg_rtx (save_mode);
3504 emit_move_insn (arg->save_area, stack_area);
3505 }
3506 }
3507 }
3508#endif
3509
3510 /* If this isn't going to be placed on both the stack and in registers,
3511 set up the register and number of words. */
3512 if (! arg->pass_on_stack)
3513 reg = arg->reg, partial = arg->partial;
3514
3515 if (reg != 0 && partial == 0)
3516 /* Being passed entirely in a register. We shouldn't be called in
3517 this case. */
3518 abort ();
3519
4ab56118
RK
3520 /* If this arg needs special alignment, don't load the registers
3521 here. */
3522 if (arg->n_aligned_regs != 0)
3523 reg = 0;
4ab56118 3524
4ab56118 3525 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
3526 it directly into its stack slot. Otherwise, we can. */
3527 if (arg->value == 0)
d64f5a78
RS
3528 {
3529#ifdef ACCUMULATE_OUTGOING_ARGS
3530 /* stack_arg_under_construction is nonzero if a function argument is
3531 being evaluated directly into the outgoing argument list and
3532 expand_call must take special action to preserve the argument list
3533 if it is called recursively.
3534
3535 For scalar function arguments stack_usage_map is sufficient to
3536 determine which stack slots must be saved and restored. Scalar
3537 arguments in general have pass_on_stack == 0.
3538
3539 If this argument is initialized by a function which takes the
3540 address of the argument (a C++ constructor or a C function
3541 returning a BLKmode structure), then stack_usage_map is
3542 insufficient and expand_call must push the stack around the
3543 function call. Such arguments have pass_on_stack == 1.
3544
3545 Note that it is always safe to set stack_arg_under_construction,
3546 but this generates suboptimal code if set when not needed. */
3547
3548 if (arg->pass_on_stack)
3549 stack_arg_under_construction++;
3550#endif
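/* Illustrative sketch (standalone, an analogy only): the counter-bracketing
   pattern behind stack_arg_under_construction above.  A flag is raised around
   an evaluation that may recurse, so a nested call can tell that it must
   preserve state its caller is still building.  All names are invented.  */
#include <stdio.h>

static int under_construction;

static void
evaluate (int depth)
{
  if (under_construction)
    printf ("depth %d: an outer argument is mid-construction, save it first\n",
            depth);
  if (depth == 0)
    return;

  under_construction++;      /* the evaluation below may recurse into us */
  evaluate (depth - 1);
  under_construction--;
}

int
main (void)
{
  evaluate (2);
  return 0;
}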
3a08477a
RK
3551 arg->value = expand_expr (pval,
3552 (partial
3553 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3554 ? NULL_RTX : arg->stack,
e5d70561 3555 VOIDmode, 0);
1efe6448
RK
3556
 3557	  /* If we are promoting the object (or if for any other reason the mode
 3558	     doesn't agree), convert the mode.  */
3559
7373d92d
RK
3560 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3561 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3562 arg->value, arg->unsignedp);
1efe6448 3563
d64f5a78
RS
3564#ifdef ACCUMULATE_OUTGOING_ARGS
3565 if (arg->pass_on_stack)
3566 stack_arg_under_construction--;
3567#endif
3568 }
51bbfa0c
RS
3569
3570 /* Don't allow anything left on stack from computation
3571 of argument to alloca. */
3572 if (may_be_alloca)
3573 do_pending_stack_adjust ();
3574
3575 if (arg->value == arg->stack)
7815214e
RK
3576 {
3577 /* If the value is already in the stack slot, we are done. */
3578 if (flag_check_memory_usage && GET_CODE (arg->stack) == MEM)
3579 {
3580 if (arg->mode == BLKmode)
3581 abort ();
3582
3583 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3584 XEXP (arg->stack, 0), ptr_mode,
3585 GEN_INT (GET_MODE_SIZE (arg->mode)),
3586 TYPE_MODE (sizetype),
956d6950
JL
3587 GEN_INT (MEMORY_USE_RW),
3588 TYPE_MODE (integer_type_node));
7815214e
RK
3589 }
3590 }
1efe6448 3591 else if (arg->mode != BLKmode)
51bbfa0c
RS
3592 {
3593 register int size;
3594
3595 /* Argument is a scalar, not entirely passed in registers.
3596 (If part is passed in registers, arg->partial says how much
3597 and emit_push_insn will take care of putting it there.)
3598
3599 Push it, and if its size is less than the
3600 amount of space allocated to it,
 3601	 also bump the stack pointer by the additional space.
3602 Note that in C the default argument promotions
3603 will prevent such mismatches. */
3604
1efe6448 3605 size = GET_MODE_SIZE (arg->mode);
51bbfa0c
RS
3606 /* Compute how much space the push instruction will push.
3607 On many machines, pushing a byte will advance the stack
3608 pointer by a halfword. */
3609#ifdef PUSH_ROUNDING
3610 size = PUSH_ROUNDING (size);
3611#endif
3612 used = size;
3613
3614 /* Compute how much space the argument should get:
3615 round up to a multiple of the alignment for arguments. */
1efe6448 3616 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
51bbfa0c
RS
3617 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3618 / (PARM_BOUNDARY / BITS_PER_UNIT))
3619 * (PARM_BOUNDARY / BITS_PER_UNIT));
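/* Illustrative sketch (standalone, values assumed): the USED computation just
   above.  SIZE is the pushed size of the scalar, USED rounds it up to the
   argument alignment, and USED - SIZE is the padding handed to
   emit_push_insn.  A 4-byte parameter boundary and a 1-byte argument size are
   assumptions made only for this example.  */
#include <stdio.h>

static int
round_to_parm_boundary (int size, int parm_bytes)
{
  return ((size + parm_bytes - 1) / parm_bytes) * parm_bytes;
}

int
main (void)
{
  int size = 1;                                  /* a 1-byte argument */
  int used = round_to_parm_boundary (size, 4);   /* 4 */

  printf ("used = %d, padding = %d\n", used, used - size);   /* used = 4, padding = 3 */
  return 0;
}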
3620
3621 /* This isn't already where we want it on the stack, so put it there.
3622 This can either be done with push or copy insns. */
e5e809f4
JL
3623 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3624 partial, reg, used - size, argblock,
3625 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
51bbfa0c
RS
3626 }
3627 else
3628 {
3629 /* BLKmode, at least partly to be pushed. */
3630
3631 register int excess;
3632 rtx size_rtx;
3633
3634 /* Pushing a nonscalar.
3635 If part is passed in registers, PARTIAL says how much
3636 and emit_push_insn will take care of putting it there. */
3637
3638 /* Round its size up to a multiple
3639 of the allocation unit for arguments. */
3640
3641 if (arg->size.var != 0)
3642 {
3643 excess = 0;
3644 size_rtx = ARGS_SIZE_RTX (arg->size);
3645 }
3646 else
3647 {
51bbfa0c
RS
3648 /* PUSH_ROUNDING has no effect on us, because
3649 emit_push_insn for BLKmode is careful to avoid it. */
0cf91217 3650 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
51bbfa0c 3651 + partial * UNITS_PER_WORD);
e4f93898 3652 size_rtx = expr_size (pval);
51bbfa0c
RS
3653 }
3654
1efe6448 3655 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
51bbfa0c 3656 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
e5e809f4
JL
3657 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3658 reg_parm_stack_space);
51bbfa0c
RS
3659 }
3660
3661
3662 /* Unless this is a partially-in-register argument, the argument is now
3663 in the stack.
3664
3665 ??? Note that this can change arg->value from arg->stack to
3666 arg->stack_slot and it matters when they are not the same.
3667 It isn't totally clear that this is correct in all cases. */
3668 if (partial == 0)
3669 arg->value = arg->stack_slot;
3670
3671 /* Once we have pushed something, pops can't safely
3672 be deferred during the rest of the arguments. */
3673 NO_DEFER_POP;
3674
3675 /* ANSI doesn't require a sequence point here,
3676 but PCC has one, so this will avoid some problems. */
3677 emit_queue ();
3678
db907e7b
RK
3679 /* Free any temporary slots made in processing this argument. Show
3680 that we might have taken the address of something and pushed that
3681 as an operand. */
3682 preserve_temp_slots (NULL_RTX);
51bbfa0c 3683 free_temp_slots ();
cc79451b 3684 pop_temp_slots ();
51bbfa0c
RS
3685
3686#ifdef ACCUMULATE_OUTGOING_ARGS
3687 /* Now mark the segment we just used. */
3688 if (argblock && ! variable_size && arg->stack)
3689 for (i = lower_bound; i < upper_bound; i++)
3690 stack_usage_map[i] = 1;
3691#endif
3692}