1/* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20#include "config.h"
21#include "rtl.h"
22#include "tree.h"
23#include "flags.h"
24#include "expr.h"
25#include "insn-flags.h"
26
27/* Decide whether a function's arguments should be processed
28 from first to last or from last to first. */
29
30#ifdef STACK_GROWS_DOWNWARD
31#ifdef PUSH_ROUNDING
32#define PUSH_ARGS_REVERSED /* If it's last to first */
33#endif
34#endif
35
36/* Like STACK_BOUNDARY but in units of bytes, not bits. */
37#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
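/* For example, with STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8.  */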
38
39/* Data structure and subroutines used within expand_call. */
40
41struct arg_data
42{
43 /* Tree node for this argument. */
44 tree tree_value;
45 /* Current RTL value for argument, or 0 if it isn't precomputed. */
46 rtx value;
47 /* Initially-computed RTL value for argument; only for const functions. */
48 rtx initial_value;
49 /* Register to pass this argument in, 0 if passed on stack, or an
50 EXPR_LIST if the arg is to be copied into multiple different
51 registers. */
52 rtx reg;
53 /* Number of registers to use. 0 means put the whole arg in registers.
54 Also 0 if not passed in registers. */
55 int partial;
56 /* Non-zero if argument must be passed on stack. */
57 int pass_on_stack;
58 /* Offset of this argument from beginning of stack-args. */
59 struct args_size offset;
60 /* Similar, but offset to the start of the stack slot. Different from
61 OFFSET if this arg pads downward. */
62 struct args_size slot_offset;
63 /* Size of this argument on the stack, rounded up for any padding it gets;
64 parts of the argument passed in registers do not count.
65 If REG_PARM_STACK_SPACE is defined, then register parms
66 are counted here as well. */
67 struct args_size size;
68 /* Location on the stack at which parameter should be stored. The store
69 has already been done if STACK == VALUE. */
70 rtx stack;
71 /* Location on the stack of the start of this argument slot. This can
72 differ from STACK if this arg pads downward. This location is known
73 to be aligned to FUNCTION_ARG_BOUNDARY. */
74 rtx stack_slot;
75#ifdef ACCUMULATE_OUTGOING_ARGS
76 /* Place that this stack area has been saved, if needed. */
77 rtx save_area;
78#endif
79};
80
81#ifdef ACCUMULATE_OUTGOING_ARGS
82/* A vector of one char per byte of stack space. A byte is non-zero if
83 the corresponding stack location has been used.
84 This vector is used to prevent a function call within an argument from
85 clobbering any stack already set up. */
86static char *stack_usage_map;
87
88/* Size of STACK_USAGE_MAP. */
89static int highest_outgoing_arg_in_use;
90#endif
91
92static void store_one_arg ();
93extern enum machine_mode mode_for_size ();
94\f
95/* Return 1 if EXP contains a call to the built-in function `alloca'. */
96
97static int
98calls_alloca (exp)
99 tree exp;
100{
101 register int i;
102 int type = TREE_CODE_CLASS (TREE_CODE (exp));
103 int length = tree_code_length[(int) TREE_CODE (exp)];
104
105 /* Only expressions and references can contain calls. */
106
107 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
108 return 0;
109
110 switch (TREE_CODE (exp))
111 {
112 case CALL_EXPR:
113 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
114 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
115 == FUNCTION_DECL)
116 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
117 && (DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
118 == BUILT_IN_ALLOCA))
119 return 1;
120
121 /* Third operand is RTL. */
122 length = 2;
123 break;
124
125 case SAVE_EXPR:
126 if (SAVE_EXPR_RTL (exp) != 0)
127 return 0;
128 break;
129
130 case BLOCK:
131 /* Must not look at BLOCK_SUPERCONTEXT since it will point back to
132 us. */
133 length = 3;
134 break;
135
136 case METHOD_CALL_EXPR:
137 length = 3;
138 break;
139
140 case WITH_CLEANUP_EXPR:
141 length = 1;
142 break;
143
144 case RTL_EXPR:
145 return 0;
146 }
147
148 for (i = 0; i < length; i++)
149 if (TREE_OPERAND (exp, i) != 0
150 && calls_alloca (TREE_OPERAND (exp, i)))
151 return 1;
152
153 return 0;
154}
155\f
156/* Force FUNEXP into a form suitable for the address of a CALL,
157 and return that as an rtx. Also load the static chain register
158 if FNDECL is a nested function.
159
160 USE_INSNS points to a variable holding a chain of USE insns
161 to which a USE of the static chain
162 register should be added, if required. */
163
164rtx
165prepare_call_address (funexp, fndecl, use_insns)
166 rtx funexp;
167 tree fndecl;
168 rtx *use_insns;
169{
170 rtx static_chain_value = 0;
171
172 funexp = protect_from_queue (funexp, 0);
173
174 if (fndecl != 0)
175 /* Get possible static chain value for nested function in C. */
176 static_chain_value = lookup_static_chain (fndecl);
177
178 /* Make a valid memory address and copy constants thru pseudo-regs,
179 but not for a constant address if -fno-function-cse. */
180 if (GET_CODE (funexp) != SYMBOL_REF)
181 funexp = memory_address (FUNCTION_MODE, funexp);
182 else
183 {
184#ifndef NO_FUNCTION_CSE
185 if (optimize && ! flag_no_function_cse)
186#ifdef NO_RECURSIVE_FUNCTION_CSE
187 if (fndecl != current_function_decl)
188#endif
189 funexp = force_reg (Pmode, funexp);
190#endif
191 }
192
193 if (static_chain_value != 0)
194 {
195 emit_move_insn (static_chain_rtx, static_chain_value);
196
197 /* Put the USE insn in the chain we were passed. It will later be
198 output immediately in front of the CALL insn. */
199 push_to_sequence (*use_insns);
200 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
201 *use_insns = get_insns ();
202 end_sequence ();
203 }
204
205 return funexp;
206}
207
208/* Generate instructions to call function FUNEXP,
209 and optionally pop the results.
210 The CALL_INSN is the first insn generated.
211
212 FUNTYPE is the data type of the function, or, for a library call,
213 the identifier for the name of the call. This is given to the
214 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
215
216 STACK_SIZE is the number of bytes of arguments on the stack,
217 rounded up to STACK_BOUNDARY; zero if the size is variable.
218 This is both to put into the call insn and
219 to generate explicit popping code if necessary.
220
221 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
222 It is zero if this call doesn't want a structure value.
223
224 NEXT_ARG_REG is the rtx that results from executing
225 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
226 just after all the args have had their registers assigned.
227 This could be whatever you like, but normally it is the first
228 arg-register beyond those used for args in this call,
229 or 0 if all the arg-registers are used in this call.
230 It is passed on to `gen_call' so you can put this info in the call insn.
231
232 VALREG is a hard register in which a value is returned,
233 or 0 if the call does not return a value.
234
235 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
236 the args to this call were processed.
237 We restore `inhibit_defer_pop' to that value.
238
239 USE_INSNS is a chain of USE insns to be emitted immediately before
240 the actual CALL insn.
241
242 IS_CONST is true if this is a `const' call. */
243
244void
245emit_call_1 (funexp, funtype, stack_size, struct_value_size, next_arg_reg,
246 valreg, old_inhibit_defer_pop, use_insns, is_const)
247 rtx funexp;
248 tree funtype;
249 int stack_size;
250 int struct_value_size;
251 rtx next_arg_reg;
252 rtx valreg;
253 int old_inhibit_defer_pop;
254 rtx use_insns;
255 int is_const;
256{
257 rtx stack_size_rtx = gen_rtx (CONST_INT, VOIDmode, stack_size);
258 rtx struct_value_size_rtx = gen_rtx (CONST_INT, VOIDmode, struct_value_size);
259 rtx call_insn;
260 int already_popped = 0;
261
262 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
263 and we don't want to load it into a register as an optimization,
264 because prepare_call_address already did it if it should be done. */
265 if (GET_CODE (funexp) != SYMBOL_REF)
266 funexp = memory_address (FUNCTION_MODE, funexp);
267
268#ifndef ACCUMULATE_OUTGOING_ARGS
269#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
270 if (HAVE_call_pop && HAVE_call_value_pop
271 && (RETURN_POPS_ARGS (funtype, stack_size) > 0 || stack_size == 0))
272 {
273 rtx n_pop = gen_rtx (CONST_INT, VOIDmode,
274 RETURN_POPS_ARGS (funtype, stack_size));
275 rtx pat;
276
277 /* If this subroutine pops its own args, record that in the call insn
278 if possible, for the sake of frame pointer elimination. */
279 if (valreg)
280 pat = gen_call_value_pop (valreg,
281 gen_rtx (MEM, FUNCTION_MODE, funexp),
282 stack_size_rtx, next_arg_reg, n_pop);
283 else
284 pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
285 stack_size_rtx, next_arg_reg, n_pop);
286
287 emit_call_insn (pat);
288 already_popped = 1;
289 }
290 else
291#endif
292#endif
293
294#if defined (HAVE_call) && defined (HAVE_call_value)
295 if (HAVE_call && HAVE_call_value)
296 {
297 if (valreg)
298 emit_call_insn (gen_call_value (valreg,
299 gen_rtx (MEM, FUNCTION_MODE, funexp),
300 stack_size_rtx, next_arg_reg));
301 else
302 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
303 stack_size_rtx, next_arg_reg,
304 struct_value_size_rtx));
305 }
306 else
307#endif
308 abort ();
309
310 /* Find the CALL insn we just emitted and write the USE insns before it. */
311 for (call_insn = get_last_insn ();
312 call_insn && GET_CODE (call_insn) != CALL_INSN;
313 call_insn = PREV_INSN (call_insn))
314 ;
315
316 if (! call_insn)
317 abort ();
318
319 /* Put the USE insns before the CALL. */
320 emit_insns_before (use_insns, call_insn);
321
322 /* If this is a const call, then set the insn's unchanging bit. */
323 if (is_const)
324 CONST_CALL_P (call_insn) = 1;
325
326 inhibit_defer_pop = old_inhibit_defer_pop;
327
328#ifndef ACCUMULATE_OUTGOING_ARGS
329 /* If returning from the subroutine does not automatically pop the args,
330 we need an instruction to pop them sooner or later.
331 Perhaps do it now; perhaps just record how much space to pop later.
332
333 If returning from the subroutine does pop the args, indicate that the
334 stack pointer will be changed. */
335
336 if (stack_size != 0 && RETURN_POPS_ARGS (funtype, stack_size) > 0)
337 {
338 if (!already_popped)
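/* A CLOBBER of the stack pointer records that the callee's popping
   of the args changes it, per the comment above.  */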
339 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
340 stack_size -= RETURN_POPS_ARGS (funtype, stack_size);
341 stack_size_rtx = gen_rtx (CONST_INT, VOIDmode, stack_size);
342 }
343
344 if (stack_size != 0)
345 {
346 if (flag_defer_pop && inhibit_defer_pop == 0)
347 pending_stack_adjust += stack_size;
348 else
349 adjust_stack (stack_size_rtx);
350 }
351#endif
352}
353
354/* Generate all the code for a function call
355 and return an rtx for its value.
356 Store the value in TARGET (specified as an rtx) if convenient.
357 If the value is stored in TARGET then TARGET is returned.
358 If IGNORE is nonzero, then we ignore the value of the function call. */
359
360rtx
361expand_call (exp, target, ignore, modifier)
362 tree exp;
363 rtx target;
364 int ignore;
365 enum expand_modifier modifier;
366{
367 /* List of actual parameters. */
368 tree actparms = TREE_OPERAND (exp, 1);
369 /* RTX for the function to be called. */
370 rtx funexp;
371 /* Tree node for the function to be called (not the address!). */
372 tree funtree;
373 /* Data type of the function. */
374 tree funtype;
375 /* Declaration of the function being called,
376 or 0 if the function is computed (not known by name). */
377 tree fndecl = 0;
378 char *name = 0;
379
380 /* Register in which non-BLKmode value will be returned,
381 or 0 if no value or if value is BLKmode. */
382 rtx valreg;
383 /* Address where we should return a BLKmode value;
384 0 if value not BLKmode. */
385 rtx structure_value_addr = 0;
386 /* Nonzero if that address is being passed by treating it as
387 an extra, implicit first parameter. Otherwise,
388 it is passed by being copied directly into struct_value_rtx. */
389 int structure_value_addr_parm = 0;
390 /* Size of aggregate value wanted, or zero if none wanted
391 or if we are using the non-reentrant PCC calling convention
392 or expecting the value in registers. */
393 int struct_value_size = 0;
394 /* Nonzero if called function returns an aggregate in memory PCC style,
395 by returning the address of where to find it. */
396 int pcc_struct_value = 0;
397
398 /* Number of actual parameters in this call, including struct value addr. */
399 int num_actuals;
400 /* Number of named args. Args after this are anonymous ones
401 and they must all go on the stack. */
402 int n_named_args;
403 /* Count arg position in order args appear. */
404 int argpos;
405
406 /* Vector of information about each argument.
407 Arguments are numbered in the order they will be pushed,
408 not the order they are written. */
409 struct arg_data *args;
410
411 /* Total size in bytes of all the stack-parms scanned so far. */
412 struct args_size args_size;
413 /* Size of arguments before any adjustments (such as rounding). */
414 struct args_size original_args_size;
415 /* Data on reg parms scanned so far. */
416 CUMULATIVE_ARGS args_so_far;
417 /* Nonzero if a reg parm has been scanned. */
418 int reg_parm_seen;
419
420 /* Nonzero if we must avoid push-insns in the args for this call.
421 If stack space is allocated for register parameters, but not by the
422 caller, then it is preallocated in the fixed part of the stack frame.
423 So the entire argument block must then be preallocated (i.e., we
424 ignore PUSH_ROUNDING in that case). */
425
426#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
427 int must_preallocate = 1;
428#else
429#ifdef PUSH_ROUNDING
430 int must_preallocate = 0;
431#else
432 int must_preallocate = 1;
433#endif
434#endif
435
436 /* 1 if scanning parms front to back, -1 if scanning back to front. */
437 int inc;
438 /* Address of space preallocated for stack parms
439 (on machines that lack push insns), or 0 if space not preallocated. */
440 rtx argblock = 0;
441
442 /* Nonzero if it is plausible that this is a call to alloca. */
443 int may_be_alloca;
444 /* Nonzero if this is a call to setjmp or a related function. */
445 int returns_twice;
446 /* Nonzero if this is a call to `longjmp'. */
447 int is_longjmp;
448 /* Nonzero if this is a call to an inline function. */
449 int is_integrable = 0;
450 /* Nonzero if this is a call to __builtin_new. */
451 int is_builtin_new;
452 /* Nonzero if this is a call to a `const' function.
453 Note that only explicitly named functions are handled as `const' here. */
454 int is_const = 0;
455 /* Nonzero if this is a call to a `volatile' function. */
456 int is_volatile = 0;
457#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
458 /* Define the boundary of the register parm stack space that needs to be
459 saved, if any. */
460 int low_to_save = -1, high_to_save;
461 rtx save_area = 0; /* Place that it is saved */
462#endif
463
464#ifdef ACCUMULATE_OUTGOING_ARGS
465 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
466 char *initial_stack_usage_map = stack_usage_map;
467#endif
468
469 rtx old_stack_level = 0;
470 int old_pending_adj;
471 int old_inhibit_defer_pop = inhibit_defer_pop;
472 tree old_cleanups = cleanups_this_call;
473
474 rtx use_insns = 0;
475
476 register tree p;
477 register int i;
478
479 /* See if we can find a DECL-node for the actual function.
480 As a result, decide whether this is a call to an integrable function. */
481
482 p = TREE_OPERAND (exp, 0);
483 if (TREE_CODE (p) == ADDR_EXPR)
484 {
485 fndecl = TREE_OPERAND (p, 0);
486 if (TREE_CODE (fndecl) != FUNCTION_DECL)
487 {
488 /* May still be a `const' function if it is
489 a call through a pointer-to-const.
490 But we don't handle that. */
491 fndecl = 0;
492 }
493 else
494 {
495 if (!flag_no_inline
496 && fndecl != current_function_decl
497 && DECL_SAVED_INSNS (fndecl))
498 is_integrable = 1;
499 else if (! TREE_ADDRESSABLE (fndecl))
500 {
501 /* In case this function later becomes inlineable,
502 record that there was already a non-inline call to it.
503
504 Use abstraction instead of setting TREE_ADDRESSABLE
505 directly. */
506 if (TREE_INLINE (fndecl) && extra_warnings && !flag_no_inline)
507 warning_with_decl (fndecl, "can't inline call to `%s' which was declared inline");
508 mark_addressable (fndecl);
509 }
510
511 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl))
512 is_const = 1;
513 }
514 }
515
516 is_volatile = TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
517
518 /* Warn if this value is an aggregate type,
519 regardless of which calling convention we are using for it. */
520 if (warn_aggregate_return
521 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
522 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
523 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE))
524 warning ("function call has aggregate value");
525
526 /* Set up a place to return a structure. */
527
528 /* Cater to broken compilers. */
529 if (aggregate_value_p (exp))
530 {
531 /* This call returns a big structure. */
532 is_const = 0;
533
534#ifdef PCC_STATIC_STRUCT_RETURN
535 if (flag_pcc_struct_return)
536 {
537 pcc_struct_value = 1;
538 is_integrable = 0; /* Easier than making that case work right. */
539 }
540 else
541#endif
542 {
543 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
544
545 if (struct_value_size < 0)
546 abort ();
547
548 if (target && GET_CODE (target) == MEM)
549 structure_value_addr = XEXP (target, 0);
550 else
551 {
552 /* Assign a temporary on the stack to hold the value. */
553
554 /* For variable-sized objects, we must be called with a target
555 specified. If we were to allocate space on the stack here,
556 we would have no way of knowing when to free it. */
557
558 structure_value_addr
559 = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
560 target = 0;
561 }
562 }
563 }
564
565 /* If called function is inline, try to integrate it. */
566
567 if (is_integrable)
568 {
569 rtx temp;
570
571 temp = expand_inline_function (fndecl, actparms, target,
572 ignore, TREE_TYPE (exp),
573 structure_value_addr);
574
575 /* If inlining succeeded, return. */
576 if ((int) temp != -1)
577 {
578 /* Perform all cleanups needed for the arguments of this call
579 (i.e. destructors in C++). It is ok if these destructors
580 clobber RETURN_VALUE_REG, because the only time we care about
581 this is when TARGET is that register. But in C++, we take
582 care to never return that register directly. */
583 expand_cleanups_to (old_cleanups);
584
585 /* If the result is equivalent to TARGET, return TARGET to simplify
586 checks in store_expr. They can be equivalent but not equal in the
587 case of a function that returns BLKmode. */
588 if (temp != target && rtx_equal_p (temp, target))
589 return target;
590 return temp;
591 }
592
593 /* If inlining failed, mark FNDECL as needing to be compiled
594 separately after all. */
595 mark_addressable (fndecl);
596 }
597
598 /* When calling a const function, we must pop the stack args right away,
599 so that the pop is deleted or moved with the call. */
600 if (is_const)
601 NO_DEFER_POP;
602
603 function_call_count++;
604
605 if (fndecl && DECL_NAME (fndecl))
606 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
607
608#if 0
609 /* Unless it's a call to a specific function that isn't alloca,
610 if it has one argument, we must assume it might be alloca. */
611
612 may_be_alloca =
613 (!(fndecl != 0 && strcmp (name, "alloca"))
614 && actparms != 0
615 && TREE_CHAIN (actparms) == 0);
616#else
617 /* We assume that alloca will always be called by name. It
618 makes no sense to pass it as a pointer-to-function to
619 anything that does not understand its behavior. */
620 may_be_alloca =
621 (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
622 && name[0] == 'a'
623 && ! strcmp (name, "alloca"))
624 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
625 && name[0] == '_'
626 && ! strcmp (name, "__builtin_alloca"))));
627#endif
628
629 /* See if this is a call to a function that can return more than once
630 or a call to longjmp. */
631
632 returns_twice = 0;
633 is_longjmp = 0;
634
635 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
636 {
637 char *tname = name;
638
639 if (name[0] == '_')
640 tname += ((name[1] == '_' && name[2] == 'x') ? 3 : 1);
641
642 if (tname[0] == 's')
643 {
644 returns_twice
645 = ((tname[1] == 'e'
646 && (! strcmp (tname, "setjmp")
647 || ! strcmp (tname, "setjmp_syscall")))
648 || (tname[1] == 'i'
649 && ! strcmp (tname, "sigsetjmp"))
650 || (tname[1] == 'a'
651 && ! strcmp (tname, "savectx")));
652 if (tname[1] == 'i'
653 && ! strcmp (tname, "siglongjmp"))
654 is_longjmp = 1;
655 }
656 else if ((tname[0] == 'q' && tname[1] == 's'
657 && ! strcmp (tname, "qsetjmp"))
658 || (tname[0] == 'v' && tname[1] == 'f'
659 && ! strcmp (tname, "vfork")))
660 returns_twice = 1;
661
662 else if (tname[0] == 'l' && tname[1] == 'o'
663 && ! strcmp (tname, "longjmp"))
664 is_longjmp = 1;
665 }
666
667 is_builtin_new
668 = (name != 0
669 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 13
670 && (!strcmp (name, "__builtin_new")));
671
672 if (may_be_alloca)
673 current_function_calls_alloca = 1;
674
675 /* Don't let pending stack adjusts add up to too much.
676 Also, do all pending adjustments now
677 if there is any chance this might be a call to alloca. */
678
679 if (pending_stack_adjust >= 32
680 || (pending_stack_adjust > 0 && may_be_alloca))
681 do_pending_stack_adjust ();
682
683 /* Operand 0 is a pointer-to-function; get the type of the function. */
684 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
685 if (TREE_CODE (funtype) != POINTER_TYPE)
686 abort ();
687 funtype = TREE_TYPE (funtype);
688
689 /* Push the temporary stack slot level so that we can free temporaries used
690 by each of the arguments separately. */
691 push_temp_slots ();
692
693 /* Start updating where the next arg would go. */
694 INIT_CUMULATIVE_ARGS (args_so_far, funtype, 0);
695
696 /* If struct_value_rtx is 0, it means pass the address
697 as if it were an extra parameter. */
698 if (structure_value_addr && struct_value_rtx == 0)
699 {
700 actparms
701 = tree_cons (error_mark_node,
702 make_tree (build_pointer_type (TREE_TYPE (funtype)),
703 force_reg (Pmode, structure_value_addr)),
704 actparms);
705 structure_value_addr_parm = 1;
706 }
707
708 /* Count the arguments and set NUM_ACTUALS. */
709 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
710 num_actuals = i;
711
712 /* Compute number of named args.
713 Normally, don't include the last named arg if anonymous args follow.
714 (If no anonymous args follow, the result of list_length
715 is actually one too large.)
716
717 If SETUP_INCOMING_VARARGS is defined, this machine will be able to
718 place unnamed args that were passed in registers into the stack. So
719 treat all args as named. This allows the insns emitted for a specific
720 argument list to be independent of the function declaration.
721
722 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
723 way to pass unnamed args in registers, so we must force them into
724 memory. */
725#ifndef SETUP_INCOMING_VARARGS
726 if (TYPE_ARG_TYPES (funtype) != 0)
727 n_named_args
728 = list_length (TYPE_ARG_TYPES (funtype)) - 1
729 /* Count the struct value address, if it is passed as a parm. */
730 + structure_value_addr_parm;
731 else
732#endif
733 /* If we know nothing, treat all args as named. */
734 n_named_args = num_actuals;
735
736 /* Make a vector to hold all the information about each arg. */
737 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
738 bzero (args, num_actuals * sizeof (struct arg_data));
739
740 args_size.constant = 0;
741 args_size.var = 0;
742
743 /* In this loop, we consider args in the order they are written.
744 We fill up ARGS from the front or from the back if necessary
745 so that in any case the first arg to be pushed ends up at the front. */
746
747#ifdef PUSH_ARGS_REVERSED
748 i = num_actuals - 1, inc = -1;
749 /* In this case, must reverse order of args
750 so that we compute and push the last arg first. */
751#else
752 i = 0, inc = 1;
753#endif
754
755 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
756 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
757 {
758 tree type = TREE_TYPE (TREE_VALUE (p));
759
760 args[i].tree_value = TREE_VALUE (p);
761
762 /* Replace erroneous argument with constant zero. */
763 if (type == error_mark_node || TYPE_SIZE (type) == 0)
764 args[i].tree_value = integer_zero_node, type = integer_type_node;
765
766 /* Decide where to pass this arg.
767
768 args[i].reg is nonzero if all or part is passed in registers.
769
770 args[i].partial is nonzero if part but not all is passed in registers,
771 and the exact value says how many words are passed in registers.
772
773 args[i].pass_on_stack is nonzero if the argument must at least be
774 computed on the stack. It may then be loaded back into registers
775 if args[i].reg is nonzero.
776
777 These decisions are driven by the FUNCTION_... macros and must agree
778 with those made by function.c. */
779
780#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
781 /* See if this argument should be passed by invisible reference. */
782 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type), type,
783 argpos < n_named_args))
784 {
785 /* We make a copy of the object and pass the address to the function
786 being called. */
787 int size = int_size_in_bytes (type);
788 rtx copy;
789
790 if (size < 0)
791 {
792 /* This is a variable-sized object. Make space on the stack
793 for it. */
794 rtx size_rtx = expand_expr (size_in_bytes (type), 0,
795 VOIDmode, 0);
796
797 if (old_stack_level == 0)
798 {
799 old_stack_level = copy_to_mode_reg (Pmode, stack_pointer_rtx);
800 old_pending_adj = pending_stack_adjust;
801 pending_stack_adjust = 0;
802 }
803
804 copy = gen_rtx (MEM, BLKmode,
805 allocate_dynamic_stack_space (size_rtx, 0));
806 }
807 else
808 copy = assign_stack_temp (TYPE_MODE (type), size, 1);
809
810 store_expr (args[i].tree_value, copy, 0);
811
812 args[i].tree_value = build1 (ADDR_EXPR, build_pointer_type (type),
813 make_tree (type, copy));
814 type = build_pointer_type (type);
815 }
816#endif
817
818 args[i].reg = FUNCTION_ARG (args_so_far, TYPE_MODE (type), type,
819 argpos < n_named_args);
820#ifdef FUNCTION_ARG_PARTIAL_NREGS
821 if (args[i].reg)
822 args[i].partial
823 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, TYPE_MODE (type), type,
824 argpos < n_named_args);
825#endif
826
827 args[i].pass_on_stack = MUST_PASS_IN_STACK (TYPE_MODE (type), type);
828
829 /* If FUNCTION_ARG returned an (expr_list (nil) FOO), it means that
830 we are to pass this arg in the register(s) designated by FOO, but
831 also to pass it in the stack. */
832 if (args[i].reg && GET_CODE (args[i].reg) == EXPR_LIST
833 && XEXP (args[i].reg, 0) == 0)
834 args[i].pass_on_stack = 1, args[i].reg = XEXP (args[i].reg, 1);
835
836 /* If this is an addressable type, we must preallocate the stack
837 since we must evaluate the object into its final location.
838
839 If this is to be passed in both registers and the stack, it is simpler
840 to preallocate. */
841 if (TREE_ADDRESSABLE (type)
842 || (args[i].pass_on_stack && args[i].reg != 0))
843 must_preallocate = 1;
844
845 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
846 we cannot consider this function call constant. */
847 if (TREE_ADDRESSABLE (type))
848 is_const = 0;
849
850 /* Compute the stack-size of this argument. */
851 if (args[i].reg == 0 || args[i].partial != 0
852#ifdef REG_PARM_STACK_SPACE
853 || REG_PARM_STACK_SPACE (fndecl) > 0
854#endif
855 || args[i].pass_on_stack)
856 locate_and_pad_parm (TYPE_MODE (type), type,
857#ifdef STACK_PARMS_IN_REG_PARM_AREA
858 1,
859#else
860 args[i].reg != 0,
861#endif
862 fndecl, &args_size, &args[i].offset,
863 &args[i].size);
864
865#ifndef ARGS_GROW_DOWNWARD
866 args[i].slot_offset = args_size;
867#endif
868
869#ifndef REG_PARM_STACK_SPACE
870 /* If a part of the arg was put into registers,
871 don't include that part in the amount pushed. */
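/* (The register part, args[i].partial words, is converted to bytes and
   rounded down to a multiple of the parameter alignment before it is
   subtracted.)  */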
872 if (! args[i].pass_on_stack)
873 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
874 / (PARM_BOUNDARY / BITS_PER_UNIT)
875 * (PARM_BOUNDARY / BITS_PER_UNIT));
876#endif
877
878 /* Update ARGS_SIZE, the total stack space for args so far. */
879
880 args_size.constant += args[i].size.constant;
881 if (args[i].size.var)
882 {
883 ADD_PARM_SIZE (args_size, args[i].size.var);
884 }
885
886 /* Since the slot offset points to the bottom of the slot,
887 we must record it after incrementing if the args grow down. */
888#ifdef ARGS_GROW_DOWNWARD
889 args[i].slot_offset = args_size;
890
891 args[i].slot_offset.constant = -args_size.constant;
892 if (args_size.var)
893 {
894 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
895 }
896#endif
897
898 /* Increment ARGS_SO_FAR, which has info about which arg-registers
899 have been used, etc. */
900
901 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
902 argpos < n_named_args);
903 }
904
905 /* Compute the actual size of the argument block required. The variable
906 and constant sizes must be combined, the size may have to be rounded,
907 and there may be a minimum required size. */
908
909 original_args_size = args_size;
910 if (args_size.var)
911 {
912 /* If this function requires a variable-sized argument list, don't try to
913 make a cse'able block for this call. We may be able to do this
914 eventually, but it is too complicated to keep track of what insns go
915 in the cse'able block and which don't. */
916
917 is_const = 0;
918 must_preallocate = 1;
919
920 args_size.var = ARGS_SIZE_TREE (args_size);
921 args_size.constant = 0;
922
923#ifdef STACK_BOUNDARY
924 if (STACK_BOUNDARY != BITS_PER_UNIT)
925 args_size.var = round_up (args_size.var, STACK_BYTES);
926#endif
927
928#ifdef REG_PARM_STACK_SPACE
929 if (REG_PARM_STACK_SPACE (fndecl) > 0)
930 {
931 args_size.var
932 = size_binop (MAX_EXPR, args_size.var,
933 size_int (REG_PARM_STACK_SPACE (fndecl)));
934
935#ifndef OUTGOING_REG_PARM_STACK_SPACE
936 /* The area corresponding to register parameters is not to count in
937 the size of the block we need. So make the adjustment. */
938 args_size.var
939 = size_binop (MINUS_EXPR, args_size.var,
940 size_int (REG_PARM_STACK_SPACE (fndecl)));
941#endif
942 }
943#endif
944 }
945 else
946 {
947#ifdef STACK_BOUNDARY
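/* Round the constant part of the argument-block size up to a multiple
   of STACK_BYTES.  */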
948 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
949 / STACK_BYTES) * STACK_BYTES);
950#endif
951
952#ifdef REG_PARM_STACK_SPACE
953 args_size.constant = MAX (args_size.constant,
954 REG_PARM_STACK_SPACE (fndecl));
955#ifndef OUTGOING_REG_PARM_STACK_SPACE
956 args_size.constant -= REG_PARM_STACK_SPACE (fndecl);
957#endif
958#endif
959 }
960
961 /* See if we have or want to preallocate stack space.
962
963 If we would have to push a partially-in-regs parm
964 before other stack parms, preallocate stack space instead.
965
966 If the size of some parm is not a multiple of the required stack
967 alignment, we must preallocate.
968
969 If the total size of arguments that would otherwise create a copy in
970 a temporary (such as a CALL) is more than half the total argument list
971 size, preallocation is faster.
972
973 Another reason to preallocate is if we have a machine (like the m88k)
974 where stack alignment is required to be maintained between every
975 pair of insns, not just when the call is made. However, we assume here
976 that such machines either do not have push insns (and hence preallocation
977 would occur anyway) or the problem is taken care of with
978 PUSH_ROUNDING. */
979
980 if (! must_preallocate)
981 {
982 int partial_seen = 0;
983 int copy_to_evaluate_size = 0;
984
985 for (i = 0; i < num_actuals && ! must_preallocate; i++)
986 {
987 if (args[i].partial > 0 && ! args[i].pass_on_stack)
988 partial_seen = 1;
989 else if (partial_seen && args[i].reg == 0)
990 must_preallocate = 1;
991
992 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
993 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
994 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
995 || TREE_CODE (args[i].tree_value) == COND_EXPR
996 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
997 copy_to_evaluate_size
998 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
999 }
1000
1001 if (copy_to_evaluate_size >= args_size.constant / 2)
1002 must_preallocate = 1;
1003 }
1004
1005 /* If the structure value address will reference the stack pointer, we must
1006 stabilize it. We don't need to do this if we know that we are not going
1007 to adjust the stack pointer in processing this call. */
1008
1009 if (structure_value_addr
1010 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1011 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1012 && (args_size.var
1013#ifndef ACCUMULATE_OUTGOING_ARGS
1014 || args_size.constant
1015#endif
1016 ))
1017 structure_value_addr = copy_to_reg (structure_value_addr);
1018
1019 /* If this function call is cse'able, precompute all the parameters.
1020 Note that if the parameter is constructed into a temporary, this will
1021 cause an additional copy because the parameter will be constructed
1022 into a temporary location and then copied into the outgoing arguments.
1023 If a parameter contains a call to alloca and this function uses the
1024 stack, precompute the parameter. */
1025
1026 for (i = 0; i < num_actuals; i++)
1027 if (is_const
1028 || ((args_size.var != 0 || args_size.constant != 0)
1029 && calls_alloca (args[i].tree_value)))
1030 {
1031 args[i].initial_value = args[i].value
1032 = expand_expr (args[i].tree_value, 0, VOIDmode, 0);
1033 preserve_temp_slots (args[i].value);
1034 free_temp_slots ();
1035
1036 /* ANSI doesn't require a sequence point here,
1037 but PCC has one, so this will avoid some problems. */
1038 emit_queue ();
1039 }
1040
1041 /* Now we are about to start emitting insns that can be deleted
1042 if a libcall is deleted. */
1043 if (is_const)
1044 start_sequence ();
1045
1046 /* If we have no actual push instructions, or shouldn't use them,
1047 make space for all args right now. */
1048
1049 if (args_size.var != 0)
1050 {
1051 if (old_stack_level == 0)
1052 {
1053 old_stack_level = copy_to_mode_reg (Pmode, stack_pointer_rtx);
1054 old_pending_adj = pending_stack_adjust;
1055 pending_stack_adjust = 0;
1056 }
1057 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1058 }
1059 else if (must_preallocate)
1060 {
1061 /* Note that we must go through the motions of allocating an argument
1062 block even if the size is zero because we may be storing args
1063 in the area reserved for register arguments, which may be part of
1064 the stack frame. */
1065 int needed = args_size.constant;
1066
1067#ifdef ACCUMULATE_OUTGOING_ARGS
1068 /* Store the maximum argument space used. It will be pushed by the
1069 prologue.
1070
1071 Since the stack pointer will never be pushed, it is possible for
1072 the evaluation of a parm to clobber something we have already
1073 written to the stack. Since most function calls on RISC machines
1074 do not use the stack, this is uncommon, but must work correctly.
1075
1076 Therefore, we save any area of the stack that was already written
1077 and that we are using. Here we set up to do this by making a new
1078 stack usage map from the old one. The actual save will be done
1079 by store_one_arg.
1080
1081 Another approach might be to try to reorder the argument
1082 evaluations to avoid this conflicting stack usage. */
1083
1084 if (needed > current_function_outgoing_args_size)
1085 current_function_outgoing_args_size = needed;
1086
1087#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1088 /* Since we will be writing into the entire argument area, the
1089 map must be allocated for its entire size, not just the part that
1090 is the responsibility of the caller. */
1091 needed += REG_PARM_STACK_SPACE (fndecl);
1092#endif
1093
1094#ifdef ARGS_GROW_DOWNWARD
1095 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1096 needed + 1);
1097#else
1098 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
1099#endif
1100 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1101
1102 if (initial_highest_arg_in_use)
1103 bcopy (initial_stack_usage_map, stack_usage_map,
1104 initial_highest_arg_in_use);
1105
1106 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1107 bzero (&stack_usage_map[initial_highest_arg_in_use],
1108 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1109 needed = 0;
1110 /* No need to copy this virtual register; the space we're
1111 using gets preallocated at the start of the function
1112 so the stack pointer won't change here. */
1113 argblock = virtual_outgoing_args_rtx;
1114#else /* not ACCUMULATE_OUTGOING_ARGS */
1115 if (inhibit_defer_pop == 0)
1116 {
1117 /* Try to reuse some or all of the pending_stack_adjust
1118 to get this space. Maybe we can avoid any pushing. */
1119 if (needed > pending_stack_adjust)
1120 {
1121 needed -= pending_stack_adjust;
1122 pending_stack_adjust = 0;
1123 }
1124 else
1125 {
1126 pending_stack_adjust -= needed;
1127 needed = 0;
1128 }
1129 }
1130 /* Special case this because overhead of `push_block' in this
1131 case is non-trivial. */
1132 if (needed == 0)
1133 argblock = virtual_outgoing_args_rtx;
1134 else
1135 argblock = push_block (gen_rtx (CONST_INT, VOIDmode, needed), 0, 0);
1136
1137 /* We only really need to call `copy_to_reg' in the case where push
1138 insns are going to be used to pass ARGBLOCK to a function
1139 call in ARGS. In that case, the stack pointer changes value
1140 from the allocation point to the call point, and hence
1141 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1142 But might as well always do it. */
1143 argblock = copy_to_reg (argblock);
1144#endif /* not ACCUMULATE_OUTGOING_ARGS */
1145 }
1146
1147 /* If we preallocated stack space, compute the address of each argument.
1148 We need not ensure it is a valid memory address here; it will be
1149 validized when it is used. */
1150 if (argblock)
1151 {
1152 rtx arg_reg = argblock;
1153 int arg_offset = 0;
1154
1155 if (GET_CODE (argblock) == PLUS)
1156 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1157
1158 for (i = 0; i < num_actuals; i++)
1159 {
1160 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1161 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1162 rtx addr;
1163
1164 /* Skip this parm if it will not be passed on the stack. */
1165 if (! args[i].pass_on_stack && args[i].reg != 0)
1166 continue;
1167
1168 if (GET_CODE (offset) == CONST_INT)
1169 addr = plus_constant (arg_reg, INTVAL (offset));
1170 else
1171 addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1172
1173 addr = plus_constant (addr, arg_offset);
1174 args[i].stack
1175 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (args[i].tree_value)), addr);
1176
1177 if (GET_CODE (slot_offset) == CONST_INT)
1178 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1179 else
1180 addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1181
1182 addr = plus_constant (addr, arg_offset);
1183 args[i].stack_slot
1184 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (args[i].tree_value)), addr);
1185 }
1186 }
1187
1188#ifdef PUSH_ARGS_REVERSED
1189#ifdef STACK_BOUNDARY
1190 /* If we push args individually in reverse order, perform stack alignment
1191 before the first push (the last arg). */
1192 if (argblock == 0)
1193 anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode,
1194 (args_size.constant
1195 - original_args_size.constant)));
1196#endif
1197#endif
1198
1199 /* Don't try to defer pops if preallocating, not even from the first arg,
1200 since ARGBLOCK probably refers to the SP. */
1201 if (argblock)
1202 NO_DEFER_POP;
1203
1204 /* Get the function to call, in the form of RTL. */
1205 if (fndecl)
1206 /* Get a SYMBOL_REF rtx for the function address. */
1207 funexp = XEXP (DECL_RTL (fndecl), 0);
1208 else
1209 /* Generate an rtx (probably a pseudo-register) for the address. */
1210 {
1211 funexp = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
1212 free_temp_slots (); /* FUNEXP can't be BLKmode */
1213 emit_queue ();
1214 }
1215
1216 /* Figure out the register where the value, if any, will come back. */
1217 valreg = 0;
1218 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1219 && ! structure_value_addr)
1220 {
1221 if (pcc_struct_value)
1222 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1223 fndecl);
1224 else
1225 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1226 }
1227
1228 /* Precompute all register parameters. It isn't safe to compute anything
1229 once we have started filling any specific hard regs. */
1230 reg_parm_seen = 0;
1231 for (i = 0; i < num_actuals; i++)
1232 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1233 {
1234 reg_parm_seen = 1;
1235
1236 if (args[i].value == 0)
1237 {
1238 args[i].value = expand_expr (args[i].tree_value, 0, VOIDmode, 0);
1239 preserve_temp_slots (args[i].value);
1240 free_temp_slots ();
1241
1242 /* ANSI doesn't require a sequence point here,
1243 but PCC has one, so this will avoid some problems. */
1244 emit_queue ();
1245 }
1246 }
1247
1248#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1249 /* The argument list is the property of the called routine and it
1250 may clobber it. If the fixed area has been used for previous
1251 parameters, we must save and restore it.
1252
1253 Here we compute the boundary of the area that needs to be saved, if any. */
1254
1255 for (i = 0; i < REG_PARM_STACK_SPACE (fndecl); i++)
1256 {
1257 if (i >= highest_outgoing_arg_in_use
1258 || stack_usage_map[i] == 0)
1259 continue;
1260
1261 if (low_to_save == -1)
1262 low_to_save = i;
1263
1264 high_to_save = i;
1265 }
1266
1267 if (low_to_save >= 0)
1268 {
1269 int num_to_save = high_to_save - low_to_save + 1;
1270 enum machine_mode save_mode
1271 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1272 rtx stack_area;
1273
1274 /* If we don't have the required alignment, must do this in BLKmode. */
1275 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1276 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1277 save_mode = BLKmode;
1278
1279 stack_area = gen_rtx (MEM, save_mode,
1280 memory_address (save_mode,
1281 plus_constant (argblock,
1282 low_to_save)));
1283 if (save_mode == BLKmode)
1284 {
1285 save_area = assign_stack_temp (BLKmode, num_to_save, 1);
1286 emit_block_move (validize_mem (save_area), stack_area,
1287 gen_rtx (CONST_INT, VOIDmode, num_to_save),
1288 PARM_BOUNDARY / BITS_PER_UNIT);
1289 }
1290 else
1291 {
1292 save_area = gen_reg_rtx (save_mode);
1293 emit_move_insn (save_area, stack_area);
1294 }
1295 }
1296#endif
1297
1298
1299 /* Now store (and compute if necessary) all non-register parms.
1300 These come before register parms, since they can require block-moves,
1301 which could clobber the registers used for register parms.
1302 Parms which have partial registers are not stored here,
1303 but we do preallocate space here if they want that. */
1304
1305 for (i = 0; i < num_actuals; i++)
1306 if (args[i].reg == 0 || args[i].pass_on_stack)
1307 store_one_arg (&args[i], argblock, may_be_alloca,
1308 args_size.var != 0, fndecl);
1309
1310 /* Now store any partially-in-registers parm.
1311 This is the last place a block-move can happen. */
1312 if (reg_parm_seen)
1313 for (i = 0; i < num_actuals; i++)
1314 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1315 store_one_arg (&args[i], argblock, may_be_alloca,
1316 args_size.var != 0, fndecl);
1317
1318#ifndef PUSH_ARGS_REVERSED
1319#ifdef STACK_BOUNDARY
1320 /* If we pushed args in forward order, perform stack alignment
1321 after pushing the last arg. */
1322 if (argblock == 0)
1323 anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode,
1324 (args_size.constant
1325 - original_args_size.constant)));
1326#endif
1327#endif
1328
1329 /* Pass the function the address in which to return a structure value. */
1330 if (structure_value_addr && ! structure_value_addr_parm)
1331 {
1332 emit_move_insn (struct_value_rtx,
1333 force_reg (Pmode,
1334 force_operand (structure_value_addr, 0)));
1335 if (GET_CODE (struct_value_rtx) == REG)
1336 {
1337 push_to_sequence (use_insns);
1338 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
1339 use_insns = get_insns ();
1340 end_sequence ();
1341 }
1342 }
1343
1344 /* Now do the register loads required for any wholly-register parms or any
1345 parms which are passed both on the stack and in a register. Their
1346 expressions were already evaluated.
1347
1348 Mark all register-parms as living through the call, putting these USE
1349 insns in a list headed by USE_INSNS. */
1350
1351 for (i = 0; i < num_actuals; i++)
1352 {
1353 rtx list = args[i].reg;
1354 int partial = args[i].partial;
1355
1356 while (list)
1357 {
1358 rtx reg;
1359 int nregs;
1360
1361 /* Process each register that needs to get this arg. */
1362 if (GET_CODE (list) == EXPR_LIST)
1363 reg = XEXP (list, 0), list = XEXP (list, 1);
1364 else
1365 reg = list, list = 0;
1366
1367 /* Set to non-zero if must move a word at a time, even if just one
1368 word (e.g., partial == 1 && mode == DFmode). Set to zero if
1369 we just use a normal move insn. */
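/* For a BLKmode value, NREGS is its size in words, rounded up.  */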
1370 nregs = (partial ? partial
1371 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1372 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1373 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1374 : 0));
1375
1376 /* If simple case, just do move. If normal partial, store_one_arg
1377 has already loaded the register for us. In all other cases,
1378 load the register(s) from memory. */
1379
1380 if (nregs == 0)
1381 emit_move_insn (reg, args[i].value);
1382 else if (args[i].partial == 0 || args[i].pass_on_stack)
1383 move_block_to_reg (REGNO (reg),
1384 validize_mem (args[i].value), nregs,
1385 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
1386
1387 push_to_sequence (use_insns);
1388 if (nregs == 0)
1389 emit_insn (gen_rtx (USE, VOIDmode, reg));
1390 else
1391 use_regs (REGNO (reg), nregs);
1392 use_insns = get_insns ();
1393 end_sequence ();
1394
1395 /* PARTIAL referred only to the first register, so clear it for the
1396 next time. */
1397 partial = 0;
1398 }
1399 }
1400
1401 /* Perform postincrements before actually calling the function. */
1402 emit_queue ();
1403
1404 /* All arguments and registers used for the call must be set up by now! */
1405
1406 funexp = prepare_call_address (funexp, fndecl, &use_insns);
1407
1408 /* Generate the actual call instruction. */
1409 emit_call_1 (funexp, funtype, args_size.constant, struct_value_size,
1410 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1411 valreg, old_inhibit_defer_pop, use_insns, is_const);
1412
1413 /* If call is cse'able, make appropriate pair of reg-notes around it.
1414 Test valreg so we don't crash; may safely ignore `const'
1415 if return type is void. */
1416 if (is_const && valreg != 0)
1417 {
1418 rtx note = 0;
1419 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1420 rtx insns;
1421
1422 /* Construct an "equal form" for the value which mentions all the
1423 arguments in order as well as the function name. */
1424#ifdef PUSH_ARGS_REVERSED
1425 for (i = 0; i < num_actuals; i++)
1426 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1427#else
1428 for (i = num_actuals - 1; i >= 0; i--)
1429 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1430#endif
1431 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1432
1433 insns = get_insns ();
1434 end_sequence ();
1435
1436 emit_libcall_block (insns, temp, valreg, note);
1437
1438 valreg = temp;
1439 }
1440
1441 /* For calls to `setjmp', etc., inform flow.c it should complain
1442 if nonvolatile values are live. */
1443
1444 if (returns_twice)
1445 {
1446 emit_note (name, NOTE_INSN_SETJMP);
1447 current_function_calls_setjmp = 1;
1448 }
1449
1450 if (is_longjmp)
1451 current_function_calls_longjmp = 1;
1452
1453 /* Notice functions that cannot return.
1454 If optimizing, insns emitted below will be dead.
1455 If not optimizing, they will exist, which is useful
1456 if the user uses the `return' command in the debugger. */
1457
1458 if (is_volatile || is_longjmp)
1459 emit_barrier ();
1460
1461 /* For calls to __builtin_new, note that it can never return 0.
1462 This is because a new handler will be called, and 0 is not
1463 among the numbers it is supposed to return. */
1464#if 0
1465 if (is_builtin_new)
1466 emit_note (name, NOTE_INSN_BUILTIN_NEW);
1467#endif
1468
1469 /* If value type not void, return an rtx for the value. */
1470
1471 /* If there are cleanups to be called, don't use a hard reg as target. */
1472 if (cleanups_this_call != old_cleanups
1473 && target && REG_P (target)
1474 && REGNO (target) < FIRST_PSEUDO_REGISTER)
1475 target = 0;
1476
1477 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1478 || ignore)
1479 {
1480 target = const0_rtx;
1481 }
1482 else if (structure_value_addr)
1483 {
1484 if (target == 0 || GET_CODE (target) != MEM)
1485 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1486 memory_address (TYPE_MODE (TREE_TYPE (exp)),
1487 structure_value_addr));
1488 }
1489 else if (pcc_struct_value)
1490 {
1491 if (target == 0)
1492 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1493 copy_to_reg (valreg));
1494 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1495 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1496 copy_to_reg (valreg)));
1497 else
1498 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
1499 expr_size (exp),
1500 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
1501 }
1502 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp)))
1503 /* TARGET and VALREG cannot be equal at this point because the latter
1504 would not have REG_FUNCTION_VALUE_P true, while the former would if
1505 it were referring to the same register.
1506
1507 If they refer to the same register, this move will be a no-op, except
1508 when function inlining is being done. */
1509 emit_move_insn (target, valreg);
1510 else
1511 target = copy_to_reg (valreg);
1512
1513 /* Perform all cleanups needed for the arguments of this call
1514 (i.e. destructors in C++). */
1515 expand_cleanups_to (old_cleanups);
1516
1517 /* If size of args is variable, restore saved stack-pointer value. */
1518
1519 if (old_stack_level)
1520 {
1521 emit_move_insn (stack_pointer_rtx, old_stack_level);
1522 pending_stack_adjust = old_pending_adj;
1523 }
1524
1525#ifdef ACCUMULATE_OUTGOING_ARGS
1526 else
1527 {
1528#ifdef REG_PARM_STACK_SPACE
1529 if (save_area)
1530 {
1531 enum machine_mode save_mode = GET_MODE (save_area);
1532 rtx stack_area
1533 = gen_rtx (MEM, save_mode,
1534 memory_address (save_mode,
1535 plus_constant (argblock, low_to_save)));
1536
1537 if (save_mode != BLKmode)
1538 emit_move_insn (stack_area, save_area);
1539 else
1540 emit_block_move (stack_area, validize_mem (save_area),
1541 gen_rtx (CONST_INT, VOIDmode,
1542 high_to_save - low_to_save + 1),
1543 PARM_BOUNDARY / BITS_PER_UNIT);
1544 }
1545#endif
1546
1547 /* If we saved any argument areas, restore them. */
1548 for (i = 0; i < num_actuals; i++)
1549 if (args[i].save_area)
1550 {
1551 enum machine_mode save_mode = GET_MODE (args[i].save_area);
1552 rtx stack_area
1553 = gen_rtx (MEM, save_mode,
1554 memory_address (save_mode,
1555 XEXP (args[i].stack_slot, 0)));
1556
1557 if (save_mode != BLKmode)
1558 emit_move_insn (stack_area, args[i].save_area);
1559 else
1560 emit_block_move (stack_area, validize_mem (args[i].save_area),
1561 gen_rtx (CONST_INT, VOIDmode,
1562 args[i].size.constant),
1563 PARM_BOUNDARY / BITS_PER_UNIT);
1564 }
1565
1566 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
1567 stack_usage_map = initial_stack_usage_map;
1568 }
1569#endif
1570
1571 /* If this was alloca, record the new stack level for nonlocal gotos. */
1572 if (may_be_alloca && nonlocal_goto_stack_level != 0)
1573 emit_move_insn (nonlocal_goto_stack_level, stack_pointer_rtx);
1574
1575 pop_temp_slots ();
1576
1577 return target;
1578}
1579\f
1580#if 0
1581/* Return an rtx which represents a suitable home on the stack
1582 given TYPE, the type of the argument looking for a home.
1583 This is called only for BLKmode arguments.
1584
1585 SIZE is the size needed for this target.
1586 ARGS_ADDR is the address of the bottom of the argument block for this call.
1587 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
1588 if this machine uses push insns. */
1589
1590static rtx
1591target_for_arg (type, size, args_addr, offset)
1592 tree type;
1593 rtx size;
1594 rtx args_addr;
1595 struct args_size offset;
1596{
1597 rtx target;
1598 rtx offset_rtx = ARGS_SIZE_RTX (offset);
1599
1600 /* We do not call memory_address if possible,
1601 because we want to address as close to the stack
1602 as possible. For non-variable sized arguments,
1603 this will be stack-pointer relative addressing. */
1604 if (GET_CODE (offset_rtx) == CONST_INT)
1605 target = plus_constant (args_addr, INTVAL (offset_rtx));
1606 else
1607 {
1608 /* I have no idea how to guarantee that this
1609 will work in the presence of register parameters. */
1610 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
1611 target = memory_address (QImode, target);
1612 }
1613
1614 return gen_rtx (MEM, BLKmode, target);
1615}
1616#endif
1617\f
1618/* Store a single argument for a function call
1619 into the register or memory area where it must be passed.
1620 *ARG describes the argument value and where to pass it.
1621
1622 ARGBLOCK is the address of the stack-block for all the arguments,
1623 or 0 on a machine where arguments are pushed individually.
1624
1625 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
1626 so must be careful about how the stack is used.
1627
1628 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
1629 argument stack. If ACCUMULATE_OUTGOING_ARGS is defined, it indicates
1630 that we need not worry about saving and restoring the stack.
1631
1632 FNDECL is the declaration of the function we are calling. */
1633
1634static void
1635store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl)
1636 struct arg_data *arg;
1637 rtx argblock;
1638 int may_be_alloca;
1639 int variable_size;
1640 tree fndecl;
1641{
1642 register tree pval = arg->tree_value;
1643 rtx reg = 0;
1644 int partial = 0;
1645 int used = 0;
1646 int i, lower_bound, upper_bound;
1647
1648 if (TREE_CODE (pval) == ERROR_MARK)
1649 return;
1650
1651#ifdef ACCUMULATE_OUTGOING_ARGS
1652 /* If this is being stored into a pre-allocated, fixed-size, stack area,
1653 save any previous data at that location. */
1654 if (argblock && ! variable_size && arg->stack)
1655 {
1656#ifdef ARGS_GROW_DOWNWARD
1657 /* stack_slot is negative, but we want to index stack_usage_map
1658 with positive values. */
1659 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
1660 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
1661 else
1662 abort ();
1663
1664 lower_bound = upper_bound - arg->size.constant;
1665#else
1666 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
1667 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
1668 else
1669 lower_bound = 0;
1670
1671 upper_bound = lower_bound + arg->size.constant;
1672#endif
1673
1674 for (i = lower_bound; i < upper_bound; i++)
1675 if (stack_usage_map[i]
1676#ifdef REG_PARM_STACK_SPACE
1677 /* Don't store things in the fixed argument area at this point;
1678 it has already been saved. */
1679 && i > REG_PARM_STACK_SPACE (fndecl)
1680#endif
1681 )
1682 break;
1683
1684 if (i != upper_bound)
1685 {
1686 /* We need to make a save area. See what mode we can make it. */
1687 enum machine_mode save_mode
1688 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
1689 rtx stack_area
1690 = gen_rtx (MEM, save_mode,
1691 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
1692
1693 if (save_mode == BLKmode)
1694 {
1695 arg->save_area = assign_stack_temp (BLKmode,
1696 arg->size.constant, 1);
1697 emit_block_move (validize_mem (arg->save_area), stack_area,
1698 gen_rtx (CONST_INT, VOIDmode,
1699 arg->size.constant),
1700 PARM_BOUNDARY / BITS_PER_UNIT);
1701 }
1702 else
1703 {
1704 arg->save_area = gen_reg_rtx (save_mode);
1705 emit_move_insn (arg->save_area, stack_area);
1706 }
1707 }
1708 }
1709#endif
1710
1711 /* If this isn't going to be placed on both the stack and in registers,
1712 set up the register and number of words. */
1713 if (! arg->pass_on_stack)
1714 reg = arg->reg, partial = arg->partial;
1715
1716 if (reg != 0 && partial == 0)
1717 /* Being passed entirely in a register. We shouldn't be called in
1718 this case. */
1719 abort ();
1720
1721 /* If this is being partially passed in a register, but multiple locations
1722 are specified, we assume that the one partially used is the one that is
1723 listed first. */
1724 if (reg && GET_CODE (reg) == EXPR_LIST)
1725 reg = XEXP (reg, 0);
1726
1727 /* If this is being passed partially in a register, we can't evaluate
1728 it directly into its stack slot. Otherwise, we can. */
1729 if (arg->value == 0)
1730 arg->value = expand_expr (pval, partial ? 0 : arg->stack, VOIDmode, 0);
1731
1732 /* Don't allow anything left on stack from computation
1733 of argument to alloca. */
1734 if (may_be_alloca)
1735 do_pending_stack_adjust ();
1736
1737 if (arg->value == arg->stack)
1738 /* If the value is already in the stack slot, we are done. */
1739 ;
1740 else if (TYPE_MODE (TREE_TYPE (pval)) != BLKmode)
1741 {
1742 register int size;
1743
1744 /* Argument is a scalar, not entirely passed in registers.
1745 (If part is passed in registers, arg->partial says how much
1746 and emit_push_insn will take care of putting it there.)
1747
1748 Push it, and if its size is less than the
1749 amount of space allocated to it,
1750 also bump stack pointer by the additional space.
1751 Note that in C the default argument promotions
1752 will prevent such mismatches. */
1753
1754 size = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (pval)));
1755 /* Compute how much space the push instruction will push.
1756 On many machines, pushing a byte will advance the stack
1757 pointer by a halfword. */
1758#ifdef PUSH_ROUNDING
1759 size = PUSH_ROUNDING (size);
1760#endif
1761 used = size;
1762
1763 /* Compute how much space the argument should get:
1764 round up to a multiple of the alignment for arguments. */
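/* (For example, with PARM_BOUNDARY == 32 a padded 1-byte argument
   gets 4 bytes of space.)  */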
1765 if (none != FUNCTION_ARG_PADDING (TYPE_MODE (TREE_TYPE (pval)),
1766 TREE_TYPE (pval)))
1767 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
1768 / (PARM_BOUNDARY / BITS_PER_UNIT))
1769 * (PARM_BOUNDARY / BITS_PER_UNIT));
1770
1771 /* This isn't already where we want it on the stack, so put it there.
1772 This can either be done with push or copy insns. */
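/* USED - SIZE gives any padding needed beyond the value itself.  */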
1773 emit_push_insn (arg->value, TYPE_MODE (TREE_TYPE (pval)),
1774 TREE_TYPE (pval), 0, 0, partial, reg,
1775 used - size, argblock, ARGS_SIZE_RTX (arg->offset));
1776 }
1777 else
1778 {
1779 /* BLKmode, at least partly to be pushed. */
1780
1781 register int excess;
1782 rtx size_rtx;
1783
1784 /* Pushing a nonscalar.
1785 If part is passed in registers, PARTIAL says how much
1786 and emit_push_insn will take care of putting it there. */
1787
1788 /* Round its size up to a multiple
1789 of the allocation unit for arguments. */
1790
1791 if (arg->size.var != 0)
1792 {
1793 excess = 0;
1794 size_rtx = ARGS_SIZE_RTX (arg->size);
1795 }
1796 else
1797 {
1798 register tree size = size_in_bytes (TREE_TYPE (pval));
1799 /* PUSH_ROUNDING has no effect on us, because
1800 emit_push_insn for BLKmode is careful to avoid it. */
1801 excess = (arg->size.constant - TREE_INT_CST_LOW (size)
1802 + partial * UNITS_PER_WORD);
1803 size_rtx = expand_expr (size, 0, VOIDmode, 0);
1804 }
1805
1806 emit_push_insn (arg->value, TYPE_MODE (TREE_TYPE (pval)),
1807 TREE_TYPE (pval), size_rtx,
1808 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
1809 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
1810 }
1811
1812
1813 /* Unless this is a partially-in-register argument, the argument is now
1814 in the stack.
1815
1816 ??? Note that this can change arg->value from arg->stack to
1817 arg->stack_slot and it matters when they are not the same.
1818 It isn't totally clear that this is correct in all cases. */
1819 if (partial == 0)
1820 arg->value = arg->stack_slot;
1821
1822 /* Once we have pushed something, pops can't safely
1823 be deferred during the rest of the arguments. */
1824 NO_DEFER_POP;
1825
1826 /* ANSI doesn't require a sequence point here,
1827 but PCC has one, so this will avoid some problems. */
1828 emit_queue ();
1829
1830 /* Free any temporary slots made in processing this argument. */
1831 free_temp_slots ();
1832
1833#ifdef ACCUMULATE_OUTGOING_ARGS
1834 /* Now mark the segment we just used. */
1835 if (argblock && ! variable_size && arg->stack)
1836 for (i = lower_bound; i < upper_bound; i++)
1837 stack_usage_map[i] = 1;
1838#endif
1839}