1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "expr.h"
28 #include "libfuncs.h"
29 #include "function.h"
30 #include "regs.h"
31 #include "toplev.h"
32 #include "output.h"
33 #include "tm_p.h"
34 #include "timevar.h"
35 #include "sbitmap.h"
36
37 #if !defined FUNCTION_OK_FOR_SIBCALL
38 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
39 #endif
40
41 /* Decide whether a function's arguments should be processed
42 from first to last or from last to first.
43
44 They should if the stack and args grow in opposite directions, but
45 only if we have push insns. */
46
47 #ifdef PUSH_ROUNDING
48
49 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
50 #define PUSH_ARGS_REVERSED PUSH_ARGS
51 #endif
52
53 #endif
54
55 #ifndef PUSH_ARGS_REVERSED
56 #define PUSH_ARGS_REVERSED 0
57 #endif
58
59 #ifndef STACK_POINTER_OFFSET
60 #define STACK_POINTER_OFFSET 0
61 #endif
62
63 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
64 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
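/* For instance, with a PREFERRED_STACK_BOUNDARY of 128 bits and the usual
   8-bit BITS_PER_UNIT, STACK_BYTES works out to 128 / 8 = 16 bytes.  */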
65
66 /* Data structure and subroutines used within expand_call. */
67
68 struct arg_data
69 {
70 /* Tree node for this argument. */
71 tree tree_value;
72 /* Mode for value; TYPE_MODE unless promoted. */
73 enum machine_mode mode;
74 /* Current RTL value for argument, or 0 if it isn't precomputed. */
75 rtx value;
76 /* Initially-computed RTL value for argument; only for const functions. */
77 rtx initial_value;
78 /* Register to pass this argument in, 0 if passed on stack, or a
79 PARALLEL if the arg is to be copied into multiple non-contiguous
80 registers. */
81 rtx reg;
82 /* Register to pass this argument in when generating tail call sequence.
83 This is not the same register as for normal calls on machines with
84 register windows. */
85 rtx tail_call_reg;
86 /* If REG was promoted from the actual mode of the argument expression,
87 indicates whether the promotion is sign- or zero-extended. */
88 int unsignedp;
89 /* Number of registers to use. 0 means put the whole arg in registers.
90 Also 0 if not passed in registers. */
91 int partial;
92 /* Non-zero if argument must be passed on stack.
93 Note that some arguments may be passed on the stack
94 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
95 pass_on_stack identifies arguments that *cannot* go in registers. */
96 int pass_on_stack;
97 /* Offset of this argument from beginning of stack-args. */
98 struct args_size offset;
99 /* Similar, but offset to the start of the stack slot. Different from
100 OFFSET if this arg pads downward. */
101 struct args_size slot_offset;
102 /* Size of this argument on the stack, rounded up for any padding it gets;
103 parts of the argument passed in registers do not count.
104 If REG_PARM_STACK_SPACE is defined, then register parms
105 are counted here as well. */
106 struct args_size size;
107 /* Location on the stack at which parameter should be stored. The store
108 has already been done if STACK == VALUE. */
109 rtx stack;
110 /* Location on the stack of the start of this argument slot. This can
111 differ from STACK if this arg pads downward. This location is known
112 to be aligned to FUNCTION_ARG_BOUNDARY. */
113 rtx stack_slot;
114 /* Place that this stack area has been saved, if needed. */
115 rtx save_area;
116 /* If an argument's alignment does not permit direct copying into registers,
117 copy in smaller-sized pieces into pseudos. These are stored in a
118 block pointed to by this field. The next field says how many
119 word-sized pseudos we made. */
120 rtx *aligned_regs;
121 int n_aligned_regs;
122 /* The amount that the stack pointer needs to be adjusted to
123 force alignment for the next argument. */
124 struct args_size alignment_pad;
125 };
126
127 /* A vector of one char per byte of stack space. A byte is non-zero if
128 the corresponding stack location has been used.
129 This vector is used to prevent a function call within an argument from
130 clobbering any stack already set up. */
131 static char *stack_usage_map;
132
133 /* Size of STACK_USAGE_MAP. */
134 static int highest_outgoing_arg_in_use;
135
136 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
137 stack location's tail call argument has already been stored into the stack.
138 This bitmap is used to prevent sibling call optimization if the function tries
139 to use its parent's incoming argument slots when they have already been
140 overwritten with tail call arguments. */
141 static sbitmap stored_args_map;
142
143 /* stack_arg_under_construction is nonzero when an argument may be
144 initialized with a constructor call (including a C function that
145 returns a BLKmode struct) and expand_call must take special action
146 to make sure the object being constructed does not overlap the
147 argument list for the constructor call. */
148 int stack_arg_under_construction;
149
150 static int calls_function PARAMS ((tree, int));
151 static int calls_function_1 PARAMS ((tree, int));
152
153 /* Nonzero if this is a call to a `const' function. */
154 #define ECF_CONST 1
155 /* Nonzero if this is a call to a `volatile' function. */
156 #define ECF_NORETURN 2
157 /* Nonzero if this is a call to malloc or a related function. */
158 #define ECF_MALLOC 4
159 /* Nonzero if it is plausible that this is a call to alloca. */
160 #define ECF_MAY_BE_ALLOCA 8
161 /* Nonzero if this is a call to a function that won't throw an exception. */
162 #define ECF_NOTHROW 16
163 /* Nonzero if this is a call to setjmp or a related function. */
164 #define ECF_RETURNS_TWICE 32
165 /* Nonzero if this is a call to `longjmp'. */
166 #define ECF_LONGJMP 64
167 /* Nonzero if this is a syscall that makes a new process in the image of
168 the current one. */
169 #define ECF_FORK_OR_EXEC 128
170 #define ECF_SIBCALL 256
171 /* Nonzero if this is a call to a "pure" function (like a const function,
172 but may read memory). */
173 #define ECF_PURE 512
174 /* Nonzero if this is a call to a function that returns with the stack
175 pointer depressed. */
176 #define ECF_SP_DEPRESSED 1024
177 /* Nonzero if this call is known to always return. */
178 #define ECF_ALWAYS_RETURN 2048
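/* These ECF_* values are independent bit flags: a call's properties are
   summarized by OR-ing the relevant flags into one mask (see
   flags_from_decl_or_type and special_function_p below) and testing the
   mask with bitwise AND, roughly:

       if (flags & (ECF_CONST | ECF_PURE))
         ... the call has no side effects the caller can observe ...

   emit_call_1 receives such a mask as its ECF_FLAGS argument.  */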
179
180 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
181 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
182 rtx, int, rtx, int));
183 static void precompute_register_parameters PARAMS ((int,
184 struct arg_data *,
185 int *));
186 static int store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
187 int));
188 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
189 int));
190 static int finalize_must_preallocate PARAMS ((int, int,
191 struct arg_data *,
192 struct args_size *));
193 static void precompute_arguments PARAMS ((int, int,
194 struct arg_data *));
195 static int compute_argument_block_size PARAMS ((int,
196 struct args_size *,
197 int));
198 static void initialize_argument_information PARAMS ((int,
199 struct arg_data *,
200 struct args_size *,
201 int, tree, tree,
202 CUMULATIVE_ARGS *,
203 int, rtx *, int *,
204 int *, int *));
205 static void compute_argument_addresses PARAMS ((struct arg_data *,
206 rtx, int));
207 static rtx rtx_for_function_call PARAMS ((tree, tree));
208 static void load_register_parameters PARAMS ((struct arg_data *,
209 int, rtx *, int));
210 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx,
211 enum libcall_type,
212 enum machine_mode,
213 int, va_list));
214 static int special_function_p PARAMS ((tree, int));
215 static int flags_from_decl_or_type PARAMS ((tree));
216 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
217 int, tree, rtx));
218 static int check_sibcall_argument_overlap_1 PARAMS ((rtx));
219 static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *));
220
221 static int combine_pending_stack_adjustment_and_call
222 PARAMS ((int, struct args_size *, int));
223
224 #ifdef REG_PARM_STACK_SPACE
225 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
226 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
227 #endif
228 \f
229 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
230 `alloca'.
231
232 If WHICH is 0, return 1 if EXP contains a call to any function.
233 Actually, we only need to return 1 if evaluating EXP would require pushing
234 arguments on the stack, but that is too difficult to compute, so we just
235 assume any function call might require the stack. */
236
237 static tree calls_function_save_exprs;
238
239 static int
240 calls_function (exp, which)
241 tree exp;
242 int which;
243 {
244 int val;
245
246 calls_function_save_exprs = 0;
247 val = calls_function_1 (exp, which);
248 calls_function_save_exprs = 0;
249 return val;
250 }
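/* For example, precompute_arguments below calls
   calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS):
   when outgoing arguments are accumulated it asks whether the argument
   contains any call at all (WHICH == 0); otherwise it asks only whether
   the argument might call alloca (WHICH == 1).  */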
251
252 /* Recursive function to do the work of above function. */
253
254 static int
255 calls_function_1 (exp, which)
256 tree exp;
257 int which;
258 {
259 register int i;
260 enum tree_code code = TREE_CODE (exp);
261 int class = TREE_CODE_CLASS (code);
262 int length = first_rtl_op (code);
263
264 /* If this code is language-specific, we don't know what it will do. */
265 if ((int) code >= NUM_TREE_CODES)
266 return 1;
267
268 switch (code)
269 {
270 case CALL_EXPR:
271 if (which == 0)
272 return 1;
273 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
274 == FUNCTION_TYPE)
275 && (TYPE_RETURNS_STACK_DEPRESSED
276 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
277 return 1;
278 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
279 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
280 == FUNCTION_DECL)
281 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
282 0)
283 & ECF_MAY_BE_ALLOCA))
284 return 1;
285
286 break;
287
288 case CONSTRUCTOR:
289 {
290 tree tem;
291
292 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
293 if (calls_function_1 (TREE_VALUE (tem), which))
294 return 1;
295 }
296
297 return 0;
298
299 case SAVE_EXPR:
300 if (SAVE_EXPR_RTL (exp) != 0)
301 return 0;
302 if (value_member (exp, calls_function_save_exprs))
303 return 0;
304 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
305 calls_function_save_exprs);
306 return (TREE_OPERAND (exp, 0) != 0
307 && calls_function_1 (TREE_OPERAND (exp, 0), which));
308
309 case BLOCK:
310 {
311 register tree local;
312 register tree subblock;
313
314 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
315 if (DECL_INITIAL (local) != 0
316 && calls_function_1 (DECL_INITIAL (local), which))
317 return 1;
318
319 for (subblock = BLOCK_SUBBLOCKS (exp);
320 subblock;
321 subblock = TREE_CHAIN (subblock))
322 if (calls_function_1 (subblock, which))
323 return 1;
324 }
325 return 0;
326
327 case TREE_LIST:
328 for (; exp != 0; exp = TREE_CHAIN (exp))
329 if (calls_function_1 (TREE_VALUE (exp), which))
330 return 1;
331 return 0;
332
333 default:
334 break;
335 }
336
337 /* Only expressions, references, and blocks can contain calls. */
338 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
339 return 0;
340
341 for (i = 0; i < length; i++)
342 if (TREE_OPERAND (exp, i) != 0
343 && calls_function_1 (TREE_OPERAND (exp, i), which))
344 return 1;
345
346 return 0;
347 }
348 \f
349 /* Force FUNEXP into a form suitable for the address of a CALL,
350 and return that as an rtx. Also load the static chain register
351 if FNDECL is a nested function.
352
353 CALL_FUSAGE points to a variable holding the prospective
354 CALL_INSN_FUNCTION_USAGE information. */
355
356 rtx
357 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
358 rtx funexp;
359 tree fndecl;
360 rtx *call_fusage;
361 int reg_parm_seen;
362 int sibcallp;
363 {
364 rtx static_chain_value = 0;
365
366 funexp = protect_from_queue (funexp, 0);
367
368 if (fndecl != 0)
369 /* Get possible static chain value for nested function in C. */
370 static_chain_value = lookup_static_chain (fndecl);
371
372 /* Make a valid memory address and copy constants thru pseudo-regs,
373 but not for a constant address if -fno-function-cse. */
374 if (GET_CODE (funexp) != SYMBOL_REF)
375 /* If we are using registers for parameters, force the
376 function address into a register now. */
377 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
378 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
379 : memory_address (FUNCTION_MODE, funexp));
380 else if (! sibcallp)
381 {
382 #ifndef NO_FUNCTION_CSE
383 if (optimize && ! flag_no_function_cse)
384 #ifdef NO_RECURSIVE_FUNCTION_CSE
385 if (fndecl != current_function_decl)
386 #endif
387 funexp = force_reg (Pmode, funexp);
388 #endif
389 }
390
391 if (static_chain_value != 0)
392 {
393 emit_move_insn (static_chain_rtx, static_chain_value);
394
395 if (GET_CODE (static_chain_rtx) == REG)
396 use_reg (call_fusage, static_chain_rtx);
397 }
398
399 return funexp;
400 }
401
402 /* Generate instructions to call function FUNEXP,
403 and optionally pop the results.
404 The CALL_INSN is the first insn generated.
405
406 FNDECL is the declaration node of the function. This is given to the
407 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
408
409 FUNTYPE is the data type of the function. This is given to the macro
410 RETURN_POPS_ARGS to determine whether this function pops its own args.
411 We used to allow an identifier for library functions, but that doesn't
412 work when the return type is an aggregate type and the calling convention
413 says that the pointer to this aggregate is to be popped by the callee.
414
415 STACK_SIZE is the number of bytes of arguments on the stack,
416 ROUNDED_STACK_SIZE is that number rounded up to
417 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
418 both to put into the call insn and to generate explicit popping
419 code if necessary.
420
421 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
422 It is zero if this call doesn't want a structure value.
423
424 NEXT_ARG_REG is the rtx that results from executing
425 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
426 just after all the args have had their registers assigned.
427 This could be whatever you like, but normally it is the first
428 arg-register beyond those used for args in this call,
429 or 0 if all the arg-registers are used in this call.
430 It is passed on to `gen_call' so you can put this info in the call insn.
431
432 VALREG is a hard register in which a value is returned,
433 or 0 if the call does not return a value.
434
435 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
436 the args to this call were processed.
437 We restore `inhibit_defer_pop' to that value.
438
439 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
440 denote registers used by the called function. */
441
442 static void
443 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
444 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
445 call_fusage, ecf_flags)
446 rtx funexp;
447 tree fndecl ATTRIBUTE_UNUSED;
448 tree funtype ATTRIBUTE_UNUSED;
449 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
450 HOST_WIDE_INT rounded_stack_size;
451 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
452 rtx next_arg_reg;
453 rtx valreg;
454 int old_inhibit_defer_pop;
455 rtx call_fusage;
456 int ecf_flags;
457 {
458 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
459 rtx call_insn;
460 int already_popped = 0;
461 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
462 #if defined (HAVE_call) && defined (HAVE_call_value)
463 rtx struct_value_size_rtx;
464 struct_value_size_rtx = GEN_INT (struct_value_size);
465 #endif
466
467 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
468 and we don't want to load it into a register as an optimization,
469 because prepare_call_address already did it if it should be done. */
470 if (GET_CODE (funexp) != SYMBOL_REF)
471 funexp = memory_address (FUNCTION_MODE, funexp);
472
473 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
474 if ((ecf_flags & ECF_SIBCALL)
475 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
476 && (n_popped > 0 || stack_size == 0))
477 {
478 rtx n_pop = GEN_INT (n_popped);
479 rtx pat;
480
481 /* If this subroutine pops its own args, record that in the call insn
482 if possible, for the sake of frame pointer elimination. */
483
484 if (valreg)
485 pat = GEN_SIBCALL_VALUE_POP (valreg,
486 gen_rtx_MEM (FUNCTION_MODE, funexp),
487 rounded_stack_size_rtx, next_arg_reg,
488 n_pop);
489 else
490 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
491 rounded_stack_size_rtx, next_arg_reg, n_pop);
492
493 emit_call_insn (pat);
494 already_popped = 1;
495 }
496 else
497 #endif
498
499 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
500 /* If the target has "call" or "call_value" insns, then prefer them
501 if no arguments are actually popped. If the target does not have
502 "call" or "call_value" insns, then we must use the popping versions
503 even if the call has no arguments to pop. */
504 #if defined (HAVE_call) && defined (HAVE_call_value)
505 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
506 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
507 #else
508 if (HAVE_call_pop && HAVE_call_value_pop)
509 #endif
510 {
511 rtx n_pop = GEN_INT (n_popped);
512 rtx pat;
513
514 /* If this subroutine pops its own args, record that in the call insn
515 if possible, for the sake of frame pointer elimination. */
516
517 if (valreg)
518 pat = GEN_CALL_VALUE_POP (valreg,
519 gen_rtx_MEM (FUNCTION_MODE, funexp),
520 rounded_stack_size_rtx, next_arg_reg, n_pop);
521 else
522 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
523 rounded_stack_size_rtx, next_arg_reg, n_pop);
524
525 emit_call_insn (pat);
526 already_popped = 1;
527 }
528 else
529 #endif
530
531 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
532 if ((ecf_flags & ECF_SIBCALL)
533 && HAVE_sibcall && HAVE_sibcall_value)
534 {
535 if (valreg)
536 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
537 gen_rtx_MEM (FUNCTION_MODE, funexp),
538 rounded_stack_size_rtx,
539 next_arg_reg, NULL_RTX));
540 else
541 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
542 rounded_stack_size_rtx, next_arg_reg,
543 struct_value_size_rtx));
544 }
545 else
546 #endif
547
548 #if defined (HAVE_call) && defined (HAVE_call_value)
549 if (HAVE_call && HAVE_call_value)
550 {
551 if (valreg)
552 emit_call_insn (GEN_CALL_VALUE (valreg,
553 gen_rtx_MEM (FUNCTION_MODE, funexp),
554 rounded_stack_size_rtx, next_arg_reg,
555 NULL_RTX));
556 else
557 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
558 rounded_stack_size_rtx, next_arg_reg,
559 struct_value_size_rtx));
560 }
561 else
562 #endif
563 abort ();
564
565 /* Find the CALL insn we just emitted. */
566 for (call_insn = get_last_insn ();
567 call_insn && GET_CODE (call_insn) != CALL_INSN;
568 call_insn = PREV_INSN (call_insn))
569 ;
570
571 if (! call_insn)
572 abort ();
573
574 /* Mark memory as used for "pure" function call. */
575 if (ecf_flags & ECF_PURE)
576 {
577 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
578 gen_rtx_USE (VOIDmode,
579 gen_rtx_MEM (BLKmode,
580 gen_rtx_SCRATCH (VOIDmode))), call_fusage);
581 }
582
583 /* Put the register usage information on the CALL. If there is already
584 some usage information, put ours at the end. */
585 if (CALL_INSN_FUNCTION_USAGE (call_insn))
586 {
587 rtx link;
588
589 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
590 link = XEXP (link, 1))
591 ;
592
593 XEXP (link, 1) = call_fusage;
594 }
595 else
596 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
597
598 /* If this is a const call, then set the insn's unchanging bit. */
599 if (ecf_flags & (ECF_CONST | ECF_PURE))
600 CONST_OR_PURE_CALL_P (call_insn) = 1;
601
602 /* If this call can't throw, attach a REG_EH_REGION reg note to that
603 effect. */
604 if (ecf_flags & ECF_NOTHROW)
605 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
606 REG_NOTES (call_insn));
607
608 if (ecf_flags & ECF_NORETURN)
609 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
610 REG_NOTES (call_insn));
611 if (ecf_flags & ECF_ALWAYS_RETURN)
612 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
613 REG_NOTES (call_insn));
614
615 if (ecf_flags & ECF_RETURNS_TWICE)
616 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
617 REG_NOTES (call_insn));
618
619 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
620
621 /* Restore this now, so that we do defer pops for this call's args
622 if the context of the call as a whole permits. */
623 inhibit_defer_pop = old_inhibit_defer_pop;
624
625 if (n_popped > 0)
626 {
627 if (!already_popped)
628 CALL_INSN_FUNCTION_USAGE (call_insn)
629 = gen_rtx_EXPR_LIST (VOIDmode,
630 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
631 CALL_INSN_FUNCTION_USAGE (call_insn));
632 rounded_stack_size -= n_popped;
633 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
634 stack_pointer_delta -= n_popped;
635 }
636
637 if (!ACCUMULATE_OUTGOING_ARGS)
638 {
639 /* If returning from the subroutine does not automatically pop the args,
640 we need an instruction to pop them sooner or later.
641 Perhaps do it now; perhaps just record how much space to pop later.
642
643 If returning from the subroutine does pop the args, indicate that the
644 stack pointer will be changed. */
645
646 if (rounded_stack_size != 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
647 {
648 if (flag_defer_pop && inhibit_defer_pop == 0
649 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
650 pending_stack_adjust += rounded_stack_size;
651 else
652 adjust_stack (rounded_stack_size_rtx);
653 }
654 }
655 /* When we accumulate outgoing args, we must avoid any stack manipulations.
656 Restore the stack pointer to its original value now. Usually
657 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
658 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
659 popping variants of functions exist as well.
660
661 ??? We may optimize similar to defer_pop above, but it is
662 probably not worthwhile.
663
664 ??? It will be worthwhile to enable combine_stack_adjustments even for
665 such machines. */
666 else if (n_popped)
667 anti_adjust_stack (GEN_INT (n_popped));
668 }
669
670 /* Determine if the function identified by FNDECL is one with
671 special properties we wish to know about.
672
673 For example, if the function might return more than one time (setjmp), then
674 set RETURNS_TWICE to a nonzero value.
675
676 Similarly set LONGJMP if the function is in the longjmp family.
677
678 Set MALLOC for any of the standard memory allocation functions which
679 allocate from the heap.
680
681 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
682 space from the stack such as alloca. */
683
684 static int
685 special_function_p (fndecl, flags)
686 tree fndecl;
687 int flags;
688 {
689 if (! (flags & ECF_MALLOC)
690 && fndecl && DECL_NAME (fndecl)
691 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
692 /* Exclude functions not at the file scope, or not `extern',
693 since they are not the magic functions we would otherwise
694 think they are. */
695 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
696 {
697 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
698 const char *tname = name;
699
700 /* We assume that alloca will always be called by name. It
701 makes no sense to pass it as a pointer-to-function to
702 anything that does not understand its behavior. */
703 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
704 && name[0] == 'a'
705 && ! strcmp (name, "alloca"))
706 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
707 && name[0] == '_'
708 && ! strcmp (name, "__builtin_alloca"))))
709 flags |= ECF_MAY_BE_ALLOCA;
710
711 /* Disregard prefix _, __ or __x. */
712 if (name[0] == '_')
713 {
714 if (name[1] == '_' && name[2] == 'x')
715 tname += 3;
716 else if (name[1] == '_')
717 tname += 2;
718 else
719 tname += 1;
720 }
721
722 if (tname[0] == 's')
723 {
724 if ((tname[1] == 'e'
725 && (! strcmp (tname, "setjmp")
726 || ! strcmp (tname, "setjmp_syscall")))
727 || (tname[1] == 'i'
728 && ! strcmp (tname, "sigsetjmp"))
729 || (tname[1] == 'a'
730 && ! strcmp (tname, "savectx")))
731 flags |= ECF_RETURNS_TWICE;
732
733 if (tname[1] == 'i'
734 && ! strcmp (tname, "siglongjmp"))
735 flags |= ECF_LONGJMP;
736 }
737 else if ((tname[0] == 'q' && tname[1] == 's'
738 && ! strcmp (tname, "qsetjmp"))
739 || (tname[0] == 'v' && tname[1] == 'f'
740 && ! strcmp (tname, "vfork")))
741 flags |= ECF_RETURNS_TWICE;
742
743 else if (tname[0] == 'l' && tname[1] == 'o'
744 && ! strcmp (tname, "longjmp"))
745 flags |= ECF_LONGJMP;
746
747 else if ((tname[0] == 'f' && tname[1] == 'o'
748 && ! strcmp (tname, "fork"))
749 /* Linux specific: __clone. Check NAME to insist on the
750 leading underscores, to avoid polluting the ISO / POSIX
751 namespace. */
752 || (name[0] == '_' && name[1] == '_'
753 && ! strcmp (tname, "clone"))
754 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
755 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
756 && (tname[5] == '\0'
757 || ((tname[5] == 'p' || tname[5] == 'e')
758 && tname[6] == '\0'))))
759 flags |= ECF_FORK_OR_EXEC;
760
761 /* Do not add any more malloc-like functions to this list,
762 instead mark them as malloc functions using the malloc attribute.
763 Note, realloc is not suitable for attribute malloc since
764 it may return the same address across multiple calls.
765 C++ operator new is not suitable because it is not required
766 to return a unique pointer; indeed, the standard placement new
767 just returns its argument. */
768 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
769 && (! strcmp (tname, "malloc")
770 || ! strcmp (tname, "calloc")
771 || ! strcmp (tname, "strdup")))
772 flags |= ECF_MALLOC;
773 }
774 return flags;
775 }
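/* As a consequence of the prefix stripping above, "setjmp", "_setjmp",
   "__setjmp" and "__xsetjmp", for instance, are all compared as "setjmp"
   and so all receive ECF_RETURNS_TWICE; only the __clone test insists on
   the unstripped NAME.  */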
776
777 /* Return nonzero when FNDECL represents a call to setjmp. */
778
779 int
780 setjmp_call_p (fndecl)
781 tree fndecl;
782 {
783 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
784 }
785
786 /* Detect flags (function attributes) from the function decl or type node. */
787
788 static int
789 flags_from_decl_or_type (exp)
790 tree exp;
791 {
792 int flags = 0;
793
794 /* ??? We can't set IS_MALLOC for function types? */
795 if (DECL_P (exp))
796 {
797 /* The function exp may have the `malloc' attribute. */
798 if (DECL_P (exp) && DECL_IS_MALLOC (exp))
799 flags |= ECF_MALLOC;
800
801 /* The function exp may have the `pure' attribute. */
802 if (DECL_P (exp) && DECL_IS_PURE (exp))
803 flags |= ECF_PURE;
804
805 if (TREE_NOTHROW (exp))
806 flags |= ECF_NOTHROW;
807 }
808
809 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
810 flags |= ECF_CONST;
811
812 if (TREE_THIS_VOLATILE (exp))
813 flags |= ECF_NORETURN;
814
815 return flags;
816 }
817
818 /* Precompute all register parameters as described by ARGS, storing values
819 into fields within the ARGS array.
820
821 NUM_ACTUALS indicates the total number of elements in the ARGS array.
822
823 Set REG_PARM_SEEN if we encounter a register parameter. */
824
825 static void
826 precompute_register_parameters (num_actuals, args, reg_parm_seen)
827 int num_actuals;
828 struct arg_data *args;
829 int *reg_parm_seen;
830 {
831 int i;
832
833 *reg_parm_seen = 0;
834
835 for (i = 0; i < num_actuals; i++)
836 if (args[i].reg != 0 && ! args[i].pass_on_stack)
837 {
838 *reg_parm_seen = 1;
839
840 if (args[i].value == 0)
841 {
842 push_temp_slots ();
843 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
844 VOIDmode, 0);
845 preserve_temp_slots (args[i].value);
846 pop_temp_slots ();
847
848 /* ANSI doesn't require a sequence point here,
849 but PCC has one, so this will avoid some problems. */
850 emit_queue ();
851 }
852
853 /* If we are to promote the function arg to a wider mode,
854 do it now. */
855
856 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
857 args[i].value
858 = convert_modes (args[i].mode,
859 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
860 args[i].value, args[i].unsignedp);
861
862 /* If the value is expensive, and we are inside an appropriately
863 short loop, put the value into a pseudo and then put the pseudo
864 into the hard reg.
865
866 For small register classes, also do this if this call uses
867 register parameters. This is to avoid reload conflicts while
868 loading the parameter registers. */
869
870 if ((! (GET_CODE (args[i].value) == REG
871 || (GET_CODE (args[i].value) == SUBREG
872 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
873 && args[i].mode != BLKmode
874 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
875 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
876 || preserve_subexpressions_p ()))
877 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
878 }
879 }
880
881 #ifdef REG_PARM_STACK_SPACE
882
883 /* The argument list is the property of the called routine and it
884 may clobber it. If the fixed area has been used for previous
885 parameters, we must save and restore it. */
886
887 static rtx
888 save_fixed_argument_area (reg_parm_stack_space, argblock,
889 low_to_save, high_to_save)
890 int reg_parm_stack_space;
891 rtx argblock;
892 int *low_to_save;
893 int *high_to_save;
894 {
895 int i;
896 rtx save_area = NULL_RTX;
897
898 /* Compute the boundary of the area that needs to be saved, if any. */
899 #ifdef ARGS_GROW_DOWNWARD
900 for (i = 0; i < reg_parm_stack_space + 1; i++)
901 #else
902 for (i = 0; i < reg_parm_stack_space; i++)
903 #endif
904 {
905 if (i >= highest_outgoing_arg_in_use
906 || stack_usage_map[i] == 0)
907 continue;
908
909 if (*low_to_save == -1)
910 *low_to_save = i;
911
912 *high_to_save = i;
913 }
914
915 if (*low_to_save >= 0)
916 {
917 int num_to_save = *high_to_save - *low_to_save + 1;
918 enum machine_mode save_mode
919 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
920 rtx stack_area;
921
922 /* If we don't have the required alignment, must do this in BLKmode. */
923 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
924 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
925 save_mode = BLKmode;
926
927 #ifdef ARGS_GROW_DOWNWARD
928 stack_area
929 = gen_rtx_MEM (save_mode,
930 memory_address (save_mode,
931 plus_constant (argblock,
932 - *high_to_save)));
933 #else
934 stack_area = gen_rtx_MEM (save_mode,
935 memory_address (save_mode,
936 plus_constant (argblock,
937 *low_to_save)));
938 #endif
939 if (save_mode == BLKmode)
940 {
941 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
942 /* Cannot use emit_block_move here because it can be done by a
943 library call, which in turn would get back into this place and
944 cause infinite recursion. */
945 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
946 PARM_BOUNDARY);
947 }
948 else
949 {
950 save_area = gen_reg_rtx (save_mode);
951 emit_move_insn (save_area, stack_area);
952 }
953 }
954 return save_area;
955 }
956
957 static void
958 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
959 rtx save_area;
960 rtx argblock;
961 int high_to_save;
962 int low_to_save;
963 {
964 enum machine_mode save_mode = GET_MODE (save_area);
965 #ifdef ARGS_GROW_DOWNWARD
966 rtx stack_area
967 = gen_rtx_MEM (save_mode,
968 memory_address (save_mode,
969 plus_constant (argblock,
970 - high_to_save)));
971 #else
972 rtx stack_area
973 = gen_rtx_MEM (save_mode,
974 memory_address (save_mode,
975 plus_constant (argblock,
976 low_to_save)));
977 #endif
978
979 if (save_mode != BLKmode)
980 emit_move_insn (stack_area, save_area);
981 else
982 /* Cannot use emit_block_move here because it can be done by a library
983 call, which in turn would get back into this place and cause infinite
984 recursion. */
985 move_by_pieces (stack_area, validize_mem (save_area),
986 high_to_save - low_to_save + 1, PARM_BOUNDARY);
987 }
988 #endif /* REG_PARM_STACK_SPACE */
989
990 /* If any elements in ARGS refer to parameters that are to be passed in
991 registers, but not in memory, and whose alignment does not permit a
992 direct copy into registers, copy the values into a group of pseudos
993 which we will later copy into the appropriate hard registers.
994
995 Pseudos for each unaligned argument will be stored into the array
996 args[argnum].aligned_regs. The caller is responsible for deallocating
997 the aligned_regs array if it is nonzero. */
998
999 static void
1000 store_unaligned_arguments_into_pseudos (args, num_actuals)
1001 struct arg_data *args;
1002 int num_actuals;
1003 {
1004 int i, j;
1005
1006 for (i = 0; i < num_actuals; i++)
1007 if (args[i].reg != 0 && ! args[i].pass_on_stack
1008 && args[i].mode == BLKmode
1009 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1010 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1011 {
1012 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1013 int big_endian_correction = 0;
1014
1015 args[i].n_aligned_regs
1016 = args[i].partial ? args[i].partial
1017 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1018
1019 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
1020 * args[i].n_aligned_regs);
1021
1022 /* Structures smaller than a word are aligned to the least
1023 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1024 this means we must skip the empty high order bytes when
1025 calculating the bit offset. */
1026 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1027 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
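/* For instance, a 3-byte argument on a big-endian target with 8-bit
   units and 32-bit words gets a correction of 32 - 3 * 8 = 8 bits,
   i.e. the store below skips the single empty high-order byte.  */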
1028
1029 for (j = 0; j < args[i].n_aligned_regs; j++)
1030 {
1031 rtx reg = gen_reg_rtx (word_mode);
1032 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1033 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1034 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1035
1036 args[i].aligned_regs[j] = reg;
1037
1038 /* There is no need to restrict this code to loading items
1039 in TYPE_ALIGN sized hunks. The bitfield instructions can
1040 load up entire word sized registers efficiently.
1041
1042 ??? This may not be needed anymore.
1043 We used to emit a clobber here but that doesn't let later
1044 passes optimize the instructions we emit. By storing 0 into
1045 the register, later passes know that the first AND to zero out the
1046 bitfield being set in the register is unnecessary. The store
1047 of 0 will be deleted as will at least the first AND. */
1048
1049 emit_move_insn (reg, const0_rtx);
1050
1051 bytes -= bitsize / BITS_PER_UNIT;
1052 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1053 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1054 word_mode, word_mode, bitalign,
1055 BITS_PER_WORD),
1056 bitalign, BITS_PER_WORD);
1057 }
1058 }
1059 }
1060
1061 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1062 ACTPARMS.
1063
1064 NUM_ACTUALS is the total number of parameters.
1065
1066 N_NAMED_ARGS is the total number of named arguments.
1067
1068 FNDECL is the tree code for the target of this call (if known)
1069
1070 ARGS_SO_FAR holds state needed by the target to know where to place
1071 the next argument.
1072
1073 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1074 for arguments which are passed in registers.
1075
1076 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1077 and may be modified by this routine.
1078
1079 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1080 flags which may be modified by this routine. */
1081
1082 static void
1083 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1084 actparms, fndecl, args_so_far,
1085 reg_parm_stack_space, old_stack_level,
1086 old_pending_adj, must_preallocate,
1087 ecf_flags)
1088 int num_actuals ATTRIBUTE_UNUSED;
1089 struct arg_data *args;
1090 struct args_size *args_size;
1091 int n_named_args ATTRIBUTE_UNUSED;
1092 tree actparms;
1093 tree fndecl;
1094 CUMULATIVE_ARGS *args_so_far;
1095 int reg_parm_stack_space;
1096 rtx *old_stack_level;
1097 int *old_pending_adj;
1098 int *must_preallocate;
1099 int *ecf_flags;
1100 {
1101 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1102 int inc;
1103
1104 /* Count arg position in order args appear. */
1105 int argpos;
1106
1107 struct args_size alignment_pad;
1108 int i;
1109 tree p;
1110
1111 args_size->constant = 0;
1112 args_size->var = 0;
1113
1114 /* In this loop, we consider args in the order they are written.
1115 We fill up ARGS from the front or from the back if necessary
1116 so that in any case the first arg to be pushed ends up at the front. */
1117
1118 if (PUSH_ARGS_REVERSED)
1119 {
1120 i = num_actuals - 1, inc = -1;
1121 /* In this case, must reverse order of args
1122 so that we compute and push the last arg first. */
1123 }
1124 else
1125 {
1126 i = 0, inc = 1;
1127 }
1128
1129 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1130 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1131 {
1132 tree type = TREE_TYPE (TREE_VALUE (p));
1133 int unsignedp;
1134 enum machine_mode mode;
1135
1136 args[i].tree_value = TREE_VALUE (p);
1137
1138 /* Replace erroneous argument with constant zero. */
1139 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1140 args[i].tree_value = integer_zero_node, type = integer_type_node;
1141
1142 /* If TYPE is a transparent union, pass things the way we would
1143 pass the first field of the union. We have already verified that
1144 the modes are the same. */
1145 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1146 type = TREE_TYPE (TYPE_FIELDS (type));
1147
1148 /* Decide where to pass this arg.
1149
1150 args[i].reg is nonzero if all or part is passed in registers.
1151
1152 args[i].partial is nonzero if part but not all is passed in registers,
1153 and the exact value says how many words are passed in registers.
1154
1155 args[i].pass_on_stack is nonzero if the argument must at least be
1156 computed on the stack. It may then be loaded back into registers
1157 if args[i].reg is nonzero.
1158
1159 These decisions are driven by the FUNCTION_... macros and must agree
1160 with those made by function.c. */
1161
1162 /* See if this argument should be passed by invisible reference. */
1163 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1164 && contains_placeholder_p (TYPE_SIZE (type)))
1165 || TREE_ADDRESSABLE (type)
1166 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1167 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1168 type, argpos < n_named_args)
1169 #endif
1170 )
1171 {
1172 /* If we're compiling a thunk, pass through invisible
1173 references instead of making a copy. */
1174 if (current_function_is_thunk
1175 #ifdef FUNCTION_ARG_CALLEE_COPIES
1176 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1177 type, argpos < n_named_args)
1178 /* If it's in a register, we must make a copy of it too. */
1179 /* ??? Is this a sufficient test? Is there a better one? */
1180 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1181 && REG_P (DECL_RTL (args[i].tree_value)))
1182 && ! TREE_ADDRESSABLE (type))
1183 #endif
1184 )
1185 {
1186 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1187 new object from the argument. If we are passing by
1188 invisible reference, the callee will do that for us, so we
1189 can strip off the TARGET_EXPR. This is not always safe,
1190 but it is safe in the only case where this is a useful
1191 optimization; namely, when the argument is a plain object.
1192 In that case, the frontend is just asking the backend to
1193 make a bitwise copy of the argument. */
1194
1195 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1196 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1197 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1198 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1199
1200 args[i].tree_value = build1 (ADDR_EXPR,
1201 build_pointer_type (type),
1202 args[i].tree_value);
1203 type = build_pointer_type (type);
1204 }
1205 else
1206 {
1207 /* We make a copy of the object and pass the address to the
1208 function being called. */
1209 rtx copy;
1210
1211 if (!COMPLETE_TYPE_P (type)
1212 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1213 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1214 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1215 STACK_CHECK_MAX_VAR_SIZE))))
1216 {
1217 /* This is a variable-sized object. Make space on the stack
1218 for it. */
1219 rtx size_rtx = expr_size (TREE_VALUE (p));
1220
1221 if (*old_stack_level == 0)
1222 {
1223 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1224 *old_pending_adj = pending_stack_adjust;
1225 pending_stack_adjust = 0;
1226 }
1227
1228 copy = gen_rtx_MEM (BLKmode,
1229 allocate_dynamic_stack_space
1230 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1231 set_mem_attributes (copy, type, 1);
1232 }
1233 else
1234 copy = assign_temp (type, 0, 1, 0);
1235
1236 store_expr (args[i].tree_value, copy, 0);
1237 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1238
1239 args[i].tree_value = build1 (ADDR_EXPR,
1240 build_pointer_type (type),
1241 make_tree (type, copy));
1242 type = build_pointer_type (type);
1243 }
1244 }
1245
1246 mode = TYPE_MODE (type);
1247 unsignedp = TREE_UNSIGNED (type);
1248
1249 #ifdef PROMOTE_FUNCTION_ARGS
1250 mode = promote_mode (type, mode, &unsignedp, 1);
1251 #endif
1252
1253 args[i].unsignedp = unsignedp;
1254 args[i].mode = mode;
1255
1256 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1257 argpos < n_named_args);
1258 #ifdef FUNCTION_INCOMING_ARG
1259 /* If this is a sibling call and the machine has register windows, the
1260 register window has to be unwound before calling the routine, so
1261 arguments have to go into the incoming registers. */
1262 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1263 argpos < n_named_args);
1264 #else
1265 args[i].tail_call_reg = args[i].reg;
1266 #endif
1267
1268 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1269 if (args[i].reg)
1270 args[i].partial
1271 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1272 argpos < n_named_args);
1273 #endif
1274
1275 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1276
1277 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1278 it means that we are to pass this arg in the register(s) designated
1279 by the PARALLEL, but also to pass it in the stack. */
1280 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1281 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1282 args[i].pass_on_stack = 1;
1283
1284 /* If this is an addressable type, we must preallocate the stack
1285 since we must evaluate the object into its final location.
1286
1287 If this is to be passed in both registers and the stack, it is simpler
1288 to preallocate. */
1289 if (TREE_ADDRESSABLE (type)
1290 || (args[i].pass_on_stack && args[i].reg != 0))
1291 *must_preallocate = 1;
1292
1293 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1294 we cannot consider this function call constant. */
1295 if (TREE_ADDRESSABLE (type))
1296 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1297
1298 /* Compute the stack-size of this argument. */
1299 if (args[i].reg == 0 || args[i].partial != 0
1300 || reg_parm_stack_space > 0
1301 || args[i].pass_on_stack)
1302 locate_and_pad_parm (mode, type,
1303 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1304 1,
1305 #else
1306 args[i].reg != 0,
1307 #endif
1308 fndecl, args_size, &args[i].offset,
1309 &args[i].size, &alignment_pad);
1310
1311 #ifndef ARGS_GROW_DOWNWARD
1312 args[i].slot_offset = *args_size;
1313 #endif
1314
1315 args[i].alignment_pad = alignment_pad;
1316
1317 /* If a part of the arg was put into registers,
1318 don't include that part in the amount pushed. */
1319 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1320 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1321 / (PARM_BOUNDARY / BITS_PER_UNIT)
1322 * (PARM_BOUNDARY / BITS_PER_UNIT));
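/* E.g. with args[i].partial == 1, UNITS_PER_WORD == 4 and a 32-bit
   PARM_BOUNDARY this removes (1 * 4) / 4 * 4 = 4 bytes, i.e. one
   register's worth rounded down to a PARM_BOUNDARY multiple.  */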
1323
1324 /* Update ARGS_SIZE, the total stack space for args so far. */
1325
1326 args_size->constant += args[i].size.constant;
1327 if (args[i].size.var)
1328 {
1329 ADD_PARM_SIZE (*args_size, args[i].size.var);
1330 }
1331
1332 /* Since the slot offset points to the bottom of the slot,
1333 we must record it after incrementing if the args grow down. */
1334 #ifdef ARGS_GROW_DOWNWARD
1335 args[i].slot_offset = *args_size;
1336
1337 args[i].slot_offset.constant = -args_size->constant;
1338 if (args_size->var)
1339 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1340 #endif
1341
1342 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1343 have been used, etc. */
1344
1345 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1346 argpos < n_named_args);
1347 }
1348 }
1349
1350 /* Update ARGS_SIZE to contain the total size for the argument block.
1351 Return the original constant component of the argument block's size.
1352
1353 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1354 for arguments passed in registers. */
1355
1356 static int
1357 compute_argument_block_size (reg_parm_stack_space, args_size,
1358 preferred_stack_boundary)
1359 int reg_parm_stack_space;
1360 struct args_size *args_size;
1361 int preferred_stack_boundary ATTRIBUTE_UNUSED;
1362 {
1363 int unadjusted_args_size = args_size->constant;
1364
1365 /* For accumulate outgoing args mode we don't need to align, since the frame
1366 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1367 backends from generating misaligned frame sizes. */
1368 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1369 preferred_stack_boundary = STACK_BOUNDARY;
1370
1371 /* Compute the actual size of the argument block required. The variable
1372 and constant sizes must be combined, the size may have to be rounded,
1373 and there may be a minimum required size. */
1374
1375 if (args_size->var)
1376 {
1377 args_size->var = ARGS_SIZE_TREE (*args_size);
1378 args_size->constant = 0;
1379
1380 preferred_stack_boundary /= BITS_PER_UNIT;
1381 if (preferred_stack_boundary > 1)
1382 {
1383 /* We don't handle this case yet. To handle it correctly we have
1384 to add the delta, round and subtract the delta.
1385 Currently no machine description requires this support. */
1386 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1387 abort ();
1388 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1389 }
1390
1391 if (reg_parm_stack_space > 0)
1392 {
1393 args_size->var
1394 = size_binop (MAX_EXPR, args_size->var,
1395 ssize_int (reg_parm_stack_space));
1396
1397 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1398 /* The area corresponding to register parameters is not to count in
1399 the size of the block we need. So make the adjustment. */
1400 args_size->var
1401 = size_binop (MINUS_EXPR, args_size->var,
1402 ssize_int (reg_parm_stack_space));
1403 #endif
1404 }
1405 }
1406 else
1407 {
1408 preferred_stack_boundary /= BITS_PER_UNIT;
1409 if (preferred_stack_boundary < 1)
1410 preferred_stack_boundary = 1;
1411 args_size->constant = (((args_size->constant
1412 + stack_pointer_delta
1413 + preferred_stack_boundary - 1)
1414 / preferred_stack_boundary
1415 * preferred_stack_boundary)
1416 - stack_pointer_delta);
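/* A worked example: with args_size->constant == 20, stack_pointer_delta
   == 4 and a preferred boundary of 16 bytes, this computes
   ((20 + 4 + 15) / 16) * 16 - 4 = 28, so the pending delta of 4 plus
   the 28 bytes of arguments together end on a 16-byte boundary.  */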
1417
1418 args_size->constant = MAX (args_size->constant,
1419 reg_parm_stack_space);
1420
1421 #ifdef MAYBE_REG_PARM_STACK_SPACE
1422 if (reg_parm_stack_space == 0)
1423 args_size->constant = 0;
1424 #endif
1425
1426 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1427 args_size->constant -= reg_parm_stack_space;
1428 #endif
1429 }
1430 return unadjusted_args_size;
1431 }
1432
1433 /* Precompute parameters as needed for a function call.
1434
1435 FLAGS is mask of ECF_* constants.
1436
1437 NUM_ACTUALS is the number of arguments.
1438
1439 ARGS is an array containing information for each argument; this
1440 routine fills in the INITIAL_VALUE and VALUE fields for each
1441 precomputed argument. */
1442
1443 static void
1444 precompute_arguments (flags, num_actuals, args)
1445 int flags;
1446 int num_actuals;
1447 struct arg_data *args;
1448 {
1449 int i;
1450
1451 /* If this function call is cse'able, precompute all the parameters.
1452 Note that if the parameter is constructed into a temporary, this will
1453 cause an additional copy because the parameter will be constructed
1454 into a temporary location and then copied into the outgoing arguments.
1455 If a parameter contains a call to alloca and this function uses the
1456 stack, precompute the parameter. */
1457
1458 /* If we preallocated the stack space, and some arguments must be passed
1459 on the stack, then we must precompute any parameter which contains a
1460 function call which will store arguments on the stack.
1461 Otherwise, evaluating the parameter may clobber previous parameters
1462 which have already been stored into the stack. (We have code to avoid
1463 such a case by saving the outgoing stack arguments, but it results in
1464 worse code.)
1465
1466 for (i = 0; i < num_actuals; i++)
1467 if ((flags & (ECF_CONST | ECF_PURE))
1468 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1469 {
1470 enum machine_mode mode;
1471
1472 /* If this is an addressable type, we cannot pre-evaluate it. */
1473 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1474 abort ();
1475
1476 push_temp_slots ();
1477
1478 args[i].value
1479 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1480
1481 preserve_temp_slots (args[i].value);
1482 pop_temp_slots ();
1483
1484 /* ANSI doesn't require a sequence point here,
1485 but PCC has one, so this will avoid some problems. */
1486 emit_queue ();
1487
1488 args[i].initial_value = args[i].value
1489 = protect_from_queue (args[i].value, 0);
1490
1491 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1492 if (mode != args[i].mode)
1493 {
1494 args[i].value
1495 = convert_modes (args[i].mode, mode,
1496 args[i].value, args[i].unsignedp);
1497 #ifdef PROMOTE_FOR_CALL_ONLY
1498 /* CSE will replace this only if it contains args[i].value
1499 pseudo, so convert it down to the declared mode using
1500 a SUBREG. */
1501 if (GET_CODE (args[i].value) == REG
1502 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1503 {
1504 args[i].initial_value
1505 = gen_lowpart_SUBREG (mode, args[i].value);
1506 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1507 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1508 = args[i].unsignedp;
1509 }
1510 #endif
1511 }
1512 }
1513 }
1514
1515 /* Given the current state of MUST_PREALLOCATE and information about
1516 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1517 compute and return the final value for MUST_PREALLOCATE. */
1518
1519 static int
1520 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1521 int must_preallocate;
1522 int num_actuals;
1523 struct arg_data *args;
1524 struct args_size *args_size;
1525 {
1526 /* See if we have or want to preallocate stack space.
1527
1528 If we would have to push a partially-in-regs parm
1529 before other stack parms, preallocate stack space instead.
1530
1531 If the size of some parm is not a multiple of the required stack
1532 alignment, we must preallocate.
1533
1534 If the total size of arguments that would otherwise create a copy in
1535 a temporary (such as a CALL) is more than half the total argument list
1536 size, preallocation is faster.
1537
1538 Another reason to preallocate is if we have a machine (like the m88k)
1539 where stack alignment is required to be maintained between every
1540 pair of insns, not just when the call is made. However, we assume here
1541 that such machines either do not have push insns (and hence preallocation
1542 would occur anyway) or the problem is taken care of with
1543 PUSH_ROUNDING. */
1544
1545 if (! must_preallocate)
1546 {
1547 int partial_seen = 0;
1548 int copy_to_evaluate_size = 0;
1549 int i;
1550
1551 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1552 {
1553 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1554 partial_seen = 1;
1555 else if (partial_seen && args[i].reg == 0)
1556 must_preallocate = 1;
1557
1558 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1559 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1560 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1561 || TREE_CODE (args[i].tree_value) == COND_EXPR
1562 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1563 copy_to_evaluate_size
1564 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1565 }
1566
1567 if (copy_to_evaluate_size * 2 >= args_size->constant
1568 && args_size->constant > 0)
1569 must_preallocate = 1;
1570 }
1571 return must_preallocate;
1572 }
1573
1574 /* If we preallocated stack space, compute the address of each argument
1575 and store it into the ARGS array.
1576
1577 We need not ensure it is a valid memory address here; it will be
1578 validized when it is used.
1579
1580 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1581
1582 static void
1583 compute_argument_addresses (args, argblock, num_actuals)
1584 struct arg_data *args;
1585 rtx argblock;
1586 int num_actuals;
1587 {
1588 if (argblock)
1589 {
1590 rtx arg_reg = argblock;
1591 int i, arg_offset = 0;
1592
1593 if (GET_CODE (argblock) == PLUS)
1594 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1595
1596 for (i = 0; i < num_actuals; i++)
1597 {
1598 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1599 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1600 rtx addr;
1601
1602 /* Skip this parm if it will not be passed on the stack. */
1603 if (! args[i].pass_on_stack && args[i].reg != 0)
1604 continue;
1605
1606 if (GET_CODE (offset) == CONST_INT)
1607 addr = plus_constant (arg_reg, INTVAL (offset));
1608 else
1609 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1610
1611 addr = plus_constant (addr, arg_offset);
1612 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1613 set_mem_attributes (args[i].stack,
1614 TREE_TYPE (args[i].tree_value), 1);
1615
1616 if (GET_CODE (slot_offset) == CONST_INT)
1617 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1618 else
1619 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1620
1621 addr = plus_constant (addr, arg_offset);
1622 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1623 set_mem_attributes (args[i].stack_slot,
1624 TREE_TYPE (args[i].tree_value), 1);
1625
1626 /* Function incoming arguments may overlap with sibling call
1627 outgoing arguments and we cannot allow reordering of reads
1628 from function arguments with stores to outgoing arguments
1629 of sibling calls. */
1630 set_mem_alias_set (args[i].stack, 0);
1631 set_mem_alias_set (args[i].stack_slot, 0);
1632 }
1633 }
1634 }
1635
1636 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1637 in a call instruction.
1638
1639 FNDECL is the tree node for the target function. For an indirect call
1640 FNDECL will be NULL_TREE.
1641
1642 EXP is the CALL_EXPR for this call. */
1643
1644 static rtx
1645 rtx_for_function_call (fndecl, exp)
1646 tree fndecl;
1647 tree exp;
1648 {
1649 rtx funexp;
1650
1651 /* Get the function to call, in the form of RTL. */
1652 if (fndecl)
1653 {
1654 /* If this is the first use of the function, see if we need to
1655 make an external definition for it. */
1656 if (! TREE_USED (fndecl))
1657 {
1658 assemble_external (fndecl);
1659 TREE_USED (fndecl) = 1;
1660 }
1661
1662 /* Get a SYMBOL_REF rtx for the function address. */
1663 funexp = XEXP (DECL_RTL (fndecl), 0);
1664 }
1665 else
1666 /* Generate an rtx (probably a pseudo-register) for the address. */
1667 {
1668 rtx funaddr;
1669 push_temp_slots ();
1670 funaddr = funexp =
1671 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1672 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1673
1674 /* Check that the function is executable. */
1675 if (current_function_check_memory_usage)
1676 {
1677 #ifdef POINTERS_EXTEND_UNSIGNED
1678 /* It might be OK to convert funexp in place, but there's
1679 a lot going on between here and when it happens naturally
1680 that this seems safer. */
1681 funaddr = convert_memory_address (Pmode, funexp);
1682 #endif
1683 emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
1684 VOIDmode, 1, funaddr, Pmode);
1685 }
1686 emit_queue ();
1687 }
1688 return funexp;
1689 }
1690
1691 /* Do the register loads required for any wholly-register parms or any
1692 parms which are passed both on the stack and in a register. Their
1693 expressions were already evaluated.
1694
1695 Mark all register-parms as living through the call, putting these USE
1696 insns in the CALL_INSN_FUNCTION_USAGE field. */
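/* Purely for illustration, assuming UNITS_PER_WORD == 4 and that REG is
   not a PARALLEL and no aligned-register copies were precomputed: a
   12-byte BLKmode argument with partial == 0 gets nregs == 3 below and
   is loaded with move_block_to_reg, while a non-BLKmode argument gets
   nregs == -1 and is loaded with a single emit_move_insn.  */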
1697
1698 static void
1699 load_register_parameters (args, num_actuals, call_fusage, flags)
1700 struct arg_data *args;
1701 int num_actuals;
1702 rtx *call_fusage;
1703 int flags;
1704 {
1705 int i, j;
1706
1707 #ifdef LOAD_ARGS_REVERSED
1708 for (i = num_actuals - 1; i >= 0; i--)
1709 #else
1710 for (i = 0; i < num_actuals; i++)
1711 #endif
1712 {
1713 rtx reg = ((flags & ECF_SIBCALL)
1714 ? args[i].tail_call_reg : args[i].reg);
1715 int partial = args[i].partial;
1716 int nregs;
1717
1718 if (reg)
1719 {
1720 /* Set to non-negative if we must move a word at a time, even if just
1721 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1722 we just use a normal move insn. This value can be zero if the
1723 argument is a zero-sized structure with no fields. */
1724 nregs = (partial ? partial
1725 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1726 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1727 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1728 : -1));
1729
1730 /* Handle calls that pass values in multiple non-contiguous
1731 locations. The Irix 6 ABI has examples of this. */
1732
1733 if (GET_CODE (reg) == PARALLEL)
1734 emit_group_load (reg, args[i].value,
1735 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1736 TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
1737
1738 /* If simple case, just do move. If normal partial, store_one_arg
1739 has already loaded the register for us. In all other cases,
1740 load the register(s) from memory. */
1741
1742 else if (nregs == -1)
1743 emit_move_insn (reg, args[i].value);
1744
1745 /* If we have pre-computed the values to put in the registers in
1746 the case of non-aligned structures, copy them in now. */
1747
1748 else if (args[i].n_aligned_regs != 0)
1749 for (j = 0; j < args[i].n_aligned_regs; j++)
1750 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1751 args[i].aligned_regs[j]);
1752
1753 else if (partial == 0 || args[i].pass_on_stack)
1754 move_block_to_reg (REGNO (reg),
1755 validize_mem (args[i].value), nregs,
1756 args[i].mode);
1757
1758 /* Handle calls that pass values in multiple non-contiguous
1759 locations. The Irix 6 ABI has examples of this. */
1760 if (GET_CODE (reg) == PARALLEL)
1761 use_group_regs (call_fusage, reg);
1762 else if (nregs == -1)
1763 use_reg (call_fusage, reg);
1764 else
1765 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1766 }
1767 }
1768 }
1769
1770 /* Try to integrate function. See expand_inline_function for documentation
1771 about the parameters. */
1772
1773 static rtx
1774 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1775 tree fndecl;
1776 tree actparms;
1777 rtx target;
1778 int ignore;
1779 tree type;
1780 rtx structure_value_addr;
1781 {
1782 rtx temp;
1783 rtx before_call;
1784 int i;
1785 rtx old_stack_level = 0;
1786 int reg_parm_stack_space = 0;
1787
1788 #ifdef REG_PARM_STACK_SPACE
1789 #ifdef MAYBE_REG_PARM_STACK_SPACE
1790 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1791 #else
1792 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1793 #endif
1794 #endif
1795
1796 before_call = get_last_insn ();
1797
1798 timevar_push (TV_INTEGRATION);
1799
1800 temp = expand_inline_function (fndecl, actparms, target,
1801 ignore, type,
1802 structure_value_addr);
1803
1804 timevar_pop (TV_INTEGRATION);
1805
1806 /* If inlining succeeded, return. */
1807 if (temp != (rtx) (HOST_WIDE_INT) - 1)
1808 {
1809 if (ACCUMULATE_OUTGOING_ARGS)
1810 {
1811 /* If the outgoing argument list must be preserved, push
1812 the stack before executing the inlined function if it
1813 makes any calls. */
1814
1815 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1816 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1817 break;
1818
1819 if (stack_arg_under_construction || i >= 0)
1820 {
1821 rtx first_insn
1822 = before_call ? NEXT_INSN (before_call) : get_insns ();
1823 rtx insn = NULL_RTX, seq;
1824
1825 /* Look for a call in the inline function code.
1826 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1827 nonzero then there is a call and it is not necessary
1828 to scan the insns. */
1829
1830 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1831 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1832 if (GET_CODE (insn) == CALL_INSN)
1833 break;
1834
1835 if (insn)
1836 {
1837 /* Reserve enough stack space so that the largest
1838 argument list of any function call in the inline
1839 function does not overlap the argument list being
1840 evaluated. This is usually an overestimate because
1841 allocate_dynamic_stack_space reserves space for an
1842 outgoing argument list in addition to the requested
1843 space, but there is no way to ask for stack space such
1844 that an argument list of a certain length can be
1845 safely constructed.
1846
1847 Add the stack space reserved for register arguments, if
1848 any, in the inline function. What is really needed is the
1849 largest value of reg_parm_stack_space in the inline
1850 function, but that is not available. Using the current
1851 value of reg_parm_stack_space is wrong, but gives
1852 correct results on all supported machines. */
1853
1854 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1855 + reg_parm_stack_space);
1856
1857 start_sequence ();
1858 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1859 allocate_dynamic_stack_space (GEN_INT (adjust),
1860 NULL_RTX, BITS_PER_UNIT);
1861 seq = get_insns ();
1862 end_sequence ();
1863 emit_insns_before (seq, first_insn);
1864 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1865 }
1866 }
1867 }
1868
1869 /* If the result is equivalent to TARGET, return TARGET to simplify
1870 checks in store_expr. They can be equivalent but not equal in the
1871 case of a function that returns BLKmode. */
1872 if (temp != target && rtx_equal_p (temp, target))
1873 return target;
1874 return temp;
1875 }
1876
1877 /* If inlining failed, mark FNDECL as needing to be compiled
1878 separately after all. If function was declared inline,
1879 give a warning. */
1880 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1881 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1882 {
1883 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1884 warning ("called from here");
1885 }
1886 mark_addressable (fndecl);
1887 return (rtx) (HOST_WIDE_INT) - 1;
1888 }
1889
1890 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1891 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1892 bytes, then we would need to push some additional bytes to pad the
1893 arguments. So, we compute an adjustment to the stack pointer for an
1894 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1895 bytes. Then, when the arguments are pushed the stack will be perfectly
1896 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1897 be popped after the call. Returns the adjustment. */
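/* A worked example, purely for illustration: with
   stack_pointer_delta == 0, UNADJUSTED_ARGS_SIZE == 12,
   PREFERRED_UNIT_STACK_BOUNDARY == 16 and pending_stack_adjust == 64,
   the initial misalignment is 12, so we pop only 60 bytes now (the
   returned adjustment) and set ARGS_SIZE->CONSTANT to
   64 - 60 + 12 == 16 bytes to be popped after the call; pushing the
   12 argument bytes then leaves the stack 16-byte aligned.  */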
1898
1899 static int
1900 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1901 args_size,
1902 preferred_unit_stack_boundary)
1903 int unadjusted_args_size;
1904 struct args_size *args_size;
1905 int preferred_unit_stack_boundary;
1906 {
1907 /* The number of bytes to pop so that the stack will be
1908 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1909 HOST_WIDE_INT adjustment;
1910 /* The alignment of the stack after the arguments are pushed, if we
1911 just pushed the arguments without adjusting the stack here. */
1912 HOST_WIDE_INT unadjusted_alignment;
1913
1914 unadjusted_alignment
1915 = ((stack_pointer_delta + unadjusted_args_size)
1916 % preferred_unit_stack_boundary);
1917
1918 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1919 as possible -- leaving just enough left to cancel out the
1920 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1921 PENDING_STACK_ADJUST is non-negative, and congruent to
1922 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1923
1924 /* Begin by trying to pop all the bytes. */
1925 unadjusted_alignment
1926 = (unadjusted_alignment
1927 - (pending_stack_adjust % preferred_unit_stack_boundary));
1928 adjustment = pending_stack_adjust;
1929 /* Push enough additional bytes that the stack will be aligned
1930 after the arguments are pushed. */
1931 if (preferred_unit_stack_boundary > 1)
1932 {
1933 if (unadjusted_alignment > 0)
1934 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1935 else
1936 adjustment += unadjusted_alignment;
1937 }
1938
1939 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1940 bytes after the call. The right number is the entire
1941 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1942 by the arguments in the first place. */
1943 args_size->constant
1944 = pending_stack_adjust - adjustment + unadjusted_args_size;
1945
1946 return adjustment;
1947 }
1948
1949 /* Scan expression X to see whether it dereferences any argument slots
1950 that were already clobbered by tail call arguments (as noted in the
1951 stored_args_map bitmap).
1952 Return non-zero if X dereferences such an argument slot,
1953 zero otherwise. */
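/* For illustration, assuming ARGS_GROW_DOWNWARD is not defined: a
   reference such as (mem:SI (plus (reg) (const_int 8))), where the reg
   is current_function_internal_arg_pointer, yields i == 8, and bytes
   8..11 are then tested against stored_args_map; if any of them has
   already been overwritten, the overlap is reported.  */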
1954
1955 static int
1956 check_sibcall_argument_overlap_1 (x)
1957 rtx x;
1958 {
1959 RTX_CODE code;
1960 int i, j;
1961 unsigned int k;
1962 const char *fmt;
1963
1964 if (x == NULL_RTX)
1965 return 0;
1966
1967 code = GET_CODE (x);
1968
1969 if (code == MEM)
1970 {
1971 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1972 i = 0;
1973 else if (GET_CODE (XEXP (x, 0)) == PLUS
1974 && XEXP (XEXP (x, 0), 0) ==
1975 current_function_internal_arg_pointer
1976 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1977 i = INTVAL (XEXP (XEXP (x, 0), 1));
1978 else
1979 return 0;
1980
1981 #ifdef ARGS_GROW_DOWNWARD
1982 i = -i - GET_MODE_SIZE (GET_MODE (x));
1983 #endif
1984
1985 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1986 if (i + k < stored_args_map->n_bits
1987 && TEST_BIT (stored_args_map, i + k))
1988 return 1;
1989
1990 return 0;
1991 }
1992
1993 /* Scan all subexpressions. */
1994 fmt = GET_RTX_FORMAT (code);
1995 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1996 {
1997 if (*fmt == 'e')
1998 {
1999 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2000 return 1;
2001 }
2002 else if (*fmt == 'E')
2003 {
2004 for (j = 0; j < XVECLEN (x, i); j++)
2005 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2006 return 1;
2007 }
2008 }
2009 return 0;
2010 }
2011
2012 /* Scan the sequence after INSN to see whether it dereferences any argument
2013 slots already clobbered by tail call arguments (as noted in the
2014 stored_args_map bitmap), then add the stack slots for ARG to that bitmap.
2015 Return non-zero if the sequence after INSN dereferences such argument
2016 slots, zero otherwise. */
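/* For illustration, assuming ARGS_GROW_DOWNWARD is not defined: for an
   argument with offset.constant == 16 and size.constant == 8, bytes
   16..23 of stored_args_map are marked once the scan is done.  */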
2017
2018 static int
2019 check_sibcall_argument_overlap (insn, arg)
2020 rtx insn;
2021 struct arg_data *arg;
2022 {
2023 int low, high;
2024
2025 if (insn == NULL_RTX)
2026 insn = get_insns ();
2027 else
2028 insn = NEXT_INSN (insn);
2029
2030 for (; insn; insn = NEXT_INSN (insn))
2031 if (INSN_P (insn)
2032 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2033 break;
2034
2035 #ifdef ARGS_GROW_DOWNWARD
2036 low = -arg->offset.constant - arg->size.constant;
2037 #else
2038 low = arg->offset.constant;
2039 #endif
2040
2041 for (high = low + arg->size.constant; low < high; low++)
2042 SET_BIT (stored_args_map, low);
2043 return insn != NULL_RTX;
2044 }
2045
2046 /* Generate all the code for a function call
2047 and return an rtx for its value.
2048 Store the value in TARGET (specified as an rtx) if convenient.
2049 If the value is stored in TARGET then TARGET is returned.
2050 If IGNORE is nonzero, then we ignore the value of the function call. */
2051
2052 rtx
2053 expand_call (exp, target, ignore)
2054 tree exp;
2055 rtx target;
2056 int ignore;
2057 {
2058 /* Nonzero if we are currently expanding a call. */
2059 static int currently_expanding_call = 0;
2060
2061 /* List of actual parameters. */
2062 tree actparms = TREE_OPERAND (exp, 1);
2063 /* RTX for the function to be called. */
2064 rtx funexp;
2065 /* Sequence of insns to perform a tail recursive "call". */
2066 rtx tail_recursion_insns = NULL_RTX;
2067 /* Sequence of insns to perform a normal "call". */
2068 rtx normal_call_insns = NULL_RTX;
2069 /* Sequence of insns to perform a tail recursive "call". */
2070 rtx tail_call_insns = NULL_RTX;
2071 /* Data type of the function. */
2072 tree funtype;
2073 /* Declaration of the function being called,
2074 or 0 if the function is computed (not known by name). */
2075 tree fndecl = 0;
2076 rtx insn;
2077 int try_tail_call = 1;
2078 int try_tail_recursion = 1;
2079 int pass;
2080
2081 /* Register in which non-BLKmode value will be returned,
2082 or 0 if no value or if value is BLKmode. */
2083 rtx valreg;
2084 /* Address where we should return a BLKmode value;
2085 0 if value not BLKmode. */
2086 rtx structure_value_addr = 0;
2087 /* Nonzero if that address is being passed by treating it as
2088 an extra, implicit first parameter. Otherwise,
2089 it is passed by being copied directly into struct_value_rtx. */
2090 int structure_value_addr_parm = 0;
2091 /* Size of aggregate value wanted, or zero if none wanted
2092 or if we are using the non-reentrant PCC calling convention
2093 or expecting the value in registers. */
2094 HOST_WIDE_INT struct_value_size = 0;
2095 /* Nonzero if called function returns an aggregate in memory PCC style,
2096 by returning the address of where to find it. */
2097 int pcc_struct_value = 0;
2098
2099 /* Number of actual parameters in this call, including struct value addr. */
2100 int num_actuals;
2101 /* Number of named args. Args after this are anonymous ones
2102 and they must all go on the stack. */
2103 int n_named_args;
2104
2105 /* Vector of information about each argument.
2106 Arguments are numbered in the order they will be pushed,
2107 not the order they are written. */
2108 struct arg_data *args;
2109
2110 /* Total size in bytes of all the stack-parms scanned so far. */
2111 struct args_size args_size;
2112 struct args_size adjusted_args_size;
2113 /* Size of arguments before any adjustments (such as rounding). */
2114 int unadjusted_args_size;
2115 /* Data on reg parms scanned so far. */
2116 CUMULATIVE_ARGS args_so_far;
2117 /* Nonzero if a reg parm has been scanned. */
2118 int reg_parm_seen;
2119 /* Nonzero if this is an indirect function call. */
2120
2121 /* Nonzero if we must avoid push-insns in the args for this call.
2122 If stack space is allocated for register parameters, but not by the
2123 caller, then it is preallocated in the fixed part of the stack frame.
2124 So the entire argument block must then be preallocated (i.e., we
2125 ignore PUSH_ROUNDING in that case). */
2126
2127 int must_preallocate = !PUSH_ARGS;
2128
2129 /* Size of the stack reserved for parameter registers. */
2130 int reg_parm_stack_space = 0;
2131
2132 /* Address of space preallocated for stack parms
2133 (on machines that lack push insns), or 0 if space not preallocated. */
2134 rtx argblock = 0;
2135
2136 /* Mask of ECF_ flags. */
2137 int flags = 0;
2138 /* Nonzero if this is a call to an inline function. */
2139 int is_integrable = 0;
2140 #ifdef REG_PARM_STACK_SPACE
2141 /* Define the boundary of the register parm stack space that needs to be
2142 saved, if any. */
2143 int low_to_save = -1, high_to_save;
2144 rtx save_area = 0; /* Place that it is saved */
2145 #endif
2146
2147 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2148 char *initial_stack_usage_map = stack_usage_map;
2149 int old_stack_arg_under_construction = 0;
2150
2151 rtx old_stack_level = 0;
2152 int old_pending_adj = 0;
2153 int old_inhibit_defer_pop = inhibit_defer_pop;
2154 int old_stack_allocated;
2155 rtx call_fusage;
2156 register tree p = TREE_OPERAND (exp, 0);
2157 register int i;
2158 /* The alignment of the stack, in bits. */
2159 HOST_WIDE_INT preferred_stack_boundary;
2160 /* The alignment of the stack, in bytes. */
2161 HOST_WIDE_INT preferred_unit_stack_boundary;
2162
2163 /* The value of the function call can be put in a hard register. But
2164 if -fcheck-memory-usage, code which invokes functions (and thus
2165 damages some hard registers) can be inserted before using the value.
2166 So, target is always a pseudo-register in that case. */
2167 if (current_function_check_memory_usage)
2168 target = 0;
2169
2170 /* See if this is "nothrow" function call. */
2171 if (TREE_NOTHROW (exp))
2172 flags |= ECF_NOTHROW;
2173
2174 /* See if we can find a DECL-node for the actual function.
2175 As a result, decide whether this is a call to an integrable function. */
2176
2177 fndecl = get_callee_fndecl (exp);
2178 if (fndecl)
2179 {
2180 if (!flag_no_inline
2181 && fndecl != current_function_decl
2182 && DECL_INLINE (fndecl)
2183 && DECL_SAVED_INSNS (fndecl)
2184 && DECL_SAVED_INSNS (fndecl)->inlinable)
2185 is_integrable = 1;
2186 else if (! TREE_ADDRESSABLE (fndecl))
2187 {
2188 /* In case this function later becomes inlinable,
2189 record that there was already a non-inline call to it.
2190
2191 Use abstraction instead of setting TREE_ADDRESSABLE
2192 directly. */
2193 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2194 && optimize > 0)
2195 {
2196 warning_with_decl (fndecl, "can't inline call to `%s'");
2197 warning ("called from here");
2198 }
2199 mark_addressable (fndecl);
2200 }
2201
2202 flags |= flags_from_decl_or_type (fndecl);
2203 }
2204
2205 /* If we don't have a specific function to call, see if we have any
2206 attributes set in the type. */
2207 else
2208 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2209
2210 /* Mark if the function returns with the stack pointer depressed. */
2211 if (TREE_CODE (TREE_TYPE (TREE_TYPE (p))) == FUNCTION_TYPE
2212 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (TREE_TYPE (p))))
2213 {
2214 flags |= ECF_SP_DEPRESSED;
2215 flags &= ~(ECF_PURE | ECF_CONST);
2216 }
2217
2218 #ifdef REG_PARM_STACK_SPACE
2219 #ifdef MAYBE_REG_PARM_STACK_SPACE
2220 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2221 #else
2222 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2223 #endif
2224 #endif
2225
2226 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2227 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2228 must_preallocate = 1;
2229 #endif
2230
2231 /* Warn if this value is an aggregate type,
2232 regardless of which calling convention we are using for it. */
2233 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2234 warning ("function call has aggregate value");
2235
2236 /* Set up a place to return a structure. */
2237
2238 /* Cater to broken compilers. */
2239 if (aggregate_value_p (exp))
2240 {
2241 /* This call returns a big structure. */
2242 flags &= ~(ECF_CONST | ECF_PURE);
2243
2244 #ifdef PCC_STATIC_STRUCT_RETURN
2245 {
2246 pcc_struct_value = 1;
2247 /* Easier than making that case work right. */
2248 if (is_integrable)
2249 {
2250 /* In case this is a static function, note that it has been
2251 used. */
2252 if (! TREE_ADDRESSABLE (fndecl))
2253 mark_addressable (fndecl);
2254 is_integrable = 0;
2255 }
2256 }
2257 #else /* not PCC_STATIC_STRUCT_RETURN */
2258 {
2259 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2260
2261 if (target && GET_CODE (target) == MEM)
2262 structure_value_addr = XEXP (target, 0);
2263 else
2264 {
2265 /* For variable-sized objects, we must be called with a target
2266 specified. If we were to allocate space on the stack here,
2267 we would have no way of knowing when to free it. */
2268 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2269
2270 mark_temp_addr_taken (d);
2271 structure_value_addr = XEXP (d, 0);
2272 target = 0;
2273 }
2274 }
2275 #endif /* not PCC_STATIC_STRUCT_RETURN */
2276 }
2277
2278 /* If called function is inline, try to integrate it. */
2279
2280 if (is_integrable)
2281 {
2282 rtx temp = try_to_integrate (fndecl, actparms, target,
2283 ignore, TREE_TYPE (exp),
2284 structure_value_addr);
2285 if (temp != (rtx) (HOST_WIDE_INT) - 1)
2286 return temp;
2287 }
2288
2289 /* Figure out the amount to which the stack should be aligned. */
2290 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2291
2292 /* Operand 0 is a pointer-to-function; get the type of the function. */
2293 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2294 if (! POINTER_TYPE_P (funtype))
2295 abort ();
2296 funtype = TREE_TYPE (funtype);
2297
2298 /* See if this is a call to a function that can return more than once
2299 or a call to longjmp or malloc. */
2300 flags |= special_function_p (fndecl, flags);
2301
2302 if (flags & ECF_MAY_BE_ALLOCA)
2303 current_function_calls_alloca = 1;
2304
2305 /* If struct_value_rtx is 0, it means pass the address
2306 as if it were an extra parameter. */
2307 if (structure_value_addr && struct_value_rtx == 0)
2308 {
2309 /* If structure_value_addr is a REG other than
2310 virtual_outgoing_args_rtx, we can always use it. If it
2311 is not a REG, we must always copy it into a register.
2312 If it is virtual_outgoing_args_rtx, we must copy it to another
2313 register in some cases. */
2314 rtx temp = (GET_CODE (structure_value_addr) != REG
2315 || (ACCUMULATE_OUTGOING_ARGS
2316 && stack_arg_under_construction
2317 && structure_value_addr == virtual_outgoing_args_rtx)
2318 ? copy_addr_to_reg (structure_value_addr)
2319 : structure_value_addr);
2320
2321 actparms
2322 = tree_cons (error_mark_node,
2323 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2324 temp),
2325 actparms);
2326 structure_value_addr_parm = 1;
2327 }
2328
2329 /* Count the arguments and set NUM_ACTUALS. */
2330 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2331 num_actuals++;
2332
2333 /* Compute number of named args.
2334 Normally, don't include the last named arg if anonymous args follow.
2335 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2336 (If no anonymous args follow, the result of list_length is actually
2337 one too large. This is harmless.)
2338
2339 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2340 zero, this machine will be able to place unnamed args that were
2341 passed in registers into the stack. So treat all args as named.
2342 This allows the insns emitted for a specific argument list to be
2343 independent of the function declaration.
2344
2345 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2346 reliable way to pass unnamed args in registers, so we must force
2347 them into memory. */
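/* As a purely illustrative instance of the computation below, assuming
   STRICT_ARGUMENT_NAMING and PRETEND_OUTGOING_VARARGS_NAMED are both
   zero and no struct value address is passed as a parm: for a varargs
   prototype such as int f (int, ...), the TYPE_ARG_TYPES list has a
   single node, so n_named_args comes out as 1 - 1 + 0 == 0; the last
   (and only) named argument is not counted, since anonymous arguments
   may follow it.  */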
2348
2349 if ((STRICT_ARGUMENT_NAMING
2350 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2351 && TYPE_ARG_TYPES (funtype) != 0)
2352 n_named_args
2353 = (list_length (TYPE_ARG_TYPES (funtype))
2354 /* Don't include the last named arg. */
2355 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2356 /* Count the struct value address, if it is passed as a parm. */
2357 + structure_value_addr_parm);
2358 else
2359 /* If we know nothing, treat all args as named. */
2360 n_named_args = num_actuals;
2361
2362 /* Start updating where the next arg would go.
2363
2364 On some machines (such as the PA) indirect calls have a different
2365 calling convention than normal calls. The last argument in
2366 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2367 or not. */
2368 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2369
2370 /* Make a vector to hold all the information about each arg. */
2371 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2372 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2373
2374 /* Build up entries in the ARGS array, compute the size of the
2375 arguments into ARGS_SIZE, etc. */
2376 initialize_argument_information (num_actuals, args, &args_size,
2377 n_named_args, actparms, fndecl,
2378 &args_so_far, reg_parm_stack_space,
2379 &old_stack_level, &old_pending_adj,
2380 &must_preallocate, &flags);
2381
2382 if (args_size.var)
2383 {
2384 /* If this function requires a variable-sized argument list, don't
2385 try to make a cse'able block for this call. We may be able to
2386 do this eventually, but it is too complicated to keep track of
2387 what insns go in the cse'able block and which don't. */
2388
2389 flags &= ~(ECF_CONST | ECF_PURE);
2390 must_preallocate = 1;
2391 }
2392
2393 /* Now make final decision about preallocating stack space. */
2394 must_preallocate = finalize_must_preallocate (must_preallocate,
2395 num_actuals, args,
2396 &args_size);
2397
2398 /* If the structure value address will reference the stack pointer, we
2399 must stabilize it. We don't need to do this if we know that we are
2400 not going to adjust the stack pointer in processing this call. */
2401
2402 if (structure_value_addr
2403 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2404 || reg_mentioned_p (virtual_outgoing_args_rtx,
2405 structure_value_addr))
2406 && (args_size.var
2407 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2408 structure_value_addr = copy_to_reg (structure_value_addr);
2409
2410 /* Tail calls can make things harder to debug, and we've traditionally
2411 pushed these optimizations into -O2. Don't try if we're already
2412 expanding a call, as that means we're an argument. Don't try if
2413 there are cleanups, as we know there's code to follow the call.
2414
2415 If rtx_equal_function_value_matters is false, that means we've
2416 finished with regular parsing. Which means that some of the
2417 machinery we use to generate tail-calls is no longer in place.
2418 This is most often true of sjlj-exceptions, which we couldn't
2419 tail-call to anyway. */
2420
2421 if (currently_expanding_call++ != 0
2422 || !flag_optimize_sibling_calls
2423 || !rtx_equal_function_value_matters
2424 || any_pending_cleanups (1)
2425 || args_size.var)
2426 try_tail_call = try_tail_recursion = 0;
2427
2428 /* Tail recursion fails when we are not dealing with recursive calls. */
2429 if (!try_tail_recursion
2430 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2431 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2432 try_tail_recursion = 0;
2433
2434 /* Other reasons for tail call optimization to fail. */
2435 if (
2436 #ifdef HAVE_sibcall_epilogue
2437 !HAVE_sibcall_epilogue
2438 #else
2439 1
2440 #endif
2441 || !try_tail_call
2442 /* Doing sibling call optimization needs some work, since
2443 structure_value_addr can be allocated on the stack.
2444 It does not seem worth the effort since few optimizable
2445 sibling calls will return a structure. */
2446 || structure_value_addr != NULL_RTX
2447 /* If the register holding the address is a callee saved
2448 register, then we lose. We have no way to prevent that,
2449 so we only allow calls to named functions. */
2450 /* ??? This could be done by having the insn constraints
2451 use a register class that is all call-clobbered. Any
2452 reload insns generated to fix things up would appear
2453 before the sibcall_epilogue. */
2454 || fndecl == NULL_TREE
2455 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP))
2456 || TREE_THIS_VOLATILE (fndecl)
2457 || !FUNCTION_OK_FOR_SIBCALL (fndecl)
2458 /* If this function requires more stack slots than the current
2459 function, we cannot change it into a sibling call. */
2460 || args_size.constant > current_function_args_size
2461 /* If the callee pops its own arguments, then it must pop exactly
2462 the same number of arguments as the current function. */
2463 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2464 != RETURN_POPS_ARGS (current_function_decl,
2465 TREE_TYPE (current_function_decl),
2466 current_function_args_size))
2467 try_tail_call = 0;
2468
2469 if (try_tail_call || try_tail_recursion)
2470 {
2471 int end, inc;
2472 actparms = NULL_TREE;
2473 /* Ok, we're going to give the tail call the old college try.
2474 This means we're going to evaluate the function arguments
2475 up to three times. There are two degrees of badness we can
2476 encounter, those that can be unsaved and those that can't.
2477 (See unsafe_for_reeval commentary for details.)
2478
2479 Generate a new argument list. Pass safe arguments through
2480 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2481 For hard badness, evaluate them now and put their resulting
2482 rtx in a temporary VAR_DECL.
2483
2484 initialize_argument_information has ordered the array in the order
2485 the arguments are to be pushed, and we must remember this when
2486 reconstructing the original argument order. */
2487
2488 if (PUSH_ARGS_REVERSED)
2489 {
2490 inc = 1;
2491 i = 0;
2492 end = num_actuals;
2493 }
2494 else
2495 {
2496 inc = -1;
2497 i = num_actuals - 1;
2498 end = -1;
2499 }
2500
2501 for (; i != end; i += inc)
2502 {
2503 switch (unsafe_for_reeval (args[i].tree_value))
2504 {
2505 case 0: /* Safe. */
2506 break;
2507
2508 case 1: /* Mildly unsafe. */
2509 args[i].tree_value = unsave_expr (args[i].tree_value);
2510 break;
2511
2512 case 2: /* Wildly unsafe. */
2513 {
2514 tree var = build_decl (VAR_DECL, NULL_TREE,
2515 TREE_TYPE (args[i].tree_value));
2516 SET_DECL_RTL (var,
2517 expand_expr (args[i].tree_value, NULL_RTX,
2518 VOIDmode, EXPAND_NORMAL));
2519 args[i].tree_value = var;
2520 }
2521 break;
2522
2523 default:
2524 abort ();
2525 }
2526 /* We need to build actparms for optimize_tail_recursion. We can
2527 safely discard TREE_PURPOSE, since it is unused by this
2528 function. */
2529 if (try_tail_recursion)
2530 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2531 }
2532 /* Expanding one of those dangerous arguments could have added
2533 cleanups, but otherwise give it a whirl. */
2534 if (any_pending_cleanups (1))
2535 try_tail_call = try_tail_recursion = 0;
2536 }
2537
2538 /* Generate a tail recursion sequence when calling ourselves. */
2539
2540 if (try_tail_recursion)
2541 {
2542 /* We want to emit any pending stack adjustments before the tail
2543 recursion "call". That way we know any adjustment after the tail
2544 recursion call can be ignored if we indeed use the tail recursion
2545 call expansion. */
2546 int save_pending_stack_adjust = pending_stack_adjust;
2547 int save_stack_pointer_delta = stack_pointer_delta;
2548
2549 /* Emit any queued insns now; otherwise they would end up in
2550 only one of the alternates. */
2551 emit_queue ();
2552
2553 /* Use a new sequence to hold any RTL we generate. We do not even
2554 know if we will use this RTL yet. The final decision can not be
2555 made until after RTL generation for the entire function is
2556 complete. */
2557 start_sequence ();
2558 /* If expanding any of the arguments creates cleanups, we can't
2559 do a tailcall. So, we'll need to pop the pending cleanups
2560 list. If, however, all goes well, and there are no cleanups
2561 then the call to expand_start_target_temps will have no
2562 effect. */
2563 expand_start_target_temps ();
2564 if (optimize_tail_recursion (actparms, get_last_insn ()))
2565 {
2566 if (any_pending_cleanups (1))
2567 try_tail_call = try_tail_recursion = 0;
2568 else
2569 tail_recursion_insns = get_insns ();
2570 }
2571 expand_end_target_temps ();
2572 end_sequence ();
2573
2574 /* Restore the original pending stack adjustment for the sibling and
2575 normal call cases below. */
2576 pending_stack_adjust = save_pending_stack_adjust;
2577 stack_pointer_delta = save_stack_pointer_delta;
2578 }
2579
2580 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2581 {
2582 /* A fork duplicates the profile information, and an exec discards
2583 it. We can't rely on fork/exec to be paired. So write out the
2584 profile information we have gathered so far, and clear it. */
2585 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2586 is subject to race conditions, just as with multithreaded
2587 programs. */
2588
2589 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"),
2590 LCT_ALWAYS_RETURN,
2591 VOIDmode, 0);
2592 }
2593
2594 /* Ensure current function's preferred stack boundary is at least
2595 what we need. We don't have to increase alignment for recursive
2596 functions. */
2597 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2598 && fndecl != current_function_decl)
2599 cfun->preferred_stack_boundary = preferred_stack_boundary;
2600
2601 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2602
2603 function_call_count++;
2604
2605 /* We want to make two insn chains; one for a sibling call, the other
2606 for a normal call. We will select one of the two chains after
2607 initial RTL generation is complete. */
2608 for (pass = 0; pass < 2; pass++)
2609 {
2610 int sibcall_failure = 0;
2611 /* We want to emit any pending stack adjustments before the tail
2612 recursion "call". That way we know any adjustment after the tail
2613 recursion call can be ignored if we indeed use the tail recursion
2614 call expansion. */
2615 int save_pending_stack_adjust = 0;
2616 int save_stack_pointer_delta = 0;
2617 rtx insns;
2618 rtx before_call, next_arg_reg;
2619
2620 if (pass == 0)
2621 {
2622 if (! try_tail_call)
2623 continue;
2624
2625 /* Emit any queued insns now; otherwise they would end up in
2626 only one of the alternates. */
2627 emit_queue ();
2628
2629 /* State variables we need to save and restore between
2630 iterations. */
2631 save_pending_stack_adjust = pending_stack_adjust;
2632 save_stack_pointer_delta = stack_pointer_delta;
2633 }
2634 if (pass)
2635 flags &= ~ECF_SIBCALL;
2636 else
2637 flags |= ECF_SIBCALL;
2638
2639 /* Other state variables that we must reinitialize each time
2640 through the loop (that are not initialized by the loop itself). */
2641 argblock = 0;
2642 call_fusage = 0;
2643
2644 /* Start a new sequence for the normal call case.
2645
2646 From this point on, if the sibling call fails, we want to set
2647 sibcall_failure instead of continuing the loop. */
2648 start_sequence ();
2649
2650 if (pass == 0)
2651 {
2652 /* We know at this point that there are not currently any
2653 pending cleanups. If, however, in the process of evaluating
2654 the arguments we were to create some, we'll need to be
2655 able to get rid of them. */
2656 expand_start_target_temps ();
2657 }
2658
2659 /* Don't let pending stack adjusts add up to too much.
2660 Also, do all pending adjustments now if there is any chance
2661 this might be a call to alloca or if we are expanding a sibling
2662 call sequence. */
2663 if (pending_stack_adjust >= 32
2664 || (pending_stack_adjust > 0 && (flags & ECF_MAY_BE_ALLOCA))
2665 || pass == 0)
2666 do_pending_stack_adjust ();
2667
2668 /* When calling a const function, we must pop the stack args right away,
2669 so that the pop is deleted or moved with the call. */
2670 if (flags & (ECF_CONST | ECF_PURE))
2671 NO_DEFER_POP;
2672
2673 /* Push the temporary stack slot level so that we can free any
2674 temporaries we make. */
2675 push_temp_slots ();
2676
2677 #ifdef FINAL_REG_PARM_STACK_SPACE
2678 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2679 args_size.var);
2680 #endif
2681 /* Precompute any arguments as needed. */
2682 if (pass)
2683 precompute_arguments (flags, num_actuals, args);
2684
2685 /* Now we are about to start emitting insns that can be deleted
2686 if a libcall is deleted. */
2687 if (flags & (ECF_CONST | ECF_PURE | ECF_MALLOC))
2688 start_sequence ();
2689
2690 adjusted_args_size = args_size;
2691 /* Compute the actual size of the argument block required. The variable
2692 and constant sizes must be combined, the size may have to be rounded,
2693 and there may be a minimum required size. When generating a sibcall
2694 pattern, do not round up, since we'll be re-using whatever space our
2695 caller provided. */
2696 unadjusted_args_size
2697 = compute_argument_block_size (reg_parm_stack_space,
2698 &adjusted_args_size,
2699 (pass == 0 ? 0
2700 : preferred_stack_boundary));
2701
2702 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2703
2704 /* The argument block when performing a sibling call is the
2705 incoming argument block. */
2706 if (pass == 0)
2707 {
2708 argblock = virtual_incoming_args_rtx;
2709 stored_args_map = sbitmap_alloc (args_size.constant);
2710 sbitmap_zero (stored_args_map);
2711 }
2712
2713 /* If we have no actual push instructions, or shouldn't use them,
2714 make space for all args right now. */
2715 else if (adjusted_args_size.var != 0)
2716 {
2717 if (old_stack_level == 0)
2718 {
2719 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2720 old_pending_adj = pending_stack_adjust;
2721 pending_stack_adjust = 0;
2722 /* stack_arg_under_construction says whether a stack arg is
2723 being constructed at the old stack level. Pushing the stack
2724 gets a clean outgoing argument block. */
2725 old_stack_arg_under_construction = stack_arg_under_construction;
2726 stack_arg_under_construction = 0;
2727 }
2728 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2729 }
2730 else
2731 {
2732 /* Note that we must go through the motions of allocating an argument
2733 block even if the size is zero because we may be storing args
2734 in the area reserved for register arguments, which may be part of
2735 the stack frame. */
2736
2737 int needed = adjusted_args_size.constant;
2738
2739 /* Store the maximum argument space used. It will be pushed by
2740 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2741 checking). */
2742
2743 if (needed > current_function_outgoing_args_size)
2744 current_function_outgoing_args_size = needed;
2745
2746 if (must_preallocate)
2747 {
2748 if (ACCUMULATE_OUTGOING_ARGS)
2749 {
2750 /* Since the stack pointer will never be pushed, it is
2751 possible for the evaluation of a parm to clobber
2752 something we have already written to the stack.
2753 Since most function calls on RISC machines do not use
2754 the stack, this is uncommon, but must work correctly.
2755
2756 Therefore, we save any area of the stack that was already
2757 written and that we are using. Here we set up to do this
2758 by making a new stack usage map from the old one. The
2759 actual save will be done by store_one_arg.
2760
2761 Another approach might be to try to reorder the argument
2762 evaluations to avoid this conflicting stack usage. */
2763
2764 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2765 /* Since we will be writing into the entire argument area,
2766 the map must be allocated for its entire size, not just
2767 the part that is the responsibility of the caller. */
2768 needed += reg_parm_stack_space;
2769 #endif
2770
2771 #ifdef ARGS_GROW_DOWNWARD
2772 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2773 needed + 1);
2774 #else
2775 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2776 needed);
2777 #endif
2778 stack_usage_map
2779 = (char *) alloca (highest_outgoing_arg_in_use);
2780
2781 if (initial_highest_arg_in_use)
2782 memcpy (stack_usage_map, initial_stack_usage_map,
2783 initial_highest_arg_in_use);
2784
2785 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2786 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2787 (highest_outgoing_arg_in_use
2788 - initial_highest_arg_in_use));
2789 needed = 0;
2790
2791 /* The address of the outgoing argument list must not be
2792 copied to a register here, because argblock would be left
2793 pointing to the wrong place after the call to
2794 allocate_dynamic_stack_space below. */
2795
2796 argblock = virtual_outgoing_args_rtx;
2797 }
2798 else
2799 {
2800 if (inhibit_defer_pop == 0)
2801 {
2802 /* Try to reuse some or all of the pending_stack_adjust
2803 to get this space. */
2804 needed
2805 = (combine_pending_stack_adjustment_and_call
2806 (unadjusted_args_size,
2807 &adjusted_args_size,
2808 preferred_unit_stack_boundary));
2809
2810 /* combine_pending_stack_adjustment_and_call computes
2811 an adjustment before the arguments are allocated.
2812 Account for them and see whether or not the stack
2813 needs to go up or down. */
2814 needed = unadjusted_args_size - needed;
2815
2816 if (needed < 0)
2817 {
2818 /* We're releasing stack space. */
2819 /* ??? We can avoid any adjustment at all if we're
2820 already aligned. FIXME. */
2821 pending_stack_adjust = -needed;
2822 do_pending_stack_adjust ();
2823 needed = 0;
2824 }
2825 else
2826 /* We need to allocate space. We'll do that in
2827 push_block below. */
2828 pending_stack_adjust = 0;
2829 }
2830
2831 /* Special case this because overhead of `push_block' in
2832 this case is non-trivial. */
2833 if (needed == 0)
2834 argblock = virtual_outgoing_args_rtx;
2835 else
2836 argblock = push_block (GEN_INT (needed), 0, 0);
2837
2838 /* We only really need to call `copy_to_reg' in the case
2839 where push insns are going to be used to pass ARGBLOCK
2840 to a function call in ARGS. In that case, the stack
2841 pointer changes value from the allocation point to the
2842 call point, and hence the value of
2843 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2844 as well always do it. */
2845 argblock = copy_to_reg (argblock);
2846
2847 /* The save/restore code in store_one_arg handles all
2848 cases except one: a constructor call (including a C
2849 function returning a BLKmode struct) to initialize
2850 an argument. */
2851 if (stack_arg_under_construction)
2852 {
2853 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2854 rtx push_size = GEN_INT (reg_parm_stack_space
2855 + adjusted_args_size.constant);
2856 #else
2857 rtx push_size = GEN_INT (adjusted_args_size.constant);
2858 #endif
2859 if (old_stack_level == 0)
2860 {
2861 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2862 NULL_RTX);
2863 old_pending_adj = pending_stack_adjust;
2864 pending_stack_adjust = 0;
2865 /* stack_arg_under_construction says whether a stack
2866 arg is being constructed at the old stack level.
2867 Pushing the stack gets a clean outgoing argument
2868 block. */
2869 old_stack_arg_under_construction
2870 = stack_arg_under_construction;
2871 stack_arg_under_construction = 0;
2872 /* Make a new map for the new argument list. */
2873 stack_usage_map = (char *)
2874 alloca (highest_outgoing_arg_in_use);
2875 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2876 highest_outgoing_arg_in_use = 0;
2877 }
2878 allocate_dynamic_stack_space (push_size, NULL_RTX,
2879 BITS_PER_UNIT);
2880 }
2881 /* If argument evaluation might modify the stack pointer,
2882 copy the address of the argument list to a register. */
2883 for (i = 0; i < num_actuals; i++)
2884 if (args[i].pass_on_stack)
2885 {
2886 argblock = copy_addr_to_reg (argblock);
2887 break;
2888 }
2889 }
2890 }
2891 }
2892
2893 compute_argument_addresses (args, argblock, num_actuals);
2894
2895 /* If we push args individually in reverse order, perform stack alignment
2896 before the first push (the last arg). */
2897 if (PUSH_ARGS_REVERSED && argblock == 0
2898 && adjusted_args_size.constant != unadjusted_args_size)
2899 {
2900 /* When the stack adjustment is pending, we get better code
2901 by combining the adjustments. */
2902 if (pending_stack_adjust
2903 && ! (flags & (ECF_CONST | ECF_PURE))
2904 && ! inhibit_defer_pop)
2905 {
2906 pending_stack_adjust
2907 = (combine_pending_stack_adjustment_and_call
2908 (unadjusted_args_size,
2909 &adjusted_args_size,
2910 preferred_unit_stack_boundary));
2911 do_pending_stack_adjust ();
2912 }
2913 else if (argblock == 0)
2914 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2915 - unadjusted_args_size));
2916 }
2917 /* Now that the stack is properly aligned, pops can't safely
2918 be deferred during the evaluation of the arguments. */
2919 NO_DEFER_POP;
2920
2921 funexp = rtx_for_function_call (fndecl, exp);
2922
2923 /* Figure out the register where the value, if any, will come back. */
2924 valreg = 0;
2925 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2926 && ! structure_value_addr)
2927 {
2928 if (pcc_struct_value)
2929 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2930 fndecl, (pass == 0));
2931 else
2932 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2933 }
2934
2935 /* Precompute all register parameters. It isn't safe to compute anything
2936 once we have started filling any specific hard regs. */
2937 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2938
2939 #ifdef REG_PARM_STACK_SPACE
2940 /* Save the fixed argument area if it's part of the caller's frame and
2941 is clobbered by argument setup for this call. */
2942 if (ACCUMULATE_OUTGOING_ARGS && pass)
2943 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2944 &low_to_save, &high_to_save);
2945 #endif
2946
2947 /* Now store (and compute if necessary) all non-register parms.
2948 These come before register parms, since they can require block-moves,
2949 which could clobber the registers used for register parms.
2950 Parms which have partial registers are not stored here,
2951 but we do preallocate space here if they want that. */
2952
2953 for (i = 0; i < num_actuals; i++)
2954 if (args[i].reg == 0 || args[i].pass_on_stack)
2955 {
2956 rtx before_arg = get_last_insn ();
2957
2958 if (store_one_arg (&args[i], argblock, flags,
2959 adjusted_args_size.var != 0,
2960 reg_parm_stack_space)
2961 || (pass == 0
2962 && check_sibcall_argument_overlap (before_arg,
2963 &args[i])))
2964 sibcall_failure = 1;
2965 }
2966
2967 /* If we have a parm that is passed in registers but not in memory
2968 and whose alignment does not permit a direct copy into registers,
2969 make a group of pseudos that correspond to each register that we
2970 will later fill. */
2971 if (STRICT_ALIGNMENT)
2972 store_unaligned_arguments_into_pseudos (args, num_actuals);
2973
2974 /* Now store any partially-in-registers parm.
2975 This is the last place a block-move can happen. */
2976 if (reg_parm_seen)
2977 for (i = 0; i < num_actuals; i++)
2978 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2979 {
2980 rtx before_arg = get_last_insn ();
2981
2982 if (store_one_arg (&args[i], argblock, flags,
2983 adjusted_args_size.var != 0,
2984 reg_parm_stack_space)
2985 || (pass == 0
2986 && check_sibcall_argument_overlap (before_arg,
2987 &args[i])))
2988 sibcall_failure = 1;
2989 }
2990
2991 /* If we pushed args in forward order, perform stack alignment
2992 after pushing the last arg. */
2993 if (!PUSH_ARGS_REVERSED && argblock == 0)
2994 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2995 - unadjusted_args_size));
2996
2997 /* If register arguments require space on the stack and stack space
2998 was not preallocated, allocate stack space here for arguments
2999 passed in registers. */
3000 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3001 if (!ACCUMULATE_OUTGOING_ARGS
3002 && must_preallocate == 0 && reg_parm_stack_space > 0)
3003 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3004 #endif
3005
3006 /* Pass the function the address in which to return a
3007 structure value. */
3008 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3009 {
3010 emit_move_insn (struct_value_rtx,
3011 force_reg (Pmode,
3012 force_operand (structure_value_addr,
3013 NULL_RTX)));
3014
3015 /* Mark the memory for the aggregate as write-only. */
3016 if (current_function_check_memory_usage)
3017 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
3018 VOIDmode, 3,
3019 structure_value_addr, ptr_mode,
3020 GEN_INT (struct_value_size),
3021 TYPE_MODE (sizetype),
3022 GEN_INT (MEMORY_USE_WO),
3023 TYPE_MODE (integer_type_node));
3024
3025 if (GET_CODE (struct_value_rtx) == REG)
3026 use_reg (&call_fusage, struct_value_rtx);
3027 }
3028
3029 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3030 reg_parm_seen, pass == 0);
3031
3032 load_register_parameters (args, num_actuals, &call_fusage, flags);
3033
3034 /* Perform postincrements before actually calling the function. */
3035 emit_queue ();
3036
3037 /* Save a pointer to the last insn before the call, so that we can
3038 later safely search backwards to find the CALL_INSN. */
3039 before_call = get_last_insn ();
3040
3041 /* Set up next argument register. For sibling calls on machines
3042 with register windows this should be the incoming register. */
3043 #ifdef FUNCTION_INCOMING_ARG
3044 if (pass == 0)
3045 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3046 void_type_node, 1);
3047 else
3048 #endif
3049 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3050 void_type_node, 1);
3051
3052 /* All arguments and registers used for the call must be set up by
3053 now! */
3054
3055 /* Stack must be properly aligned now. */
3056 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3057 abort ();
3058
3059 /* Generate the actual call instruction. */
3060 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3061 adjusted_args_size.constant, struct_value_size,
3062 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3063 flags);
3064
3065 /* Verify that we've deallocated all the stack we used. */
3066 if (pass
3067 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3068 abort ();
3069
3070 /* If call is cse'able, make appropriate pair of reg-notes around it.
3071 Test valreg so we don't crash; may safely ignore `const'
3072 if return type is void. Disable for PARALLEL return values, because
3073 we have no way to move such values into a pseudo register. */
3074 if (pass
3075 && (flags & (ECF_CONST | ECF_PURE))
3076 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
3077 {
3078 rtx note = 0;
3079 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3080 rtx insns;
3081
3082 /* Mark the return value as a pointer if needed. */
3083 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3084 mark_reg_pointer (temp, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3085
3086 /* Construct an "equal form" for the value which mentions all the
3087 arguments in order as well as the function name. */
3088 for (i = 0; i < num_actuals; i++)
3089 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
3090 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3091
3092 insns = get_insns ();
3093 end_sequence ();
3094
3095 if (flags & ECF_PURE)
3096 note = gen_rtx_EXPR_LIST (VOIDmode,
3097 gen_rtx_USE (VOIDmode,
3098 gen_rtx_MEM (BLKmode,
3099 gen_rtx_SCRATCH (VOIDmode))), note);
3100
3101 emit_libcall_block (insns, temp, valreg, note);
3102
3103 valreg = temp;
3104 }
3105 else if (flags & (ECF_CONST | ECF_PURE))
3106 {
3107 /* Otherwise, just write out the sequence without a note. */
3108 rtx insns = get_insns ();
3109
3110 end_sequence ();
3111 emit_insns (insns);
3112 }
3113 else if (flags & ECF_MALLOC)
3114 {
3115 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3116 rtx last, insns;
3117
3118 /* The return value from a malloc-like function is a pointer. */
3119 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3120 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3121
3122 emit_move_insn (temp, valreg);
3123
3124 /* The return value from a malloc-like function can not alias
3125 anything else. */
3126 last = get_last_insn ();
3127 REG_NOTES (last) =
3128 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3129
3130 /* Write out the sequence. */
3131 insns = get_insns ();
3132 end_sequence ();
3133 emit_insns (insns);
3134 valreg = temp;
3135 }
3136
3137 /* For calls to `setjmp', etc., inform flow.c it should complain
3138 if nonvolatile values are live. For functions that cannot return,
3139 inform flow that control does not fall through. */
3140
3141 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3142 {
3143 /* The barrier must be emitted
3144 immediately after the CALL_INSN. Some ports emit more
3145 than just a CALL_INSN above, so we must search for it here. */
3146
3147 rtx last = get_last_insn ();
3148 while (GET_CODE (last) != CALL_INSN)
3149 {
3150 last = PREV_INSN (last);
3151 /* There was no CALL_INSN? */
3152 if (last == before_call)
3153 abort ();
3154 }
3155
3156 emit_barrier_after (last);
3157 }
3158
3159 if (flags & ECF_LONGJMP)
3160 current_function_calls_longjmp = 1;
3161
3162 /* If this function is returning into a memory location marked as
3163 readonly, it means it is initializing that location. But we normally
3164 treat functions as not clobbering such locations, so we need to
3165 specify that this one does. */
3166 if (target != 0 && GET_CODE (target) == MEM
3167 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3168 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3169
3170 /* If value type not void, return an rtx for the value. */
3171
3172 /* If there are cleanups to be called, don't use a hard reg as target.
3173 We need to double check this and see if it matters anymore. */
3174 if (any_pending_cleanups (1))
3175 {
3176 if (target && REG_P (target)
3177 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3178 target = 0;
3179 sibcall_failure = 1;
3180 }
3181
3182 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3183 || ignore)
3184 {
3185 target = const0_rtx;
3186 }
3187 else if (structure_value_addr)
3188 {
3189 if (target == 0 || GET_CODE (target) != MEM)
3190 {
3191 target
3192 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3193 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3194 structure_value_addr));
3195 set_mem_attributes (target, exp, 1);
3196 }
3197 }
3198 else if (pcc_struct_value)
3199 {
3200 /* This is the special C++ case where we need to
3201 know what the true target was. We take care to
3202 never use this value more than once in one expression. */
3203 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3204 copy_to_reg (valreg));
3205 set_mem_attributes (target, exp, 1);
3206 }
3207 /* Handle calls that return values in multiple non-contiguous locations.
3208 The Irix 6 ABI has examples of this. */
3209 else if (GET_CODE (valreg) == PARALLEL)
3210 {
3211 if (target == 0)
3212 {
3213 /* This will only be assigned once, so it can be readonly. */
3214 tree nt = build_qualified_type (TREE_TYPE (exp),
3215 (TYPE_QUALS (TREE_TYPE (exp))
3216 | TYPE_QUAL_CONST));
3217
3218 target = assign_temp (nt, 0, 1, 1);
3219 preserve_temp_slots (target);
3220 }
3221
3222 if (! rtx_equal_p (target, valreg))
3223 emit_group_store (target, valreg,
3224 int_size_in_bytes (TREE_TYPE (exp)),
3225 TYPE_ALIGN (TREE_TYPE (exp)));
3226
3227 /* We can not support sibling calls for this case. */
3228 sibcall_failure = 1;
3229 }
3230 else if (target
3231 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3232 && GET_MODE (target) == GET_MODE (valreg))
3233 {
3234 /* TARGET and VALREG cannot be equal at this point because the
3235 latter would not have REG_FUNCTION_VALUE_P true, while the
3236 former would if it were referring to the same register.
3237
3238 If they refer to the same register, this move will be a no-op,
3239 except when function inlining is being done. */
3240 emit_move_insn (target, valreg);
3241 }
3242 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3243 {
3244 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3245
3246 /* We can not support sibling calls for this case. */
3247 sibcall_failure = 1;
3248 }
3249 else
3250 target = copy_to_reg (valreg);
3251
3252 #ifdef PROMOTE_FUNCTION_RETURN
3253 /* If we promoted this return value, make the proper SUBREG. TARGET
3254 might be const0_rtx here, so be careful. */
3255 if (GET_CODE (target) == REG
3256 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3257 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3258 {
3259 tree type = TREE_TYPE (exp);
3260 int unsignedp = TREE_UNSIGNED (type);
3261 int offset = 0;
3262
3263 /* If we don't promote as expected, something is wrong. */
3264 if (GET_MODE (target)
3265 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3266 abort ();
3267
3268 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3269 && GET_MODE_SIZE (GET_MODE (target))
3270 > GET_MODE_SIZE (TYPE_MODE (type)))
3271 {
3272 offset = GET_MODE_SIZE (GET_MODE (target))
3273 - GET_MODE_SIZE (TYPE_MODE (type));
3274 if (! BYTES_BIG_ENDIAN)
3275 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3276 else if (! WORDS_BIG_ENDIAN)
3277 offset %= UNITS_PER_WORD;
3278 }
3279 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3280 SUBREG_PROMOTED_VAR_P (target) = 1;
3281 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
3282 }
3283 #endif
3284
3285 /* If size of args is variable or this was a constructor call for a stack
3286 argument, restore saved stack-pointer value. */
3287
3288 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3289 {
3290 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3291 pending_stack_adjust = old_pending_adj;
3292 stack_arg_under_construction = old_stack_arg_under_construction;
3293 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3294 stack_usage_map = initial_stack_usage_map;
3295 sibcall_failure = 1;
3296 }
3297 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3298 {
3299 #ifdef REG_PARM_STACK_SPACE
3300 if (save_area)
3301 {
3302 restore_fixed_argument_area (save_area, argblock,
3303 high_to_save, low_to_save);
3304 }
3305 #endif
3306
3307 /* If we saved any argument areas, restore them. */
3308 for (i = 0; i < num_actuals; i++)
3309 if (args[i].save_area)
3310 {
3311 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3312 rtx stack_area
3313 = gen_rtx_MEM (save_mode,
3314 memory_address (save_mode,
3315 XEXP (args[i].stack_slot, 0)));
3316
3317 if (save_mode != BLKmode)
3318 emit_move_insn (stack_area, args[i].save_area);
3319 else
3320 emit_block_move (stack_area,
3321 validize_mem (args[i].save_area),
3322 GEN_INT (args[i].size.constant),
3323 PARM_BOUNDARY);
3324 }
3325
3326 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3327 stack_usage_map = initial_stack_usage_map;
3328 }
3329
3330 /* If this was alloca, record the new stack level for nonlocal gotos.
3331 Check for the handler slots since we might not have a save area
3332 for non-local gotos. */
3333
3334 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3335 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3336
3337 pop_temp_slots ();
3338
3339 /* Free up storage we no longer need. */
3340 for (i = 0; i < num_actuals; ++i)
3341 if (args[i].aligned_regs)
3342 free (args[i].aligned_regs);
3343
3344 if (pass == 0)
3345 {
3346 /* Undo the fake expand_start_target_temps we did earlier. If
3347 there had been any cleanups created, we've already set
3348 sibcall_failure. */
3349 expand_end_target_temps ();
3350 }
3351
3352 insns = get_insns ();
3353 end_sequence ();
3354
3355 if (pass == 0)
3356 {
3357 tail_call_insns = insns;
3358
3359 /* Restore the pending stack adjustment now that we have
3360 finished generating the sibling call sequence. */
3361
3362 pending_stack_adjust = save_pending_stack_adjust;
3363 stack_pointer_delta = save_stack_pointer_delta;
3364
3365 /* Prepare arg structure for next iteration. */
3366 for (i = 0; i < num_actuals; i++)
3367 {
3368 args[i].value = 0;
3369 args[i].aligned_regs = 0;
3370 args[i].stack = 0;
3371 }
3372
3373 sbitmap_free (stored_args_map);
3374 }
3375 else
3376 normal_call_insns = insns;
3377
3378 /* If something prevents making this a sibling call,
3379 zero out the sequence. */
3380 if (sibcall_failure)
3381 tail_call_insns = NULL_RTX;
3382 }
3383
3384 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3385 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3386 can happen if the arguments to this function call an inline
3387 function whose expansion contains another CALL_PLACEHOLDER.
3388
3389 If there are any C_Ps in any of these sequences, replace them
3390 with their normal call. */
3391
3392 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3393 if (GET_CODE (insn) == CALL_INSN
3394 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3395 replace_call_placeholder (insn, sibcall_use_normal);
3396
3397 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3398 if (GET_CODE (insn) == CALL_INSN
3399 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3400 replace_call_placeholder (insn, sibcall_use_normal);
3401
3402 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3403 if (GET_CODE (insn) == CALL_INSN
3404 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3405 replace_call_placeholder (insn, sibcall_use_normal);
3406
3407 /* If this was a potential tail recursion site, then emit a
3408 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3409 One of them will be selected later. */
3410 if (tail_recursion_insns || tail_call_insns)
3411 {
3412 /* The tail recursion label must be kept around. We could expose
3413 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3414 and makes determining true tail recursion sites difficult.
3415
3416 So we set LABEL_PRESERVE_P here, then clear it when we select
3417 one of the call sequences after rtl generation is complete. */
3418 if (tail_recursion_insns)
3419 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3420 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3421 tail_call_insns,
3422 tail_recursion_insns,
3423 tail_recursion_label));
3424 }
3425 else
3426 emit_insns (normal_call_insns);
3427
3428 currently_expanding_call--;
3429
3430 /* If this function returns with the stack pointer depressed, ensure
3431 this block saves and restores the stack pointer, show it was
3432 changed, and adjust for any outgoing arg space. */
3433 if (flags & ECF_SP_DEPRESSED)
3434 {
3435 clear_pending_stack_adjust ();
3436 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3437 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3438 save_stack_pointer ();
3439 }
3440
3441 return target;
3442 }
3443 \f
3444 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3445 The RETVAL parameter specifies whether the return value needs to be saved;
3446 the other parameters are documented in the emit_library_call function below. */
3447 static rtx
3448 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3449 int retval;
3450 rtx orgfun;
3451 rtx value;
3452 enum libcall_type fn_type;
3453 enum machine_mode outmode;
3454 int nargs;
3455 va_list p;
3456 {
3457 /* Total size in bytes of all the stack-parms scanned so far. */
3458 struct args_size args_size;
3459 /* Size of arguments before any adjustments (such as rounding). */
3460 struct args_size original_args_size;
3461 register int argnum;
3462 rtx fun;
3463 int inc;
3464 int count;
3465 struct args_size alignment_pad;
3466 rtx argblock = 0;
3467 CUMULATIVE_ARGS args_so_far;
3468 struct arg
3469 {
3470 rtx value;
3471 enum machine_mode mode;
3472 rtx reg;
3473 int partial;
3474 struct args_size offset;
3475 struct args_size size;
3476 rtx save_area;
3477 };
3478 struct arg *argvec;
3479 int old_inhibit_defer_pop = inhibit_defer_pop;
3480 rtx call_fusage = 0;
3481 rtx mem_value = 0;
3482 rtx valreg;
3483 int pcc_struct_value = 0;
3484 int struct_value_size = 0;
3485 int flags;
3486 int reg_parm_stack_space = 0;
3487 int needed;
3488 rtx before_call;
3489
3490 #ifdef REG_PARM_STACK_SPACE
3491 /* Define the boundary of the register parm stack space that needs to be
3492 saved, if any. */
3493 int low_to_save = -1, high_to_save = 0;
3494 rtx save_area = 0; /* Place that it is saved. */
3495 #endif
3496
3497 /* Record the initial stack usage so it can be restored after the call. */
3498 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3499 char *initial_stack_usage_map = stack_usage_map;
3500
3501 #ifdef REG_PARM_STACK_SPACE
3502 #ifdef MAYBE_REG_PARM_STACK_SPACE
3503 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3504 #else
3505 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3506 #endif
3507 #endif
3508
3509 /* By default, library functions can not throw. */
3510 flags = ECF_NOTHROW;
3511
3512 switch (fn_type)
3513 {
3514 case LCT_NORMAL:
3515 case LCT_CONST:
3516 case LCT_PURE:
3517 /* Nothing to do here. */
3518 break;
3519 case LCT_CONST_MAKE_BLOCK:
3520 flags |= ECF_CONST;
3521 break;
3522 case LCT_PURE_MAKE_BLOCK:
3523 flags |= ECF_PURE;
3524 break;
3525 case LCT_NORETURN:
3526 flags |= ECF_NORETURN;
3527 break;
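/* Note the plain assignments below rather than `|=': the ECF_NOTHROW
   default set above is deliberately dropped for these cases, presumably
   because such libcalls may be involved in raising exceptions.  */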
3528 case LCT_THROW:
3529 flags = ECF_NORETURN;
3530 break;
3531 case LCT_ALWAYS_RETURN:
3532 flags = ECF_ALWAYS_RETURN;
3533 break;
3534 }
3535 fun = orgfun;
3536
3537 /* Ensure current function's preferred stack boundary is at least
3538 what we need. */
3539 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3540 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3541
3542 /* If this kind of value comes back in memory,
3543 decide where in memory it should come back. */
3544 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
3545 {
3546 #ifdef PCC_STATIC_STRUCT_RETURN
3547 rtx pointer_reg
3548 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3549 0, 0);
3550 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3551 pcc_struct_value = 1;
3552 if (value == 0)
3553 value = gen_reg_rtx (outmode);
3554 #else /* not PCC_STATIC_STRUCT_RETURN */
3555 struct_value_size = GET_MODE_SIZE (outmode);
3556 if (value != 0 && GET_CODE (value) == MEM)
3557 mem_value = value;
3558 else
3559 mem_value = assign_temp (type_for_mode (outmode, 0), 0, 1, 1);
3560 #endif
3561
3562 /* This call returns a big structure. */
3563 flags &= ~(ECF_CONST | ECF_PURE);
3564 }
3565
3566 /* ??? Unfinished: must pass the memory address as an argument. */
3567
3568 /* Copy all the libcall-arguments out of the varargs data
3569 and into a vector ARGVEC.
3570
3571 Compute how to pass each argument. We only support a very small subset
3572 of the full argument passing conventions to limit complexity here since
3573 library functions shouldn't have many args. */
3574
3575 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3576 memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3577
3578 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3579 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3580 #else
3581 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3582 #endif
3583
3584 args_size.constant = 0;
3585 args_size.var = 0;
3586
3587 count = 0;
3588
3589 /* Now we are about to start emitting insns that can be deleted
3590 if a libcall is deleted. */
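/* (For const or pure calls the sequence is closed further down and
   wrapped in REG_LIBCALL/REG_RETVAL notes by emit_libcall_block, which
   is what lets the whole call be deleted.)  */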
3591 if (flags & (ECF_CONST | ECF_PURE))
3592 start_sequence ();
3593
3594 push_temp_slots ();
3595
3596 /* If there's a structure value address to be passed,
3597 either pass it in the special place, or pass it as an extra argument. */
3598 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3599 {
3600 rtx addr = XEXP (mem_value, 0);
3601 nargs++;
3602
3603 /* Make sure it is a reasonable operand for a move or push insn. */
3604 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3605 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3606 addr = force_operand (addr, NULL_RTX);
3607
3608 argvec[count].value = addr;
3609 argvec[count].mode = Pmode;
3610 argvec[count].partial = 0;
3611
3612 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3613 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3614 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3615 abort ();
3616 #endif
3617
3618 locate_and_pad_parm (Pmode, NULL_TREE,
3619 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3620 1,
3621 #else
3622 argvec[count].reg != 0,
3623 #endif
3624 NULL_TREE, &args_size, &argvec[count].offset,
3625 &argvec[count].size, &alignment_pad);
3626
3627 if (argvec[count].reg == 0 || argvec[count].partial != 0
3628 || reg_parm_stack_space > 0)
3629 args_size.constant += argvec[count].size.constant;
3630
3631 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3632
3633 count++;
3634 }
3635
3636 for (; count < nargs; count++)
3637 {
3638 rtx val = va_arg (p, rtx);
3639 enum machine_mode mode = va_arg (p, enum machine_mode);
3640
3641 /* We cannot convert the arg value to the mode the library wants here;
3642 must do it earlier where we know the signedness of the arg. */
3643 if (mode == BLKmode
3644 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3645 abort ();
3646
3647 /* On some machines, there's no way to pass a float to a library fcn.
3648 Pass it as a double instead. */
3649 #ifdef LIBGCC_NEEDS_DOUBLE
3650 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3651 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3652 #endif
3653
3654 /* There's no need to call protect_from_queue, because
3655 either emit_move_insn or emit_push_insn will do that. */
3656
3657 /* Make sure it is a reasonable operand for a move or push insn. */
3658 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3659 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3660 val = force_operand (val, NULL_RTX);
3661
3662 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3663 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3664 {
3665 rtx slot;
3666 int must_copy = 1
3667 #ifdef FUNCTION_ARG_CALLEE_COPIES
3668 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3669 NULL_TREE, 1)
3670 #endif
3671 ;
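/* MUST_COPY is clear only when the target says the callee makes its own
   copy of by-reference arguments, in which case the address of VAL can be
   passed directly.  Otherwise VAL is copied into a temporary slot first,
   so the callee cannot clobber the caller's object.  */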
3672
3673 if (GET_MODE (val) == MEM && ! must_copy)
3674 slot = val;
3675 else if (must_copy)
3676 {
3677 slot = assign_temp (type_for_mode (mode, 0), 0, 1, 1);
3678 emit_move_insn (slot, val);
3679 }
3680 else
3681 {
3682 tree type = type_for_mode (mode, 0);
3683
3684 slot = gen_rtx_MEM (mode,
3685 expand_expr (build1 (ADDR_EXPR,
3686 build_pointer_type
3687 (type),
3688 make_tree (type, val)),
3689 NULL_RTX, VOIDmode, 0));
3690 }
3691
3692 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3693 gen_rtx_USE (VOIDmode, slot),
3694 call_fusage);
3695 if (must_copy)
3696 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3697 gen_rtx_CLOBBER (VOIDmode,
3698 slot),
3699 call_fusage);
3700
3701 mode = Pmode;
3702 val = force_operand (XEXP (slot, 0), NULL_RTX);
3703 }
3704 #endif
3705
3706 argvec[count].value = val;
3707 argvec[count].mode = mode;
3708
3709 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3710
3711 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3712 argvec[count].partial
3713 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3714 #else
3715 argvec[count].partial = 0;
3716 #endif
3717
3718 locate_and_pad_parm (mode, NULL_TREE,
3719 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3720 1,
3721 #else
3722 argvec[count].reg != 0,
3723 #endif
3724 NULL_TREE, &args_size, &argvec[count].offset,
3725 &argvec[count].size, &alignment_pad);
3726
3727 if (argvec[count].size.var)
3728 abort ();
3729
3730 if (reg_parm_stack_space == 0 && argvec[count].partial)
3731 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3732
3733 if (argvec[count].reg == 0 || argvec[count].partial != 0
3734 || reg_parm_stack_space > 0)
3735 args_size.constant += argvec[count].size.constant;
3736
3737 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3738 }
3739
3740 #ifdef FINAL_REG_PARM_STACK_SPACE
3741 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3742 args_size.var);
3743 #endif
3744 /* If this machine requires an external definition for library
3745 functions, write one out. */
3746 assemble_external_libcall (fun);
3747
3748 original_args_size = args_size;
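/* Round the argument size so that, together with what is already pending
   on the stack (stack_pointer_delta), the stack stays aligned to
   STACK_BYTES.  For example, with a 16-byte boundary, 20 bytes of args
   and a delta of 8, this gives ((20 + 8 + 15) / 16) * 16 - 8 = 24 bytes,
   and 8 + 24 = 32 is again a multiple of 16.  */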
3749 args_size.constant = (((args_size.constant
3750 + stack_pointer_delta
3751 + STACK_BYTES - 1)
3752 / STACK_BYTES
3753 * STACK_BYTES)
3754 - stack_pointer_delta);
3755
3756 args_size.constant = MAX (args_size.constant,
3757 reg_parm_stack_space);
3758
3759 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3760 args_size.constant -= reg_parm_stack_space;
3761 #endif
3762
3763 if (args_size.constant > current_function_outgoing_args_size)
3764 current_function_outgoing_args_size = args_size.constant;
3765
3766 if (ACCUMULATE_OUTGOING_ARGS)
3767 {
3768 /* Since the stack pointer will never be pushed, it is possible for
3769 the evaluation of a parm to clobber something we have already
3770 written to the stack. Since most function calls on RISC machines
3771 do not use the stack, this is uncommon, but must work correctly.
3772
3773 Therefore, we save any area of the stack that was already written
3774 and that we are using. Here we set up to do this by making a new
3775 stack usage map from the old one.
3776
3777 Another approach might be to try to reorder the argument
3778 evaluations to avoid this conflicting stack usage. */
3779
3780 needed = args_size.constant;
3781
3782 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3783 /* Since we will be writing into the entire argument area, the
3784 map must be allocated for its entire size, not just the part that
3785 is the responsibility of the caller. */
3786 needed += reg_parm_stack_space;
3787 #endif
3788
3789 #ifdef ARGS_GROW_DOWNWARD
3790 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3791 needed + 1);
3792 #else
3793 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3794 needed);
3795 #endif
3796 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3797
3798 if (initial_highest_arg_in_use)
3799 memcpy (stack_usage_map, initial_stack_usage_map,
3800 initial_highest_arg_in_use);
3801
3802 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3803 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3804 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3805 needed = 0;
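/* Each byte of stack_usage_map tracks one byte of the outgoing argument
   area: nonzero means that byte already holds a pushed argument and must
   be saved before being overwritten (see the save_area handling below).  */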
3806
3807 /* We must be careful to use virtual regs before they're instantiated,
3808 and real regs afterwards. Loop optimization, for example, can create
3809 new libcalls after we've instantiated the virtual regs, and if we
3810 use virtuals anyway, they won't match the rtl patterns. */
3811
3812 if (virtuals_instantiated)
3813 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3814 else
3815 argblock = virtual_outgoing_args_rtx;
3816 }
3817 else
3818 {
3819 if (!PUSH_ARGS)
3820 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3821 }
3822
3823 /* If we push args individually in reverse order, perform stack alignment
3824 before the first push (the last arg). */
3825 if (argblock == 0 && PUSH_ARGS_REVERSED)
3826 anti_adjust_stack (GEN_INT (args_size.constant
3827 - original_args_size.constant));
3828
3829 if (PUSH_ARGS_REVERSED)
3830 {
3831 inc = -1;
3832 argnum = nargs - 1;
3833 }
3834 else
3835 {
3836 inc = 1;
3837 argnum = 0;
3838 }
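/* From here on ARGNUM walks ARGVEC in push order: last argument first
   when PUSH_ARGS_REVERSED, first to last otherwise, stepping by INC.  */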
3839
3840 #ifdef REG_PARM_STACK_SPACE
3841 if (ACCUMULATE_OUTGOING_ARGS)
3842 {
3843 /* The argument list is the property of the called routine and it
3844 may clobber it. If the fixed area has been used for previous
3845 parameters, we must save and restore it.
3846
3847 Here we compute the boundary of the area that needs to be saved, if any. */
3848
3849 #ifdef ARGS_GROW_DOWNWARD
3850 for (count = 0; count < reg_parm_stack_space + 1; count++)
3851 #else
3852 for (count = 0; count < reg_parm_stack_space; count++)
3853 #endif
3854 {
3855 if (count >= highest_outgoing_arg_in_use
3856 || stack_usage_map[count] == 0)
3857 continue;
3858
3859 if (low_to_save == -1)
3860 low_to_save = count;
3861
3862 high_to_save = count;
3863 }
3864
3865 if (low_to_save >= 0)
3866 {
3867 int num_to_save = high_to_save - low_to_save + 1;
3868 enum machine_mode save_mode
3869 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3870 rtx stack_area;
3871
3872 /* If we don't have the required alignment, we must do this in BLKmode. */
3873 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3874 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3875 save_mode = BLKmode;
3876
3877 #ifdef ARGS_GROW_DOWNWARD
3878 stack_area = gen_rtx_MEM (save_mode,
3879 memory_address (save_mode,
3880 plus_constant (argblock,
3881 -high_to_save)));
3882 #else
3883 stack_area = gen_rtx_MEM (save_mode,
3884 memory_address (save_mode,
3885 plus_constant (argblock,
3886 low_to_save)));
3887 #endif
3888 if (save_mode == BLKmode)
3889 {
3890 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3891 emit_block_move (validize_mem (save_area), stack_area,
3892 GEN_INT (num_to_save), PARM_BOUNDARY);
3893 }
3894 else
3895 {
3896 save_area = gen_reg_rtx (save_mode);
3897 emit_move_insn (save_area, stack_area);
3898 }
3899 }
3900 }
3901 #endif
3902
3903 /* Push the args that need to be pushed. */
3904
3905 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3906 are to be pushed. */
3907 for (count = 0; count < nargs; count++, argnum += inc)
3908 {
3909 register enum machine_mode mode = argvec[argnum].mode;
3910 register rtx val = argvec[argnum].value;
3911 rtx reg = argvec[argnum].reg;
3912 int partial = argvec[argnum].partial;
3913 int lower_bound = 0, upper_bound = 0, i;
3914
3915 if (! (reg != 0 && partial == 0))
3916 {
3917 if (ACCUMULATE_OUTGOING_ARGS)
3918 {
3919 /* If this is being stored into a pre-allocated, fixed-size,
3920 stack area, save any previous data at that location. */
3921
3922 #ifdef ARGS_GROW_DOWNWARD
3923 /* stack_slot is negative, but we want to index stack_usage_map
3924 with positive values. */
3925 upper_bound = -argvec[argnum].offset.constant + 1;
3926 lower_bound = upper_bound - argvec[argnum].size.constant;
3927 #else
3928 lower_bound = argvec[argnum].offset.constant;
3929 upper_bound = lower_bound + argvec[argnum].size.constant;
3930 #endif
3931
3932 for (i = lower_bound; i < upper_bound; i++)
3933 if (stack_usage_map[i]
3934 /* Don't store things in the fixed argument area at this
3935 point; it has already been saved. */
3936 && i > reg_parm_stack_space)
3937 break;
3938
3939 if (i != upper_bound)
3940 {
3941 /* We need to make a save area. See what mode we can make
3942 it. */
3943 enum machine_mode save_mode
3944 = mode_for_size (argvec[argnum].size.constant
3945 * BITS_PER_UNIT,
3946 MODE_INT, 1);
3947 rtx stack_area
3948 = gen_rtx_MEM
3949 (save_mode,
3950 memory_address
3951 (save_mode,
3952 plus_constant (argblock,
3953 argvec[argnum].offset.constant)));
3954 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3955
3956 emit_move_insn (argvec[argnum].save_area, stack_area);
3957 }
3958 }
3959
3960 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3961 argblock, GEN_INT (argvec[argnum].offset.constant),
3962 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3963
3964 /* Now mark the segment we just used. */
3965 if (ACCUMULATE_OUTGOING_ARGS)
3966 for (i = lower_bound; i < upper_bound; i++)
3967 stack_usage_map[i] = 1;
3968
3969 NO_DEFER_POP;
3970 }
3971 }
3972
3973 /* If we pushed args in forward order, perform stack alignment
3974 after pushing the last arg. */
3975 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3976 anti_adjust_stack (GEN_INT (args_size.constant
3977 - original_args_size.constant));
3978
3979 if (PUSH_ARGS_REVERSED)
3980 argnum = nargs - 1;
3981 else
3982 argnum = 0;
3983
3984 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
3985
3986 /* Now load any reg parms into their regs. */
3987
3988 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3989 are to be pushed. */
3990 for (count = 0; count < nargs; count++, argnum += inc)
3991 {
3992 register rtx val = argvec[argnum].value;
3993 rtx reg = argvec[argnum].reg;
3994 int partial = argvec[argnum].partial;
3995
3996 /* Handle calls that pass values in multiple non-contiguous
3997 locations. The PA64 has examples of this for library calls. */
3998 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3999 emit_group_load (reg, val,
4000 GET_MODE_SIZE (GET_MODE (val)),
4001 GET_MODE_ALIGNMENT (GET_MODE (val)));
4002 else if (reg != 0 && partial == 0)
4003 emit_move_insn (reg, val);
4004
4005 NO_DEFER_POP;
4006 }
4007
4008 /* Any regs containing parms remain in use through the call. */
4009 for (count = 0; count < nargs; count++)
4010 {
4011 rtx reg = argvec[count].reg;
4012 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4013 use_group_regs (&call_fusage, reg);
4014 else if (reg != 0)
4015 use_reg (&call_fusage, reg);
4016 }
4017
4018 /* Pass the function the address in which to return a structure value. */
4019 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4020 {
4021 emit_move_insn (struct_value_rtx,
4022 force_reg (Pmode,
4023 force_operand (XEXP (mem_value, 0),
4024 NULL_RTX)));
4025 if (GET_CODE (struct_value_rtx) == REG)
4026 use_reg (&call_fusage, struct_value_rtx);
4027 }
4028
4029 /* Don't allow popping to be deferred, since then
4030 cse'ing of library calls could delete a call and leave the pop. */
4031 NO_DEFER_POP;
4032 valreg = (mem_value == 0 && outmode != VOIDmode
4033 ? hard_libcall_value (outmode) : NULL_RTX);
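/* hard_libcall_value gives the hard register in which a libcall returns
   a value of mode OUTMODE; when the value comes back in memory there is
   no return register to track.  */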
4034
4035 /* Stack must be properly aligned now. */
4036 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4037 abort ();
4038
4039 before_call = get_last_insn ();
4040
4041 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4042 will set inhibit_defer_pop to that value. */
4043 /* The return type is needed to decide how many bytes the function pops.
4044 Signedness plays no role in that, so for simplicity, we pretend it's
4045 always signed. We also assume that the list of arguments passed has
4046 no impact, so we pretend it is unknown. */
4047
4048 emit_call_1 (fun,
4049 get_identifier (XSTR (orgfun, 0)),
4050 build_function_type (outmode == VOIDmode ? void_type_node
4051 : type_for_mode (outmode, 0), NULL_TREE),
4052 original_args_size.constant, args_size.constant,
4053 struct_value_size,
4054 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4055 valreg,
4056 old_inhibit_defer_pop + 1, call_fusage, flags);
4057
4058 /* For calls to `setjmp', etc., inform flow.c it should complain
4059 if nonvolatile values are live. For functions that cannot return,
4060 inform flow that control does not fall through. */
4061
4062 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4063 {
4064 /* The barrier must be emitted
4065 immediately after the CALL_INSN. Some ports emit more than
4066 just a CALL_INSN above, so we must search for it here. */
4067
4068 rtx last = get_last_insn ();
4069 while (GET_CODE (last) != CALL_INSN)
4070 {
4071 last = PREV_INSN (last);
4072 /* There was no CALL_INSN? */
4073 if (last == before_call)
4074 abort ();
4075 }
4076
4077 emit_barrier_after (last);
4078 }
4079
4080 /* Now restore inhibit_defer_pop to its actual original value. */
4081 OK_DEFER_POP;
4082
4083 /* If call is cse'able, make appropriate pair of reg-notes around it.
4084 Test valreg so we don't crash; may safely ignore `const'
4085 if return type is void. Disable for PARALLEL return values, because
4086 we have no way to move such values into a pseudo register. */
4087 if ((flags & (ECF_CONST | ECF_PURE))
4088 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
4089 {
4090 rtx note = 0;
4091 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4092 rtx insns;
4093 int i;
4094
4095 /* Construct an "equal form" for the value which mentions all the
4096 arguments in order as well as the function name. */
4097 for (i = 0; i < nargs; i++)
4098 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4099 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4100
4101 insns = get_insns ();
4102 end_sequence ();
4103
4104 if (flags & ECF_PURE)
4105 note = gen_rtx_EXPR_LIST (VOIDmode,
4106 gen_rtx_USE (VOIDmode,
4107 gen_rtx_MEM (BLKmode,
4108 gen_rtx_SCRATCH (VOIDmode))), note);
4109
4110 emit_libcall_block (insns, temp, valreg, note);
4111
4112 valreg = temp;
4113 }
4114 else if (flags & (ECF_CONST | ECF_PURE))
4115 {
4116 /* Otherwise, just write out the sequence without a note. */
4117 rtx insns = get_insns ();
4118
4119 end_sequence ();
4120 emit_insns (insns);
4121 }
4122 pop_temp_slots ();
4123
4124 /* Copy the value to the right place. */
4125 if (outmode != VOIDmode && retval)
4126 {
4127 if (mem_value)
4128 {
4129 if (value == 0)
4130 value = mem_value;
4131 if (value != mem_value)
4132 emit_move_insn (value, mem_value);
4133 }
4134 else if (value != 0)
4135 emit_move_insn (value, hard_libcall_value (outmode));
4136 else
4137 value = hard_libcall_value (outmode);
4138 }
4139
4140 if (ACCUMULATE_OUTGOING_ARGS)
4141 {
4142 #ifdef REG_PARM_STACK_SPACE
4143 if (save_area)
4144 {
4145 enum machine_mode save_mode = GET_MODE (save_area);
4146 #ifdef ARGS_GROW_DOWNWARD
4147 rtx stack_area
4148 = gen_rtx_MEM (save_mode,
4149 memory_address (save_mode,
4150 plus_constant (argblock,
4151 - high_to_save)));
4152 #else
4153 rtx stack_area
4154 = gen_rtx_MEM (save_mode,
4155 memory_address (save_mode,
4156 plus_constant (argblock, low_to_save)));
4157 #endif
4158 if (save_mode != BLKmode)
4159 emit_move_insn (stack_area, save_area);
4160 else
4161 emit_block_move (stack_area, validize_mem (save_area),
4162 GEN_INT (high_to_save - low_to_save + 1),
4163 PARM_BOUNDARY);
4164 }
4165 #endif
4166
4167 /* If we saved any argument areas, restore them. */
4168 for (count = 0; count < nargs; count++)
4169 if (argvec[count].save_area)
4170 {
4171 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4172 rtx stack_area
4173 = gen_rtx_MEM (save_mode,
4174 memory_address
4175 (save_mode,
4176 plus_constant (argblock,
4177 argvec[count].offset.constant)));
4178
4179 emit_move_insn (stack_area, argvec[count].save_area);
4180 }
4181
4182 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4183 stack_usage_map = initial_stack_usage_map;
4184 }
4185
4186 return value;
4187
4188 }
4189 \f
4190 /* Output a library call to function FUN (a SYMBOL_REF rtx),
4192 for a value of mode OUTMODE,
4193 with NARGS different arguments, passed as alternating rtx values
4194 and machine_modes to convert them to.
4195 The rtx values should have been passed through protect_from_queue already.
4196
4197 FN_TYPE is zero for `normal' calls, one for `const' calls, which
4198 will be enclosed in REG_LIBCALL/REG_RETVAL notes, and two for `pure'
4199 calls, which are handled like `const' calls with an extra
4200 (use (memory (scratch))). */
4201
4202 void
4203 emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
4204 enum machine_mode outmode, int nargs, ...))
4205 {
4206 #ifndef ANSI_PROTOTYPES
4207 rtx orgfun;
4208 int fn_type;
4209 enum machine_mode outmode;
4210 int nargs;
4211 #endif
4212 va_list p;
4213
4214 VA_START (p, nargs);
4215
4216 #ifndef ANSI_PROTOTYPES
4217 orgfun = va_arg (p, rtx);
4218 fn_type = va_arg (p, int);
4219 outmode = va_arg (p, enum machine_mode);
4220 nargs = va_arg (p, int);
4221 #endif
4222
4223 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4224
4225 va_end (p);
4226 }
4227 \f
4228 /* Like emit_library_call except that an extra argument, VALUE,
4229 comes second and says where to store the result.
4230 (If VALUE is zero, this function chooses a convenient way
4231 to return the value.)
4232
4233 This function returns an rtx for where the value is to be found.
4234 If VALUE is nonzero, VALUE is returned. */
4235
4236 rtx
4237 emit_library_call_value VPARAMS((rtx orgfun, rtx value,
4238 enum libcall_type fn_type,
4239 enum machine_mode outmode, int nargs, ...))
4240 {
4241 #ifndef ANSI_PROTOTYPES
4242 rtx orgfun;
4243 rtx value;
4244 int fn_type;
4245 enum machine_mode outmode;
4246 int nargs;
4247 #endif
4248 va_list p;
4249
4250 VA_START (p, nargs);
4251
4252 #ifndef ANSI_PROTOTYPES
4253 orgfun = va_arg (p, rtx);
4254 value = va_arg (p, rtx);
4255 fn_type = va_arg (p, int);
4256 outmode = va_arg (p, enum machine_mode);
4257 nargs = va_arg (p, int);
4258 #endif
4259
4260 value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);
4261
4262 va_end (p);
4263
4264 return value;
4265 }
4266 \f
4267 #if 0
4268 /* Return an rtx which represents a suitable home on the stack
4269 given TYPE, the type of the argument looking for a home.
4270 This is called only for BLKmode arguments.
4271
4272 SIZE is the size needed for this target.
4273 ARGS_ADDR is the address of the bottom of the argument block for this call.
4274 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
4275 if this machine uses push insns. */
4276
4277 static rtx
4278 target_for_arg (type, size, args_addr, offset)
4279 tree type;
4280 rtx size;
4281 rtx args_addr;
4282 struct args_size offset;
4283 {
4284 rtx target;
4285 rtx offset_rtx = ARGS_SIZE_RTX (offset);
4286
4287 /* We do not call memory_address if possible,
4288 because we want to address as close to the stack
4289 as possible. For non-variable sized arguments,
4290 this will be stack-pointer relative addressing. */
4291 if (GET_CODE (offset_rtx) == CONST_INT)
4292 target = plus_constant (args_addr, INTVAL (offset_rtx));
4293 else
4294 {
4295 /* I have no idea how to guarantee that this
4296 will work in the presence of register parameters. */
4297 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
4298 target = memory_address (QImode, target);
4299 }
4300
4301 return gen_rtx_MEM (BLKmode, target);
4302 }
4303 #endif
4304 \f
4305 /* Store a single argument for a function call
4306 into the register or memory area where it must be passed.
4307 *ARG describes the argument value and where to pass it.
4308
4309 ARGBLOCK is the address of the stack-block for all the arguments,
4310 or 0 on a machine where arguments are pushed individually.
4311
4312 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4313 so we must be careful about how the stack is used.
4314
4315 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4316 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4317 that we need not worry about saving and restoring the stack.
4318
4319 FLAGS is a bitmask of ECF_* flags describing this call.
4320
4321 Return non-zero if this arg should cause sibcall failure,
4322 zero otherwise. */
4323
4324 static int
4325 store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
4326 struct arg_data *arg;
4327 rtx argblock;
4328 int flags;
4329 int variable_size ATTRIBUTE_UNUSED;
4330 int reg_parm_stack_space;
4331 {
4332 register tree pval = arg->tree_value;
4333 rtx reg = 0;
4334 int partial = 0;
4335 int used = 0;
4336 int i, lower_bound = 0, upper_bound = 0;
4337 int sibcall_failure = 0;
4338
4339 if (TREE_CODE (pval) == ERROR_MARK)
4340 return 1;
4341
4342 /* Push a new temporary level for any temporaries we make for
4343 this argument. */
4344 push_temp_slots ();
4345
4346 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4347 {
4348 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4349 save any previous data at that location. */
4350 if (argblock && ! variable_size && arg->stack)
4351 {
4352 #ifdef ARGS_GROW_DOWNWARD
4353 /* stack_slot is negative, but we want to index stack_usage_map
4354 with positive values. */
4355 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4356 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4357 else
4358 upper_bound = 0;
4359
4360 lower_bound = upper_bound - arg->size.constant;
4361 #else
4362 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4363 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4364 else
4365 lower_bound = 0;
4366
4367 upper_bound = lower_bound + arg->size.constant;
4368 #endif
4369
4370 for (i = lower_bound; i < upper_bound; i++)
4371 if (stack_usage_map[i]
4372 /* Don't store things in the fixed argument area at this point;
4373 it has already been saved. */
4374 && i > reg_parm_stack_space)
4375 break;
4376
4377 if (i != upper_bound)
4378 {
4379 /* We need to make a save area. See what mode we can make it. */
4380 enum machine_mode save_mode
4381 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4382 rtx stack_area
4383 = gen_rtx_MEM (save_mode,
4384 memory_address (save_mode,
4385 XEXP (arg->stack_slot, 0)));
4386
4387 if (save_mode == BLKmode)
4388 {
4389 tree ot = TREE_TYPE (arg->tree_value);
4390 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4391 | TYPE_QUAL_CONST));
4392
4393 arg->save_area = assign_temp (nt, 0, 1, 1);
4394 preserve_temp_slots (arg->save_area);
4395 emit_block_move (validize_mem (arg->save_area), stack_area,
4396 expr_size (arg->tree_value),
4397 MIN (PARM_BOUNDARY, TYPE_ALIGN (nt)));
4398 }
4399 else
4400 {
4401 arg->save_area = gen_reg_rtx (save_mode);
4402 emit_move_insn (arg->save_area, stack_area);
4403 }
4404 }
4405 }
4406 /* Now that we have saved any slots that will be overwritten by this
4407 store, mark all slots this store will use. We must do this before
4408 we actually expand the argument since the expansion itself may
4409 trigger library calls which might need to use the same stack slot. */
4410 if (argblock && ! variable_size && arg->stack)
4411 for (i = lower_bound; i < upper_bound; i++)
4412 stack_usage_map[i] = 1;
4413 }
4414
4415 /* If this isn't going to be placed on both the stack and in registers,
4416 set up the register and number of words. */
4417 if (! arg->pass_on_stack)
4418 reg = arg->reg, partial = arg->partial;
4419
4420 if (reg != 0 && partial == 0)
4421 /* Being passed entirely in a register. We shouldn't be called in
4422 this case. */
4423 abort ();
4424
4425 /* If this arg needs special alignment, don't load the registers
4426 here. */
4427 if (arg->n_aligned_regs != 0)
4428 reg = 0;
4429
4430 /* If this is being passed partially in a register, we can't evaluate
4431 it directly into its stack slot. Otherwise, we can. */
4432 if (arg->value == 0)
4433 {
4434 /* stack_arg_under_construction is nonzero if a function argument is
4435 being evaluated directly into the outgoing argument list and
4436 expand_call must take special action to preserve the argument list
4437 if it is called recursively.
4438
4439 For scalar function arguments stack_usage_map is sufficient to
4440 determine which stack slots must be saved and restored. Scalar
4441 arguments in general have pass_on_stack == 0.
4442
4443 If this argument is initialized by a function which takes the
4444 address of the argument (a C++ constructor or a C function
4445 returning a BLKmode structure), then stack_usage_map is
4446 insufficient and expand_call must push the stack around the
4447 function call. Such arguments have pass_on_stack == 1.
4448
4449 Note that it is always safe to set stack_arg_under_construction,
4450 but this generates suboptimal code if set when not needed. */
4451
4452 if (arg->pass_on_stack)
4453 stack_arg_under_construction++;
4454
4455 arg->value = expand_expr (pval,
4456 (partial
4457 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4458 ? NULL_RTX : arg->stack,
4459 VOIDmode, 0);
4460
4461 /* If the mode doesn't agree (because we are promoting the object, or
4462 for any other reason), convert the value to the expected mode. */
4463
4464 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4465 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4466 arg->value, arg->unsignedp);
4467
4468 if (arg->pass_on_stack)
4469 stack_arg_under_construction--;
4470 }
4471
4472 /* Don't allow anything left on stack from computation
4473 of argument to alloca. */
4474 if (flags & ECF_MAY_BE_ALLOCA)
4475 do_pending_stack_adjust ();
4476
4477 if (arg->value == arg->stack)
4478 {
4479 /* If the value is already in the stack slot, we are done. */
4480 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
4481 {
4482 emit_library_call (chkr_set_right_libfunc, LCT_CONST_MAKE_BLOCK,
4483 VOIDmode, 3, XEXP (arg->stack, 0), Pmode,
4484 ARGS_SIZE_RTX (arg->size),
4485 TYPE_MODE (sizetype),
4486 GEN_INT (MEMORY_USE_RW),
4487 TYPE_MODE (integer_type_node));
4488 }
4489 }
4490 else if (arg->mode != BLKmode)
4491 {
4492 register int size;
4493
4494 /* Argument is a scalar, not entirely passed in registers.
4495 (If part is passed in registers, arg->partial says how much
4496 and emit_push_insn will take care of putting it there.)
4497
4498 Push it, and if its size is less than the
4499 amount of space allocated to it,
4500 also bump stack pointer by the additional space.
4501 Note that in C the default argument promotions
4502 will prevent such mismatches. */
4503
4504 size = GET_MODE_SIZE (arg->mode);
4505 /* Compute how much space the push instruction will push.
4506 On many machines, pushing a byte will advance the stack
4507 pointer by a halfword. */
4508 #ifdef PUSH_ROUNDING
4509 size = PUSH_ROUNDING (size);
4510 #endif
4511 used = size;
4512
4513 /* Compute how much space the argument should get:
4514 round up to a multiple of the alignment for arguments. */
4515 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4516 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4517 / (PARM_BOUNDARY / BITS_PER_UNIT))
4518 * (PARM_BOUNDARY / BITS_PER_UNIT));
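/* For example, with PARM_BOUNDARY == 32 a one-byte argument that needs
   padding is rounded up to USED == 4; USED - SIZE is passed to
   emit_push_insn below as the extra space to allocate.  */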
4519
4520 /* This isn't already where we want it on the stack, so put it there.
4521 This can either be done with push or copy insns. */
4522 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4523 partial, reg, used - size, argblock,
4524 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4525 ARGS_SIZE_RTX (arg->alignment_pad));
4526 }
4527 else
4528 {
4529 /* BLKmode, at least partly to be pushed. */
4530
4531 register int excess;
4532 rtx size_rtx;
4533
4534 /* Pushing a nonscalar.
4535 If part is passed in registers, PARTIAL says how much
4536 and emit_push_insn will take care of putting it there. */
4537
4538 /* Round its size up to a multiple
4539 of the allocation unit for arguments. */
4540
4541 if (arg->size.var != 0)
4542 {
4543 excess = 0;
4544 size_rtx = ARGS_SIZE_RTX (arg->size);
4545 }
4546 else
4547 {
4548 /* PUSH_ROUNDING has no effect on us, because
4549 emit_push_insn for BLKmode is careful to avoid it. */
4550 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4551 + partial * UNITS_PER_WORD);
4552 size_rtx = expr_size (pval);
4553 }
4554
4555 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4556 {
4557 /* emit_push_insn might not work properly if arg->value and
4558 argblock + arg->offset areas overlap. */
4559 rtx x = arg->value;
4560 int i = 0;
4561
4562 if (XEXP (x, 0) == current_function_internal_arg_pointer
4563 || (GET_CODE (XEXP (x, 0)) == PLUS
4564 && XEXP (XEXP (x, 0), 0) ==
4565 current_function_internal_arg_pointer
4566 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4567 {
4568 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4569 i = INTVAL (XEXP (XEXP (x, 0), 1));
4570
4571 /* expand_call should ensure this */
4572 if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
4573 abort ();
4574
4575 if (arg->offset.constant > i)
4576 {
4577 if (arg->offset.constant < i + INTVAL (size_rtx))
4578 sibcall_failure = 1;
4579 }
4580 else if (arg->offset.constant < i)
4581 {
4582 if (i < arg->offset.constant + INTVAL (size_rtx))
4583 sibcall_failure = 1;
4584 }
4585 }
4586 }
4587
4588 /* If the parm is passed both in stack and in registers and its stack
4589 area extends beyond reg_parm_stack_space, split the push in two. */
4590 if (arg->reg && arg->pass_on_stack)
4591 {
4592 if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
4593 error ("variable offset is passed partially in stack and in reg");
4594 else if (arg->offset.constant < reg_parm_stack_space && arg->size.var)
4595 error ("variable size is passed partially in stack and in reg");
4596 else if (arg->offset.constant < reg_parm_stack_space
4597 && ((arg->offset.constant + arg->size.constant)
4598 > reg_parm_stack_space))
4599 {
4600 rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
4601 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
4602 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg,
4603 excess, argblock, ARGS_SIZE_RTX (arg->offset),
4604 reg_parm_stack_space,
4605 ARGS_SIZE_RTX (arg->alignment_pad));
4606
4607 size_rtx = GEN_INT (INTVAL (size_rtx) - reg_parm_stack_space);
4608 }
4609 }
4610
4611
4612 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4613 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4614 argblock, ARGS_SIZE_RTX (arg->offset),
4615 reg_parm_stack_space,
4616 ARGS_SIZE_RTX (arg->alignment_pad));
4617 }
4618
4619 /* Unless this is a partially-in-register argument, the argument is now
4620 in the stack.
4621
4622 ??? Note that this can change arg->value from arg->stack to
4623 arg->stack_slot and it matters when they are not the same.
4624 It isn't totally clear that this is correct in all cases. */
4625 if (partial == 0)
4626 arg->value = arg->stack_slot;
4627
4628 /* Once we have pushed something, pops can't safely
4629 be deferred during the rest of the arguments. */
4630 NO_DEFER_POP;
4631
4632 /* ANSI doesn't require a sequence point here,
4633 but PCC has one, so this will avoid some problems. */
4634 emit_queue ();
4635
4636 /* Free any temporary slots made in processing this argument. Show
4637 that we might have taken the address of something and pushed that
4638 as an operand. */
4639 preserve_temp_slots (NULL_RTX);
4640 free_temp_slots ();
4641 pop_temp_slots ();
4642
4643 return sibcall_failure;
4644 }