/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "function.h"
#include "regs.h"
#include "insn-flags.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"

#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#ifdef PUSH_ROUNDING
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#else
#define PUSH_ARGS 0
#endif
#endif

#if !defined FUNCTION_OK_FOR_SIBCALL
#define FUNCTION_OK_FOR_SIBCALL(DECL) 1
#endif

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED PUSH_ARGS
#endif

#endif

#ifndef PUSH_ARGS_REVERSED
#define PUSH_ARGS_REVERSED 0
#endif
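
/* For illustration: on a hypothetical target where the stack grows
   downward but argument addresses grow upward (STACK_GROWS_DOWNWARD
   defined, ARGS_GROW_DOWNWARD not), each push insn lowers the stack
   pointer, so the last argument must be pushed first for the first
   argument to end up at the lowest address.  PUSH_ARGS_REVERSED is
   then nonzero exactly when push insns are actually in use.  */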

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating a tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};
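
/* Illustrative sketch (values invented, not from any real target): for
   a 12-byte struct passed half in registers on a 32-bit machine,
   expand_call might end up with

     args[i].reg     = first of the argument registers
     args[i].partial = 2        two words go in consecutive registers
     args[i].size    = 4 bytes  only the stacked tail counts, when
                                REG_PARM_STACK_SPACE is not defined

   with OFFSET/SLOT_OFFSET locating that 4-byte tail among the
   outgoing stack arguments.  */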

/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;

static int calls_function	PARAMS ((tree, int));
static int calls_function_1	PARAMS ((tree, int));
/* Nonzero if this is a call to a `const' function.  */
#define ECF_CONST 1
/* Nonzero if this is a call to a `noreturn' function (what GNU C
   historically declares with `volatile' on the function).  */
#define ECF_NORETURN 2
/* Nonzero if this is a call to malloc or a related function.  */
#define ECF_MALLOC 4
/* Nonzero if it is plausible that this is a call to alloca.  */
#define ECF_MAY_BE_ALLOCA 8
/* Nonzero if this is a call to a function that won't throw an exception.  */
#define ECF_NOTHROW 16
/* Nonzero if this is a call to setjmp or a related function.  */
#define ECF_RETURNS_TWICE 32
/* Nonzero if this is a call to `longjmp'.  */
#define ECF_LONGJMP 64
/* Nonzero if this is a syscall that makes a new process in the image of
   the current one.  */
#define ECF_FORK_OR_EXEC 128
/* Nonzero if this call is to be made as a sibling (tail) call.  */
#define ECF_SIBCALL 256
/* Nonzero if this is a call to a "pure" function (like a const function,
   but one that may read memory).  */
#define ECF_PURE 512
/* Nonzero if this is a call to a function that returns with the stack
   pointer depressed.  */
#define ECF_SP_DEPRESSED 1024

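/* The ECF_* values are independent bit masks, so a call site can carry
   several at once.  For example (hypothetical flag word), a call to a
   const function that is also known not to throw would be described by

     int flags = ECF_CONST | ECF_NOTHROW;   equal to 17

   and tested with `if (flags & ECF_CONST) ...'.  */
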
static void emit_call_1		PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
					 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
					 rtx, int, rtx, int));
static void precompute_register_parameters	PARAMS ((int,
							 struct arg_data *,
							 int *));
static int store_one_arg	PARAMS ((struct arg_data *, rtx, int, int,
					 int));
static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
							    int));
static int finalize_must_preallocate		PARAMS ((int, int,
							 struct arg_data *,
							 struct args_size *));
static void precompute_arguments	PARAMS ((int, int,
						 struct arg_data *));
static int compute_argument_block_size	PARAMS ((int,
						 struct args_size *,
						 int));
static void initialize_argument_information	PARAMS ((int,
							 struct arg_data *,
							 struct args_size *,
							 int, tree, tree,
							 CUMULATIVE_ARGS *,
							 int, rtx *, int *,
							 int *, int *));
static void compute_argument_addresses	PARAMS ((struct arg_data *,
						 rtx, int));
static rtx rtx_for_function_call	PARAMS ((tree, tree));
static void load_register_parameters	PARAMS ((struct arg_data *,
						 int, rtx *, int));
static int libfunc_nothrow		PARAMS ((rtx));
static rtx emit_library_call_value_1	PARAMS ((int, rtx, rtx, int,
						 enum machine_mode,
						 int, va_list));
static int special_function_p		PARAMS ((tree, int));
static int flags_from_decl_or_type	PARAMS ((tree));
static rtx try_to_integrate		PARAMS ((tree, tree, rtx,
						 int, tree, rtx));
static int check_sibcall_argument_overlap_1	PARAMS ((rtx));
static int check_sibcall_argument_overlap	PARAMS ((rtx, struct arg_data *));

static int combine_pending_stack_adjustment_and_call
					PARAMS ((int, struct args_size *, int));

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area	PARAMS ((int, rtx, int *, int *));
static void restore_fixed_argument_area	PARAMS ((rtx, rtx, int, int));
#endif
\f
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we need only return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
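
/* For example (hypothetical trees): given EXP for `f (alloca (n))',
   calls_function (exp, 0) returns 1 because any CALL_EXPR counts, and
   calls_function (exp, 1) also returns 1 because special_function_p
   recognizes the callee `alloca' by name inside the argument.  Given
   EXP for `x + y' with no calls inside, both return 0.  */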

/* Recursive function to do the work of above function.  */

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== FUNCTION_TYPE)
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL)
	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       0)
		   & ECF_MAY_BE_ALLOCA))
	return 1;

      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	register tree local;
	register tree subblock;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
	if (calls_function_1 (TREE_VALUE (exp), which))
	  return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions, references, and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
\f
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
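
/* For instance, for a GNU C nested function

     int outer (int n) { int inner (void) { return n; } return inner (); }

   the call to `inner' reaches here with a non-null FNDECL, so a pointer
   into `outer's frame is loaded into static_chain_rtx and a USE of that
   register is added to CALL_FUSAGE so later passes keep it live across
   the call.  (Sketch only; the exact chain value comes from
   lookup_static_chain.)  */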

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
	     call_fusage, ecf_flags)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int ecf_flags;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
	  || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg,
				     gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     n_pop);
      else
	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			       rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
					   gen_rtx_MEM (FUNCTION_MODE, funexp),
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    {
      call_fusage
	= gen_rtx_EXPR_LIST (VOIDmode,
			     gen_rtx_USE (VOIDmode,
					  gen_rtx_MEM (BLKmode,
						       gen_rtx_SCRATCH (VOIDmode))),
			     call_fusage);
    }

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
					       REG_NOTES (call_insn));

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
	{
	  if (flag_defer_pop && inhibit_defer_pop == 0
	      && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similarly to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
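
/* A worked example of the deferred-pop path above (invented sizes):
   with -fdefer-pop, two back-to-back calls each passing 16 bytes of
   stack arguments on a caller-pops target do not each emit an
   `addl $16, %esp'-style adjustment; instead pending_stack_adjust
   accumulates 16 and then 32, and a single 32-byte adjustment is
   emitted when the pending amount is finally flushed.  */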

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly, set LONGJMP if the function is in the longjmp family.

   Set MALLOC for any of the standard memory allocation functions which
   allocate from the heap.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (fndecl, flags)
     tree fndecl;
     int flags;
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_LONGJMP;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
		&& ! strcmp (tname, "fork"))
	       /* Linux specific: __clone.  Check NAME to insist on the
		  leading underscores, to avoid polluting the ISO / POSIX
		  namespace.  */
	       || (name[0] == '_' && name[1] == '_'
		   && ! strcmp (tname, "clone"))
	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
		   && (tname[5] == '\0'
		       || ((tname[5] == 'p' || tname[5] == 'e')
			   && tname[6] == '\0'))))
	flags |= ECF_FORK_OR_EXEC;

      /* Do not add any more malloc-like functions to this list;
	 instead mark them as malloc functions using the malloc attribute.
	 Note, realloc is not suitable for attribute malloc since
	 it may return the same address across multiple calls.
	 C++ operator new is not suitable because it is not required
	 to return a unique pointer; indeed, the standard placement new
	 just returns its argument.  */
      else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
	       && (! strcmp (tname, "malloc")
		   || ! strcmp (tname, "calloc")
		   || ! strcmp (tname, "strdup")))
	flags |= ECF_MALLOC;
    }
  return flags;
}
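
/* Example of the prefix stripping above: for a declaration of the libc
   entry point `__vfork', NAME is "__vfork" and TNAME is advanced past
   the two underscores to "vfork", so the strcmp against "vfork" fires
   and the call is flagged ECF_RETURNS_TWICE; the exec* comparison
   likewise accepts "execl", "execv", "execlp", "execle", "execvp" and
   "execve" (and flags them ECF_FORK_OR_EXEC).  */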

/* Return nonzero if FNDECL is a function in the setjmp family, i.e.
   one that may return more than once.  */

int
setjmp_call_p (fndecl)
     tree fndecl;
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Detect flags (function attributes) from the function decl or type node.  */

static int
flags_from_decl_or_type (exp)
     tree exp;
{
  int flags = 0;

  /* ??? We can't set IS_MALLOC for function types?  */
  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
	flags |= ECF_PURE;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  return flags;
}
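
/* So, for a declaration such as

     void fatal (const char *msg) __attribute__ ((noreturn));

   TREE_THIS_VOLATILE is set on the decl and this routine returns
   ECF_NORETURN, while `__attribute__ ((const))' would set
   TREE_READONLY and yield ECF_CONST instead.  (Illustrative mapping;
   the attribute handling itself lives in the front end.)  */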

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (num_actuals, args, reg_parm_seen)
     int num_actuals;
     struct arg_data *args;
     int *reg_parm_seen;
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
					 VOIDmode, 0);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();

	    /* ANSI doesn't require a sequence point here,
	       but PCC has one, so this will avoid some problems.  */
	    emit_queue ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameter registers.  */

	if ((! (GET_CODE (args[i].value) == REG
		|| (GET_CODE (args[i].value) == SUBREG
		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
	    && args[i].mode != BLKmode
	    && rtx_cost (args[i].value, SET) > 2
	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
		|| preserve_subexpressions_p ()))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine, which
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
			  low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
	  || stack_usage_map[i] == 0)
	continue;

      if (*low_to_save == -1)
	*low_to_save = i;

      *high_to_save = i;
    }

  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
      stack_area
	= gen_rtx_MEM (save_mode,
		       memory_address (save_mode,
				       plus_constant (argblock,
						      - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       *low_to_save)));
#endif
      if (save_mode == BLKmode)
	{
	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	  /* Cannot use emit_block_move here because it can be done by a
	     library call which in turn gets into this place again and deadly
	     infinite recursion happens.  */
	  move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
			  PARM_BOUNDARY);
	}
      else
	{
	  save_area = gen_reg_rtx (save_mode);
	  emit_move_insn (save_area, stack_area);
	}
    }
  return save_area;
}

static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  - high_to_save)));
#else
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  low_to_save)));
#endif

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    /* Cannot use emit_block_move here because it can be done by a library
       call which in turn gets into this place again and deadly infinite
       recursion happens.  */
    move_by_pieces (stack_area, validize_mem (save_area),
		    high_to_save - low_to_save + 1, PARM_BOUNDARY);
}
#endif

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int big_endian_correction = 0;

	args[i].n_aligned_regs
	  = args[i].partial ? args[i].partial
	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
						* args[i].n_aligned_regs);

	/* Structures smaller than a word are aligned to the least
	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
	    int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));

	    args[i].aligned_regs[j] = reg;

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
			     extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
						word_mode, word_mode, bitalign,
						BITS_PER_WORD),
			     bitalign, BITS_PER_WORD);
	  }
      }
}
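
/* Worked example of the big-endian correction (hypothetical 32-bit
   big-endian target, BITS_PER_WORD == 32, UNITS_PER_WORD == 4): a
   3-byte struct occupies the *least* significant 24 bits of its
   register, so big_endian_correction = 32 - 3 * 8 = 8, and the
   store_bit_field call above deposits the 24 extracted bits starting
   8 bits down from the most significant end of the word.  */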

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree node for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */

static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
				 actparms, fndecl, args_so_far,
				 reg_parm_stack_space, old_stack_level,
				 old_pending_adj, must_preallocate,
				 ecf_flags)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *ecf_flags;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space
				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0);
	      *ecf_flags &= ~(ECF_CONST | ECF_PURE);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
						     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~(ECF_CONST | ECF_PURE);

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, args_size, &args[i].offset,
			     &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (*args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
	SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (reg_parm_stack_space, args_size,
			     preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the
     frame will already be aligned.  Align to STACK_BOUNDARY in order to
     prevent backends from generating misaligned frame sizes.  */
#ifdef STACK_BOUNDARY
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;
#endif

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

#ifdef PREFERRED_STACK_BOUNDARY
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
	    abort ();
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}
#endif

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
#ifdef PREFERRED_STACK_BOUNDARY
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);
#endif

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
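
/* Worked instance of the constant rounding above (numbers invented):
   with args_size->constant == 14, stack_pointer_delta == 4 and a
   16-byte preferred boundary, the expression computes
   ((14 + 4 + 15) / 16) * 16 - 4 == 32 - 4 == 28, so 28 bytes of
   argument block leave the stack pointer 16-byte aligned at the call
   (the total displacement 28 + 4 is a multiple of 16).  */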

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this routine
   fills in the INITIAL_VALUE and VALUE fields for each precomputed
   argument.  */

static void
precompute_arguments (flags, num_actuals, args)
     int flags;
     int num_actuals;
     struct arg_data *args;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & (ECF_CONST | ECF_PURE))
	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode,
			       TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
				    args[i].value, 0);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
		  = args[i].unsignedp;
	      }
#endif
	  }
      }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
     int must_preallocate;
     int num_actuals;
     struct arg_data *args;
     struct args_size *args_size;
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (args, argblock, num_actuals)
     struct arg_data *args;
     rtx argblock;
     int num_actuals;
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_attributes (args[i].stack,
			      TREE_TYPE (args[i].tree_value), 1);

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_attributes (args[i].stack_slot,
			      TREE_TYPE (args[i].tree_value), 1);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  MEM_ALIAS_SET (args[i].stack) = 0;
	  MEM_ALIAS_SET (args[i].stack_slot) = 0;
	}
    }
}
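
/* To make the PLUS decomposition above concrete (made-up operands): if
   ARGBLOCK is (plus:SI (reg:SI fp) (const_int 16)), then ARG_REG is
   the frame pointer and ARG_OFFSET is 16, so an argument at offset 8
   within the block ends up addressed as
   (mem (plus (reg fp) (const_int 24))).  */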
1634
1635 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1636 in a call instruction.
1637
1638 FNDECL is the tree node for the target function. For an indirect call
1639 FNDECL will be NULL_TREE.
1640
1641 EXP is the CALL_EXPR for this call. */
1642
1643 static rtx
1644 rtx_for_function_call (fndecl, exp)
1645 tree fndecl;
1646 tree exp;
1647 {
1648 rtx funexp;
1649
1650 /* Get the function to call, in the form of RTL. */
1651 if (fndecl)
1652 {
1653 /* If this is the first use of the function, see if we need to
1654 make an external definition for it. */
1655 if (! TREE_USED (fndecl))
1656 {
1657 assemble_external (fndecl);
1658 TREE_USED (fndecl) = 1;
1659 }
1660
1661 /* Get a SYMBOL_REF rtx for the function address. */
1662 funexp = XEXP (DECL_RTL (fndecl), 0);
1663 }
1664 else
1665 /* Generate an rtx (probably a pseudo-register) for the address. */
1666 {
1667 rtx funaddr;
1668 push_temp_slots ();
1669 funaddr = funexp =
1670 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1671 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1672
1673 /* Check that the function is executable. */
1674 if (current_function_check_memory_usage)
1675 {
1676 #ifdef POINTERS_EXTEND_UNSIGNED
1677 /* It might be OK to convert funexp in place, but there's
1678 a lot going on between here and when it happens naturally
1679 that this seems safer. */
1680 funaddr = convert_memory_address (Pmode, funexp);
1681 #endif
1682 emit_library_call (chkr_check_exec_libfunc, 1,
1683 VOIDmode, 1,
1684 funaddr, Pmode);
1685 }
1686 emit_queue ();
1687 }
1688 return funexp;
1689 }
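
/* Example (hypothetical C source): a direct call `foo ();' yields the
   SYMBOL_REF taken from DECL_RTL (foo), while an indirect call
   `(*fp) ();' expands FP into a (probably pseudo) register holding the
   target address.  */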
1690
1691 /* Do the register loads required for any wholly-register parms or any
1692 parms which are passed both on the stack and in a register. Their
1693 expressions were already evaluated.
1694
1695 Mark all register-parms as living through the call, putting these USE
1696 insns in the CALL_INSN_FUNCTION_USAGE field. */
1697
1698 static void
1699 load_register_parameters (args, num_actuals, call_fusage, flags)
1700 struct arg_data *args;
1701 int num_actuals;
1702 rtx *call_fusage;
1703 int flags;
1704 {
1705 int i, j;
1706
1707 #ifdef LOAD_ARGS_REVERSED
1708 for (i = num_actuals - 1; i >= 0; i--)
1709 #else
1710 for (i = 0; i < num_actuals; i++)
1711 #endif
1712 {
1713 rtx reg = ((flags & ECF_SIBCALL)
1714 ? args[i].tail_call_reg : args[i].reg);
1715 int partial = args[i].partial;
1716 int nregs;
1717
1718 if (reg)
1719 {
1720 /* Set to non-negative if we must move a word at a time, even if just
1721 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1722 we just use a normal move insn. This value can be zero if the
1723 argument is a zero-sized structure with no fields. */
1724 nregs = (partial ? partial
1725 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1726 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1727 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1728 : -1));
1729
1730 /* Handle calls that pass values in multiple non-contiguous
1731 locations. The Irix 6 ABI has examples of this. */
1732
1733 if (GET_CODE (reg) == PARALLEL)
1734 emit_group_load (reg, args[i].value,
1735 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1736 TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
1737
1738 /* In the simple case, just do the move. If this is a normal partial
1739 arg, store_one_arg has already loaded the register for us. In all
1740 other cases, load the register(s) from memory. */
1741
1742 else if (nregs == -1)
1743 emit_move_insn (reg, args[i].value);
1744
1745 /* If we have pre-computed the values to put in the registers in
1746 the case of non-aligned structures, copy them in now. */
1747
1748 else if (args[i].n_aligned_regs != 0)
1749 for (j = 0; j < args[i].n_aligned_regs; j++)
1750 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1751 args[i].aligned_regs[j]);
1752
1753 else if (partial == 0 || args[i].pass_on_stack)
1754 move_block_to_reg (REGNO (reg),
1755 validize_mem (args[i].value), nregs,
1756 args[i].mode);
1757
1758 /* Handle calls that pass values in multiple non-contiguous
1759 locations. The Irix 6 ABI has examples of this. */
1760 if (GET_CODE (reg) == PARALLEL)
1761 use_group_regs (call_fusage, reg);
1762 else if (nregs == -1)
1763 use_reg (call_fusage, reg);
1764 else
1765 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1766 }
1767 }
1768 }
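
/* Illustrative sketch (hypothetical, kept out of the build with #if 0):
   a BLKmode argument occupies its size rounded up to whole words, so a
   10-byte struct on a machine with 4-byte words takes
   (10 + 4 - 1) / 4 == 3 registers, moved a word at a time.  */
#if 0
static int
sketch_blkmode_nregs (int size_in_bytes, int units_per_word)
{
  return (size_in_bytes + units_per_word - 1) / units_per_word;
}
#endif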
1769
1770 /* Try to integrate (inline) the function. See expand_inline_function
1771 for documentation about the parameters. */
1772
1773 static rtx
1774 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1775 tree fndecl;
1776 tree actparms;
1777 rtx target;
1778 int ignore;
1779 tree type;
1780 rtx structure_value_addr;
1781 {
1782 rtx temp;
1783 rtx before_call;
1784 int i;
1785 rtx old_stack_level = 0;
1786 int reg_parm_stack_space = 0;
1787
1788 #ifdef REG_PARM_STACK_SPACE
1789 #ifdef MAYBE_REG_PARM_STACK_SPACE
1790 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1791 #else
1792 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1793 #endif
1794 #endif
1795
1796 before_call = get_last_insn ();
1797
1798 timevar_push (TV_INTEGRATION);
1799
1800 temp = expand_inline_function (fndecl, actparms, target,
1801 ignore, type,
1802 structure_value_addr);
1803
1804 timevar_pop (TV_INTEGRATION);
1805
1806 /* If inlining succeeded, return. */
1807 if (temp != (rtx) (HOST_WIDE_INT) - 1)
1808 {
1809 if (ACCUMULATE_OUTGOING_ARGS)
1810 {
1811 /* If the outgoing argument list must be preserved, push
1812 the stack before executing the inlined function if it
1813 makes any calls. */
1814
1815 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1816 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1817 break;
1818
1819 if (stack_arg_under_construction || i >= 0)
1820 {
1821 rtx first_insn
1822 = before_call ? NEXT_INSN (before_call) : get_insns ();
1823 rtx insn = NULL_RTX, seq;
1824
1825 /* Look for a call in the inline function code.
1826 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1827 nonzero then there is a call and it is not necessary
1828 to scan the insns. */
1829
1830 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1831 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1832 if (GET_CODE (insn) == CALL_INSN)
1833 break;
1834
1835 if (insn)
1836 {
1837 /* Reserve enough stack space so that the largest
1838 argument list of any function call in the inline
1839 function does not overlap the argument list being
1840 evaluated. This is usually an overestimate because
1841 allocate_dynamic_stack_space reserves space for an
1842 outgoing argument list in addition to the requested
1843 space, but there is no way to ask for stack space such
1844 that an argument list of a certain length can be
1845 safely constructed.
1846
1847 Add the stack space reserved for register arguments, if
1848 any, in the inline function. What is really needed is the
1849 largest value of reg_parm_stack_space in the inline
1850 function, but that is not available. Using the current
1851 value of reg_parm_stack_space is wrong, but gives
1852 correct results on all supported machines. */
1853
1854 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1855 + reg_parm_stack_space);
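
	      /* Worked example (hypothetical numbers): an inlined body
		 whose largest outgoing argument list is 32 bytes, plus
		 16 bytes of register-parm stack space, gives
		 ADJUST == 48 bytes reserved ahead of the inlined insns.  */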
1856
1857 start_sequence ();
1858 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1859 allocate_dynamic_stack_space (GEN_INT (adjust),
1860 NULL_RTX, BITS_PER_UNIT);
1861 seq = get_insns ();
1862 end_sequence ();
1863 emit_insns_before (seq, first_insn);
1864 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1865 }
1866 }
1867 }
1868
1869 /* If the result is equivalent to TARGET, return TARGET to simplify
1870 checks in store_expr. They can be equivalent but not equal in the
1871 case of a function that returns BLKmode. */
1872 if (temp != target && rtx_equal_p (temp, target))
1873 return target;
1874 return temp;
1875 }
1876
1877 /* If inlining failed, mark FNDECL as needing to be compiled
1878 separately after all. If the function was declared inline,
1879 give a warning. */
1880 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1881 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1882 {
1883 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1884 warning ("called from here");
1885 }
1886 mark_addressable (fndecl);
1887 return (rtx) (HOST_WIDE_INT) - 1;
1888 }
1889
1890 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1891 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1892 bytes, then we would need to push some additional bytes to pad the
1893 arguments. So, we compute an adjustment to the stack pointer for an
1894 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1895 bytes. Then, when the arguments are pushed the stack will be perfectly
1896 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1897 be popped after the call. Returns the adjustment. */
1898
1899 static int
1900 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1901 args_size,
1902 preferred_unit_stack_boundary)
1903 int unadjusted_args_size;
1904 struct args_size *args_size;
1905 int preferred_unit_stack_boundary;
1906 {
1907 /* The number of bytes to pop so that the stack will be
1908 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1909 HOST_WIDE_INT adjustment;
1910 /* The alignment of the stack after the arguments are pushed, if we
1911 just pushed the arguments without adjusting the stack here. */
1912 HOST_WIDE_INT unadjusted_alignment;
1913
1914 unadjusted_alignment
1915 = ((stack_pointer_delta + unadjusted_args_size)
1916 % preferred_unit_stack_boundary);
1917
1918 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1919 as possible -- leaving just enough left to cancel out the
1920 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1921 PENDING_STACK_ADJUST is non-negative, and congruent to
1922 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1923
1924 /* Begin by trying to pop all the bytes. */
1925 unadjusted_alignment
1926 = (unadjusted_alignment
1927 - (pending_stack_adjust % preferred_unit_stack_boundary));
1928 adjustment = pending_stack_adjust;
1929 /* Push enough additional bytes that the stack will be aligned
1930 after the arguments are pushed. */
1931 if (preferred_unit_stack_boundary > 1)
1932 {
1933 if (unadjusted_alignment > 0)
1934 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1935 else
1936 adjustment += unadjusted_alignment;
1937 }
1938
1939 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1940 bytes after the call. The right number is the entire
1941 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1942 by the arguments in the first place. */
1943 args_size->constant
1944 = pending_stack_adjust - adjustment + unadjusted_args_size;
1945
1946 return adjustment;
1947 }
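
/* Standalone sketch (hypothetical, kept out of the build with #if 0) of
   the arithmetic above.  With a 16-byte boundary, a stack pointer delta
   of 4, 20 bytes of arguments and 40 pending bytes: (4 + 20) % 16 == 8
   and 40 % 16 == 8, so all 40 bytes are popped now and the 20 pushed
   bytes leave the stack 16-byte aligned at the call.  */
#if 0
static int
sketch_combine_adjustment (int delta, int args, int pending, int boundary,
			   int *pop_after_call)
{
  int align = (delta + args) % boundary - pending % boundary;
  int adjustment = pending;

  if (boundary > 1)
    {
      if (align > 0)
	adjustment -= boundary - align;
      else
	adjustment += align;
    }
  *pop_after_call = pending - adjustment + args;
  return adjustment;
}
#endif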
1948
1949 /* Scan expression X to see whether it dereferences any argument slots
1950 we have already clobbered with tail call arguments (as noted in the
1951 stored_args_map bitmap).
1952 Return non-zero if X dereferences such an argument slot,
1953 zero otherwise. */
1954
1955 static int
1956 check_sibcall_argument_overlap_1 (x)
1957 rtx x;
1958 {
1959 RTX_CODE code;
1960 int i, j;
1961 unsigned int k;
1962 const char *fmt;
1963
1964 if (x == NULL_RTX)
1965 return 0;
1966
1967 code = GET_CODE (x);
1968
1969 if (code == MEM)
1970 {
1971 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1972 i = 0;
1973 else if (GET_CODE (XEXP (x, 0)) == PLUS
1974 && XEXP (XEXP (x, 0), 0) ==
1975 current_function_internal_arg_pointer
1976 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1977 i = INTVAL (XEXP (XEXP (x, 0), 1));
1978 else
1979 return 0;
1980
1981 #ifdef ARGS_GROW_DOWNWARD
1982 i = -i - GET_MODE_SIZE (GET_MODE (x));
1983 #endif
1984
1985 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1986 if (i + k < stored_args_map->n_bits
1987 && TEST_BIT (stored_args_map, i + k))
1988 return 1;
1989
1990 return 0;
1991 }
1992
1993 /* Scan all subexpressions. */
1994 fmt = GET_RTX_FORMAT (code);
1995 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1996 {
1997 if (*fmt == 'e')
1998 {
1999 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2000 return 1;
2001 }
2002 else if (*fmt == 'E')
2003 {
2004 for (j = 0; j < XVECLEN (x, i); j++)
2005 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2006 return 1;
2007 }
2008 }
2009 return 0;
2010
2011 }
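
/* Worked example (hypothetical): a 4-byte MEM at
   (plus internal_arg_pointer (const_int 8)), with arguments growing
   upward, tests bits 8 through 11 of STORED_ARGS_MAP; if any of them is
   set, the read would see a slot a sibcall argument store has already
   clobbered.  */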
2012
2013 /* Scan the sequence after INSN to see whether it dereferences any
2014 argument slots we have already clobbered with tail call arguments (as
2015 noted in the stored_args_map bitmap). Afterwards, add the stack slots
2016 for ARG to the stored_args_map bitmap. Return non-zero if the sequence
2017 after INSN dereferences such argument slots, zero otherwise. */
2018
2019 static int
2020 check_sibcall_argument_overlap (insn, arg)
2021 rtx insn;
2022 struct arg_data *arg;
2023 {
2024 int low, high;
2025
2026 if (insn == NULL_RTX)
2027 insn = get_insns ();
2028 else
2029 insn = NEXT_INSN (insn);
2030
2031 for (; insn; insn = NEXT_INSN (insn))
2032 if (INSN_P (insn)
2033 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2034 break;
2035
2036 #ifdef ARGS_GROW_DOWNWARD
2037 low = -arg->offset.constant - arg->size.constant;
2038 #else
2039 low = arg->offset.constant;
2040 #endif
2041
2042 for (high = low + arg->size.constant; low < high; low++)
2043 SET_BIT (stored_args_map, low);
2044 return insn != NULL_RTX;
2045 }
2046
2047 /* Generate all the code for a function call
2048 and return an rtx for its value.
2049 Store the value in TARGET (specified as an rtx) if convenient.
2050 If the value is stored in TARGET then TARGET is returned.
2051 If IGNORE is nonzero, then we ignore the value of the function call. */
2052
2053 rtx
2054 expand_call (exp, target, ignore)
2055 tree exp;
2056 rtx target;
2057 int ignore;
2058 {
2059 /* Nonzero if we are currently expanding a call. */
2060 static int currently_expanding_call = 0;
2061
2062 /* List of actual parameters. */
2063 tree actparms = TREE_OPERAND (exp, 1);
2064 /* RTX for the function to be called. */
2065 rtx funexp;
2066 /* Sequence of insns to perform a tail recursive "call". */
2067 rtx tail_recursion_insns = NULL_RTX;
2068 /* Sequence of insns to perform a normal "call". */
2069 rtx normal_call_insns = NULL_RTX;
2070 /* Sequence of insns to perform a tail (sibling) "call". */
2071 rtx tail_call_insns = NULL_RTX;
2072 /* Data type of the function. */
2073 tree funtype;
2074 /* Declaration of the function being called,
2075 or 0 if the function is computed (not known by name). */
2076 tree fndecl = 0;
2077 rtx insn;
2078 int try_tail_call = 1;
2079 int try_tail_recursion = 1;
2080 int pass;
2081
2082 /* Register in which non-BLKmode value will be returned,
2083 or 0 if no value or if value is BLKmode. */
2084 rtx valreg;
2085 /* Address where we should return a BLKmode value;
2086 0 if value not BLKmode. */
2087 rtx structure_value_addr = 0;
2088 /* Nonzero if that address is being passed by treating it as
2089 an extra, implicit first parameter. Otherwise,
2090 it is passed by being copied directly into struct_value_rtx. */
2091 int structure_value_addr_parm = 0;
2092 /* Size of aggregate value wanted, or zero if none wanted
2093 or if we are using the non-reentrant PCC calling convention
2094 or expecting the value in registers. */
2095 HOST_WIDE_INT struct_value_size = 0;
2096 /* Nonzero if called function returns an aggregate in memory PCC style,
2097 by returning the address of where to find it. */
2098 int pcc_struct_value = 0;
2099
2100 /* Number of actual parameters in this call, including struct value addr. */
2101 int num_actuals;
2102 /* Number of named args. Args after this are anonymous ones
2103 and they must all go on the stack. */
2104 int n_named_args;
2105
2106 /* Vector of information about each argument.
2107 Arguments are numbered in the order they will be pushed,
2108 not the order they are written. */
2109 struct arg_data *args;
2110
2111 /* Total size in bytes of all the stack-parms scanned so far. */
2112 struct args_size args_size;
2113 struct args_size adjusted_args_size;
2114 /* Size of arguments before any adjustments (such as rounding). */
2115 int unadjusted_args_size;
2116 /* Data on reg parms scanned so far. */
2117 CUMULATIVE_ARGS args_so_far;
2118 /* Nonzero if a reg parm has been scanned. */
2119 int reg_parm_seen;
2121
2122 /* Nonzero if we must avoid push-insns in the args for this call.
2123 If stack space is allocated for register parameters, but not by the
2124 caller, then it is preallocated in the fixed part of the stack frame.
2125 So the entire argument block must then be preallocated (i.e., we
2126 ignore PUSH_ROUNDING in that case). */
2127
2128 int must_preallocate = !PUSH_ARGS;
2129
2130 /* Size of the stack reserved for parameter registers. */
2131 int reg_parm_stack_space = 0;
2132
2133 /* Address of space preallocated for stack parms
2134 (on machines that lack push insns), or 0 if space not preallocated. */
2135 rtx argblock = 0;
2136
2137 /* Mask of ECF_ flags. */
2138 int flags = 0;
2139 /* Nonzero if this is a call to an inline function. */
2140 int is_integrable = 0;
2141 #ifdef REG_PARM_STACK_SPACE
2142 /* Define the boundary of the register parm stack space that needs to be
2143 saved, if any. */
2144 int low_to_save = -1, high_to_save;
2145 rtx save_area = 0; /* Place that it is saved */
2146 #endif
2147
2148 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2149 char *initial_stack_usage_map = stack_usage_map;
2150 int old_stack_arg_under_construction = 0;
2151
2152 rtx old_stack_level = 0;
2153 int old_pending_adj = 0;
2154 int old_inhibit_defer_pop = inhibit_defer_pop;
2155 int old_stack_allocated;
2156 rtx call_fusage;
2157 register tree p = TREE_OPERAND (exp, 0);
2158 register int i;
2159 /* The alignment of the stack, in bits. */
2160 HOST_WIDE_INT preferred_stack_boundary;
2161 /* The alignment of the stack, in bytes. */
2162 HOST_WIDE_INT preferred_unit_stack_boundary;
2163
2164 /* The value of the function call can be put in a hard register. But
2165 if -fcheck-memory-usage, code which invokes functions (and thus
2166 damages some hard registers) can be inserted before using the value.
2167 So, target is always a pseudo-register in that case. */
2168 if (current_function_check_memory_usage)
2169 target = 0;
2170
2171 /* See if this is "nothrow" function call. */
2172 if (TREE_NOTHROW (exp))
2173 flags |= ECF_NOTHROW;
2174
2175 /* See if we can find a DECL-node for the actual function.
2176 As a result, decide whether this is a call to an integrable function. */
2177
2178 fndecl = get_callee_fndecl (exp);
2179 if (fndecl)
2180 {
2181 if (!flag_no_inline
2182 && fndecl != current_function_decl
2183 && DECL_INLINE (fndecl)
2184 && DECL_SAVED_INSNS (fndecl)
2185 && DECL_SAVED_INSNS (fndecl)->inlinable)
2186 is_integrable = 1;
2187 else if (! TREE_ADDRESSABLE (fndecl))
2188 {
2189 /* In case this function later becomes inlinable,
2190 record that there was already a non-inline call to it.
2191
2192 Use abstraction instead of setting TREE_ADDRESSABLE
2193 directly. */
2194 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2195 && optimize > 0)
2196 {
2197 warning_with_decl (fndecl, "can't inline call to `%s'");
2198 warning ("called from here");
2199 }
2200 mark_addressable (fndecl);
2201 }
2202
2203 flags |= flags_from_decl_or_type (fndecl);
2204 }
2205
2206 /* If we don't have a specific function to call, see if we have
2207 attributes set in the type. */
2208 else
2209 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2210
2211 /* Mark if the function returns with the stack pointer depressed. */
2212 if (TREE_CODE (TREE_TYPE (TREE_TYPE (p))) == FUNCTION_TYPE
2213 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (TREE_TYPE (p))))
2214 {
2215 flags |= ECF_SP_DEPRESSED;
2216 flags &= ~ (ECF_PURE | ECF_CONST);
2217 }
2218
2219 #ifdef REG_PARM_STACK_SPACE
2220 #ifdef MAYBE_REG_PARM_STACK_SPACE
2221 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2222 #else
2223 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2224 #endif
2225 #endif
2226
2227 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2228 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2229 must_preallocate = 1;
2230 #endif
2231
2232 /* Warn if this value is an aggregate type,
2233 regardless of which calling convention we are using for it. */
2234 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2235 warning ("function call has aggregate value");
2236
2237 /* Set up a place to return a structure. */
2238
2239 /* Cater to broken compilers. */
2240 if (aggregate_value_p (exp))
2241 {
2242 /* This call returns a big structure. */
2243 flags &= ~(ECF_CONST | ECF_PURE);
2244
2245 #ifdef PCC_STATIC_STRUCT_RETURN
2246 {
2247 pcc_struct_value = 1;
2248 /* Easier than making that case work right. */
2249 if (is_integrable)
2250 {
2251 /* In case this is a static function, note that it has been
2252 used. */
2253 if (! TREE_ADDRESSABLE (fndecl))
2254 mark_addressable (fndecl);
2255 is_integrable = 0;
2256 }
2257 }
2258 #else /* not PCC_STATIC_STRUCT_RETURN */
2259 {
2260 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2261
2262 if (target && GET_CODE (target) == MEM)
2263 structure_value_addr = XEXP (target, 0);
2264 else
2265 {
2266 rtx d;
2267
2268 /* For variable-sized objects, we must be called with a target
2269 specified. If we were to allocate space on the stack here,
2270 we would have no way of knowing when to free it. */
2271
2272 if (struct_value_size < 0)
2273 abort ();
2274
2275 d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2276 mark_temp_addr_taken (d);
2277 structure_value_addr = XEXP (d, 0);
2278 target = 0;
2279 }
2280 }
2281 #endif /* not PCC_STATIC_STRUCT_RETURN */
2282 }
2283
2284 /* If called function is inline, try to integrate it. */
2285
2286 if (is_integrable)
2287 {
2288 rtx temp = try_to_integrate (fndecl, actparms, target,
2289 ignore, TREE_TYPE (exp),
2290 structure_value_addr);
2291 if (temp != (rtx) (HOST_WIDE_INT) - 1)
2292 return temp;
2293 }
2294
2295 /* Figure out the amount to which the stack should be aligned. */
2296 #ifdef PREFERRED_STACK_BOUNDARY
2297 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2298 #else
2299 preferred_stack_boundary = STACK_BOUNDARY;
2300 #endif
2301
2302 /* Operand 0 is a pointer-to-function; get the type of the function. */
2303 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2304 if (! POINTER_TYPE_P (funtype))
2305 abort ();
2306 funtype = TREE_TYPE (funtype);
2307
2308 /* See if this is a call to a function that can return more than once
2309 or a call to longjmp or malloc. */
2310 flags |= special_function_p (fndecl, flags);
2311
2312 if (flags & ECF_MAY_BE_ALLOCA)
2313 current_function_calls_alloca = 1;
2314
2315 /* If struct_value_rtx is 0, it means pass the address
2316 as if it were an extra parameter. */
2317 if (structure_value_addr && struct_value_rtx == 0)
2318 {
2319 /* If structure_value_addr is a REG other than
2320 virtual_outgoing_args_rtx, we can always use it. If it
2321 is not a REG, we must always copy it into a register.
2322 If it is virtual_outgoing_args_rtx, we must copy it to another
2323 register in some cases. */
2324 rtx temp = (GET_CODE (structure_value_addr) != REG
2325 || (ACCUMULATE_OUTGOING_ARGS
2326 && stack_arg_under_construction
2327 && structure_value_addr == virtual_outgoing_args_rtx)
2328 ? copy_addr_to_reg (structure_value_addr)
2329 : structure_value_addr);
2330
2331 actparms
2332 = tree_cons (error_mark_node,
2333 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2334 temp),
2335 actparms);
2336 structure_value_addr_parm = 1;
2337 }
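
  /* In effect (hypothetical source view): `struct S x = f ();' has been
     rewritten as if it were `f (&x);', with the address prepended to
     ACTPARMS as an implicit first argument.  */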
2338
2339 /* Count the arguments and set NUM_ACTUALS. */
2340 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2341 num_actuals++;
2342
2343 /* Compute number of named args.
2344 Normally, don't include the last named arg if anonymous args follow.
2345 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2346 (If no anonymous args follow, the result of list_length is actually
2347 one too large. This is harmless.)
2348
2349 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2350 zero, this machine will be able to place unnamed args that were
2351 passed in registers into the stack. So treat all args as named.
2352 This allows the insns emitted for a specific argument list to be
2353 independent of the function declaration.
2354
2355 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2356 reliable way to pass unnamed args in registers, so we must force
2357 them into memory. */
2358
2359 if ((STRICT_ARGUMENT_NAMING
2360 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2361 && TYPE_ARG_TYPES (funtype) != 0)
2362 n_named_args
2363 = (list_length (TYPE_ARG_TYPES (funtype))
2364 /* Don't include the last named arg. */
2365 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2366 /* Count the struct value address, if it is passed as a parm. */
2367 + structure_value_addr_parm);
2368 else
2369 /* If we know nothing, treat all args as named. */
2370 n_named_args = num_actuals;
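
  /* Worked example (hypothetical declarations): for `int f (int, int, ...)'
     TYPE_ARG_TYPES has length 2, so with STRICT_ARGUMENT_NAMING zero we
     count 2 - 1 = 1 named arg; for `int f (int, int)' the chain ends in
     void_list_node, so 3 - 1 = 2 comes out exactly right.  */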
2371
2372 /* Start updating where the next arg would go.
2373
2374 On some machines (such as the PA) indirect calls have a different
2375 calling convention than normal calls. The last argument in
2376 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2377 or not. */
2378 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2379
2380
2381 /* Make a vector to hold all the information about each arg. */
2382 args = (struct arg_data *) alloca (num_actuals
2383 * sizeof (struct arg_data));
2384 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2385
2386 /* Build up entries in the ARGS array, compute the size of the arguments
2387 into ARGS_SIZE, etc. */
2388 initialize_argument_information (num_actuals, args, &args_size,
2389 n_named_args, actparms, fndecl,
2390 &args_so_far, reg_parm_stack_space,
2391 &old_stack_level, &old_pending_adj,
2392 &must_preallocate, &flags);
2393
2394 if (args_size.var)
2395 {
2396 /* If this function requires a variable-sized argument list, don't
2397 try to make a cse'able block for this call. We may be able to
2398 do this eventually, but it is too complicated to keep track of
2399 what insns go in the cse'able block and which don't. */
2400
2401 flags &= ~(ECF_CONST | ECF_PURE);
2402 must_preallocate = 1;
2403 }
2404
2405 /* Now make final decision about preallocating stack space. */
2406 must_preallocate = finalize_must_preallocate (must_preallocate,
2407 num_actuals, args,
2408 &args_size);
2409
2410 /* If the structure value address will reference the stack pointer, we
2411 must stabilize it. We don't need to do this if we know that we are
2412 not going to adjust the stack pointer in processing this call. */
2413
2414 if (structure_value_addr
2415 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2416 || reg_mentioned_p (virtual_outgoing_args_rtx,
2417 structure_value_addr))
2418 && (args_size.var
2419 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2420 structure_value_addr = copy_to_reg (structure_value_addr);
2421
2422 /* Tail calls can make things harder to debug, and we've traditionally
2423 pushed these optimizations into -O2. Don't try if we're already
2424 expanding a call, as that means we're an argument. Similarly, if
2425 there are pending loops or cleanups we know there's code to follow
2426 the call.
2427
2428 If rtx_equal_function_value_matters is false, that means we've
2429 finished with regular parsing. Which means that some of the
2430 machinery we use to generate tail-calls is no longer in place.
2431 This is most often true of sjlj-exceptions, which we couldn't
2432 tail-call to anyway. */
2433
2434 if (currently_expanding_call++ != 0
2435 || !flag_optimize_sibling_calls
2436 || !rtx_equal_function_value_matters
2437 || !stmt_loop_nest_empty ()
2438 || any_pending_cleanups (1)
2439 || args_size.var)
2440 try_tail_call = try_tail_recursion = 0;
2441
2442 /* Tail recursion fails when we are not dealing with a recursive call. */
2443 if (!try_tail_recursion
2444 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2445 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2446 try_tail_recursion = 0;
2447
2448 /* Other reasons for tail call optimization to fail. */
2449 if (
2450 #ifdef HAVE_sibcall_epilogue
2451 !HAVE_sibcall_epilogue
2452 #else
2453 1
2454 #endif
2455 || !try_tail_call
2456 /* Doing sibling call optimization needs some work, since
2457 structure_value_addr can be allocated on the stack.
2458 It does not seem worth the effort since few optimizable
2459 sibling calls will return a structure. */
2460 || structure_value_addr != NULL_RTX
2461 /* If the register holding the address is a callee saved
2462 register, then we lose. We have no way to prevent that,
2463 so we only allow calls to named functions. */
2464 /* ??? This could be done by having the insn constraints
2465 use a register class that is all call-clobbered. Any
2466 reload insns generated to fix things up would appear
2467 before the sibcall_epilogue. */
2468 || fndecl == NULL_TREE
2469 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP))
2470 || !FUNCTION_OK_FOR_SIBCALL (fndecl)
2471 /* If this function requires more stack slots than the current
2472 function, we cannot change it into a sibling call. */
2473 || args_size.constant > current_function_args_size
2474 /* If the callee pops its own arguments, then it must pop exactly
2475 the same number of arguments as the current function. */
2476 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2477 != RETURN_POPS_ARGS (current_function_decl,
2478 TREE_TYPE (current_function_decl),
2479 current_function_args_size))
2480 try_tail_call = 0;
2481
2482 if (try_tail_call || try_tail_recursion)
2483 {
2484 int end, inc;
2485 actparms = NULL_TREE;
2486 /* Ok, we're going to give the tail call the old college try.
2487 This means we're going to evaluate the function arguments
2488 up to three times. There are two degrees of badness we can
2489 encounter, those that can be unsaved and those that can't.
2490 (See unsafe_for_reeval commentary for details.)
2491
2492 Generate a new argument list. Pass safe arguments through
2493 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2494 For hard badness, evaluate them now and put their resulting
2495 rtx in a temporary VAR_DECL.
2496
2497 initialize_argument_information has ordered the array for the
2498 order to be pushed, and we must remember this when reconstructing
2499 the original argument order. */
2500
2501 if (PUSH_ARGS_REVERSED)
2502 {
2503 inc = 1;
2504 i = 0;
2505 end = num_actuals;
2506 }
2507 else
2508 {
2509 inc = -1;
2510 i = num_actuals - 1;
2511 end = -1;
2512 }
2513
2514 for (; i != end; i += inc)
2515 {
2516 switch (unsafe_for_reeval (args[i].tree_value))
2517 {
2518 case 0: /* Safe. */
2519 break;
2520
2521 case 1: /* Mildly unsafe. */
2522 args[i].tree_value = unsave_expr (args[i].tree_value);
2523 break;
2524
2525 case 2: /* Wildly unsafe. */
2526 {
2527 tree var = build_decl (VAR_DECL, NULL_TREE,
2528 TREE_TYPE (args[i].tree_value));
2529 DECL_RTL (var) = expand_expr (args[i].tree_value, NULL_RTX,
2530 VOIDmode, EXPAND_NORMAL);
2531 args[i].tree_value = var;
2532 }
2533 break;
2534
2535 default:
2536 abort ();
2537 }
2538 /* We need to build actparms for optimize_tail_recursion. We can
2539 safely discard TREE_PURPOSE, since it is unused by this
2540 function. */
2541 if (try_tail_recursion)
2542 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2543 }
2544 /* Expanding one of those dangerous arguments could have added
2545 cleanups, but otherwise give it a whirl. */
2546 if (any_pending_cleanups (1))
2547 try_tail_call = try_tail_recursion = 0;
2548 }
2549
2550 /* Generate a tail recursion sequence when calling ourselves. */
2551
2552 if (try_tail_recursion)
2553 {
2554 /* We want to emit any pending stack adjustments before the tail
2555 recursion "call". That way we know any adjustment after the tail
2556 recursion call can be ignored if we indeed use the tail recursion
2557 call expansion. */
2558 int save_pending_stack_adjust = pending_stack_adjust;
2559 int save_stack_pointer_delta = stack_pointer_delta;
2560
2561 /* Use a new sequence to hold any RTL we generate. We do not even
2562 know if we will use this RTL yet. The final decision cannot be
2563 made until after RTL generation for the entire function is
2564 complete. */
2565 start_sequence ();
2566 /* If expanding any of the arguments creates cleanups, we can't
2567 do a tailcall. So, we'll need to pop the pending cleanups
2568 list. If, however, all goes well, and there are no cleanups
2569 then the call to expand_start_target_temps will have no
2570 effect. */
2571 expand_start_target_temps ();
2572 if (optimize_tail_recursion (actparms, get_last_insn ()))
2573 {
2574 if (any_pending_cleanups (1))
2575 try_tail_call = try_tail_recursion = 0;
2576 else
2577 tail_recursion_insns = get_insns ();
2578 }
2579 expand_end_target_temps ();
2580 end_sequence ();
2581
2582 /* Restore the original pending stack adjustment for the sibling and
2583 normal call cases below. */
2584 pending_stack_adjust = save_pending_stack_adjust;
2585 stack_pointer_delta = save_stack_pointer_delta;
2586 }
2587
2588 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2589 {
2590 /* A fork duplicates the profile information, and an exec discards
2591 it. We can't rely on fork/exec to be paired. So write out the
2592 profile information we have gathered so far, and clear it. */
2593 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2594 is subject to race conditions, just as with multithreaded
2595 programs. */
2596
2597 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
2598 VOIDmode, 0);
2599 }
2600
2601 /* Ensure current function's preferred stack boundary is at least
2602 what we need. We don't have to increase alignment for recursive
2603 functions. */
2604 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2605 && fndecl != current_function_decl)
2606 cfun->preferred_stack_boundary = preferred_stack_boundary;
2607
2608 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2609
2610 function_call_count++;
2611
2612 /* We want to make two insn chains; one for a sibling call, the other
2613 for a normal call. We will select one of the two chains after
2614 initial RTL generation is complete. */
2615 for (pass = 0; pass < 2; pass++)
2616 {
2617 int sibcall_failure = 0;
2618 /* We want to emit any pending stack adjustments before the tail
2619 recursion "call". That way we know any adjustment after the tail
2620 recursion call can be ignored if we indeed use the tail recursion
2621 call expansion. */
2622 int save_pending_stack_adjust = 0;
2623 int save_stack_pointer_delta = 0;
2624 rtx insns;
2625 rtx before_call, next_arg_reg;
2626
2627 if (pass == 0)
2628 {
2629 if (! try_tail_call)
2630 continue;
2631
2632 /* Emit any queued insns now; otherwise they would end up in
2633 only one of the alternates. */
2634 emit_queue ();
2635
2636 /* State variables we need to save and restore between
2637 iterations. */
2638 save_pending_stack_adjust = pending_stack_adjust;
2639 save_stack_pointer_delta = stack_pointer_delta;
2640 }
2641 if (pass)
2642 flags &= ~ECF_SIBCALL;
2643 else
2644 flags |= ECF_SIBCALL;
2645
2646 /* Other state variables that we must reinitialize each time
2647 through the loop (that are not initialized by the loop itself). */
2648 argblock = 0;
2649 call_fusage = 0;
2650
2651 /* Start a new sequence for the normal call case.
2652
2653 From this point on, if the sibling call fails, we want to set
2654 sibcall_failure instead of continuing the loop. */
2655 start_sequence ();
2656
2657 if (pass == 0)
2658 {
2659 /* We know at this point that there are not currently any
2660 pending cleanups. If, however, in the process of evaluating
2661 the arguments we were to create some, we'll need to be
2662 able to get rid of them. */
2663 expand_start_target_temps ();
2664 }
2665
2666 /* When calling a const function, we must pop the stack args right away,
2667 so that the pop is deleted or moved with the call. */
2668 if (flags & (ECF_CONST | ECF_PURE))
2669 NO_DEFER_POP;
2670
2671 /* Don't let pending stack adjusts add up to too much.
2672 Also, do all pending adjustments now if there is any chance
2673 this might be a call to alloca or if we are expanding a sibling
2674 call sequence. */
2675 if (pending_stack_adjust >= 32
2676 || (pending_stack_adjust > 0 && (flags & ECF_MAY_BE_ALLOCA))
2677 || pass == 0)
2678 do_pending_stack_adjust ();
2679
2680 /* Push the temporary stack slot level so that we can free any
2681 temporaries we make. */
2682 push_temp_slots ();
2683
2684
2685 #ifdef FINAL_REG_PARM_STACK_SPACE
2686 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2687 args_size.var);
2688 #endif
2689 /* Precompute any arguments as needed. */
2690 if (pass)
2691 precompute_arguments (flags, num_actuals, args);
2692
2693 /* Now we are about to start emitting insns that can be deleted
2694 if a libcall is deleted. */
2695 if (flags & (ECF_CONST | ECF_PURE | ECF_MALLOC))
2696 start_sequence ();
2697
2698 adjusted_args_size = args_size;
2699 /* Compute the actual size of the argument block required. The variable
2700 and constant sizes must be combined, the size may have to be rounded,
2701 and there may be a minimum required size. When generating a sibcall
2702 pattern, do not round up, since we'll be re-using whatever space our
2703 caller provided. */
2704 unadjusted_args_size
2705 = compute_argument_block_size (reg_parm_stack_space, &adjusted_args_size,
2706 (pass == 0 ? 0
2707 : preferred_stack_boundary));
2708
2709 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2710
2711 /* The argument block when performing a sibling call is the
2712 incoming argument block. */
2713 if (pass == 0)
2714 {
2715 argblock = virtual_incoming_args_rtx;
2716 stored_args_map = sbitmap_alloc (args_size.constant);
2717 sbitmap_zero (stored_args_map);
2718 }
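
      /* STORED_ARGS_MAP has one bit per byte of the argument block;
	 check_sibcall_argument_overlap sets a bit for each byte a
	 sibcall argument store clobbers and later tests the bytes that
	 subsequent insns read.  */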
2719
2720 /* If we have no actual push instructions, or shouldn't use them,
2721 make space for all args right now. */
2722 else if (adjusted_args_size.var != 0)
2723 {
2724 if (old_stack_level == 0)
2725 {
2726 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2727 old_pending_adj = pending_stack_adjust;
2728 pending_stack_adjust = 0;
2729 /* stack_arg_under_construction says whether a stack arg is
2730 being constructed at the old stack level. Pushing the stack
2731 gets a clean outgoing argument block. */
2732 old_stack_arg_under_construction = stack_arg_under_construction;
2733 stack_arg_under_construction = 0;
2734 }
2735 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2736 }
2737 else
2738 {
2739 /* Note that we must go through the motions of allocating an argument
2740 block even if the size is zero because we may be storing args
2741 in the area reserved for register arguments, which may be part of
2742 the stack frame. */
2743
2744 int needed = adjusted_args_size.constant;
2745
2746 /* Store the maximum argument space used. It will be pushed by
2747 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2748 checking). */
2749
2750 if (needed > current_function_outgoing_args_size)
2751 current_function_outgoing_args_size = needed;
2752
2753 if (must_preallocate)
2754 {
2755 if (ACCUMULATE_OUTGOING_ARGS)
2756 {
2757 /* Since the stack pointer will never be pushed, it is
2758 possible for the evaluation of a parm to clobber
2759 something we have already written to the stack.
2760 Since most function calls on RISC machines do not use
2761 the stack, this is uncommon, but must work correctly.
2762
2763 Therefore, we save any area of the stack that was already
2764 written and that we are using. Here we set up to do this
2765 by making a new stack usage map from the old one. The
2766 actual save will be done by store_one_arg.
2767
2768 Another approach might be to try to reorder the argument
2769 evaluations to avoid this conflicting stack usage. */
2770
2771 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2772 /* Since we will be writing into the entire argument area,
2773 the map must be allocated for its entire size, not just
2774 the part that is the responsibility of the caller. */
2775 needed += reg_parm_stack_space;
2776 #endif
2777
2778 #ifdef ARGS_GROW_DOWNWARD
2779 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2780 needed + 1);
2781 #else
2782 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2783 needed);
2784 #endif
2785 stack_usage_map
2786 = (char *) alloca (highest_outgoing_arg_in_use);
2787
2788 if (initial_highest_arg_in_use)
2789 bcopy (initial_stack_usage_map, stack_usage_map,
2790 initial_highest_arg_in_use);
2791
2792 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2793 bzero (&stack_usage_map[initial_highest_arg_in_use],
2794 (highest_outgoing_arg_in_use
2795 - initial_highest_arg_in_use));
2796 needed = 0;
2797
2798 /* The address of the outgoing argument list must not be
2799 copied to a register here, because argblock would be left
2800 pointing to the wrong place after the call to
2801 allocate_dynamic_stack_space below. */
2802
2803 argblock = virtual_outgoing_args_rtx;
2804 }
2805 else
2806 {
2807 if (inhibit_defer_pop == 0)
2808 {
2809 /* Try to reuse some or all of the pending_stack_adjust
2810 to get this space. */
2811 needed
2812 = (combine_pending_stack_adjustment_and_call
2813 (unadjusted_args_size,
2814 &adjusted_args_size,
2815 preferred_unit_stack_boundary));
2816
2817 /* combine_pending_stack_adjustment_and_call computes
2818 an adjustment before the arguments are allocated.
2819 Account for them and see whether or not the stack
2820 needs to go up or down. */
2821 needed = unadjusted_args_size - needed;
2822
2823 if (needed < 0)
2824 {
2825 /* We're releasing stack space. */
2826 /* ??? We can avoid any adjustment at all if we're
2827 already aligned. FIXME. */
2828 pending_stack_adjust = -needed;
2829 do_pending_stack_adjust ();
2830 needed = 0;
2831 }
2832 else
2833 /* We need to allocate space. We'll do that in
2834 push_block below. */
2835 pending_stack_adjust = 0;
2836 }
2837
2838 /* Special case this because overhead of `push_block' in
2839 this case is non-trivial. */
2840 if (needed == 0)
2841 argblock = virtual_outgoing_args_rtx;
2842 else
2843 argblock = push_block (GEN_INT (needed), 0, 0);
2844
2845 /* We only really need to call `copy_to_reg' in the case
2846 where push insns are going to be used to pass ARGBLOCK
2847 to a function call in ARGS. In that case, the stack
2848 pointer changes value from the allocation point to the
2849 call point, and hence the value of
2850 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But we might
2851 as well always do it. */
2852 argblock = copy_to_reg (argblock);
2853
2854 /* The save/restore code in store_one_arg handles all
2855 cases except one: a constructor call (including a C
2856 function returning a BLKmode struct) to initialize
2857 an argument. */
2858 if (stack_arg_under_construction)
2859 {
2860 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2861 rtx push_size = GEN_INT (reg_parm_stack_space
2862 + adjusted_args_size.constant);
2863 #else
2864 rtx push_size = GEN_INT (adjusted_args_size.constant);
2865 #endif
2866 if (old_stack_level == 0)
2867 {
2868 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2869 NULL_RTX);
2870 old_pending_adj = pending_stack_adjust;
2871 pending_stack_adjust = 0;
2872 /* stack_arg_under_construction says whether a stack
2873 arg is being constructed at the old stack level.
2874 Pushing the stack gets a clean outgoing argument
2875 block. */
2876 old_stack_arg_under_construction
2877 = stack_arg_under_construction;
2878 stack_arg_under_construction = 0;
2879 /* Make a new map for the new argument list. */
2880 stack_usage_map = (char *)
2881 alloca (highest_outgoing_arg_in_use);
2882 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2883 highest_outgoing_arg_in_use = 0;
2884 }
2885 allocate_dynamic_stack_space (push_size, NULL_RTX,
2886 BITS_PER_UNIT);
2887 }
2888 /* If argument evaluation might modify the stack pointer,
2889 copy the address of the argument list to a register. */
2890 for (i = 0; i < num_actuals; i++)
2891 if (args[i].pass_on_stack)
2892 {
2893 argblock = copy_addr_to_reg (argblock);
2894 break;
2895 }
2896 }
2897 }
2898 }
2899
2900 compute_argument_addresses (args, argblock, num_actuals);
2901
2902 #ifdef PREFERRED_STACK_BOUNDARY
2903 /* If we push args individually in reverse order, perform stack alignment
2904 before the first push (the last arg). */
2905 if (PUSH_ARGS_REVERSED && argblock == 0
2906 && adjusted_args_size.constant != unadjusted_args_size)
2907 {
2908 /* When the stack adjustment is pending, we get better code
2909 by combining the adjustments. */
2910 if (pending_stack_adjust
2911 && ! (flags & (ECF_CONST | ECF_PURE))
2912 && ! inhibit_defer_pop)
2913 {
2914 pending_stack_adjust
2915 = (combine_pending_stack_adjustment_and_call
2916 (unadjusted_args_size,
2917 &adjusted_args_size,
2918 preferred_unit_stack_boundary));
2919 do_pending_stack_adjust ();
2920 }
2921 else if (argblock == 0)
2922 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2923 - unadjusted_args_size));
2924 }
2925 /* Now that the stack is properly aligned, pops can't safely
2926 be deferred during the evaluation of the arguments. */
2927 NO_DEFER_POP;
2928 #endif
2929
2930 /* Don't try to defer pops if preallocating, not even from the first arg,
2931 since ARGBLOCK probably refers to the SP. */
2932 if (argblock)
2933 NO_DEFER_POP;
2934
2935 funexp = rtx_for_function_call (fndecl, exp);
2936
2937 /* Figure out the register where the value, if any, will come back. */
2938 valreg = 0;
2939 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2940 && ! structure_value_addr)
2941 {
2942 if (pcc_struct_value)
2943 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2944 fndecl, (pass == 0));
2945 else
2946 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2947 }
2948
2949 /* Precompute all register parameters. It isn't safe to compute anything
2950 once we have started filling any specific hard regs. */
2951 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2952
2953 #ifdef REG_PARM_STACK_SPACE
2954 /* Save the fixed argument area if it's part of the caller's frame and
2955 is clobbered by argument setup for this call. */
2956 if (ACCUMULATE_OUTGOING_ARGS && pass)
2957 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2958 &low_to_save, &high_to_save);
2959 #endif
2960
2961 /* Now store (and compute if necessary) all non-register parms.
2962 These come before register parms, since they can require block-moves,
2963 which could clobber the registers used for register parms.
2964 Parms which have partial registers are not stored here,
2965 but we do preallocate space here if they want that. */
2966
2967 for (i = 0; i < num_actuals; i++)
2968 if (args[i].reg == 0 || args[i].pass_on_stack)
2969 {
2970 rtx before_arg = get_last_insn ();
2971
2972 if (store_one_arg (&args[i], argblock, flags,
2973 adjusted_args_size.var != 0,
2974 reg_parm_stack_space)
2975 || (pass == 0
2976 && check_sibcall_argument_overlap (before_arg,
2977 &args[i])))
2978 sibcall_failure = 1;
2979 }
2980
2981 /* If we have a parm that is passed in registers but not in memory
2982 and whose alignment does not permit a direct copy into registers,
2983 make a group of pseudos that correspond to each register that we
2984 will later fill. */
2985 if (STRICT_ALIGNMENT)
2986 store_unaligned_arguments_into_pseudos (args, num_actuals);
2987
2988 /* Now store any partially-in-registers parm.
2989 This is the last place a block-move can happen. */
2990 if (reg_parm_seen)
2991 for (i = 0; i < num_actuals; i++)
2992 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2993 {
2994 rtx before_arg = get_last_insn ();
2995
2996 if (store_one_arg (&args[i], argblock, flags,
2997 adjusted_args_size.var != 0,
2998 reg_parm_stack_space)
2999 || (pass == 0
3000 && check_sibcall_argument_overlap (before_arg,
3001 &args[i])))
3002 sibcall_failure = 1;
3003 }
3004
3005 #ifdef PREFERRED_STACK_BOUNDARY
3006 /* If we pushed args in forward order, perform stack alignment
3007 after pushing the last arg. */
3008 if (!PUSH_ARGS_REVERSED && argblock == 0)
3009 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3010 - unadjusted_args_size));
3011 #endif
3012
3013 /* If register arguments require space on the stack and stack space
3014 was not preallocated, allocate stack space here for arguments
3015 passed in registers. */
3016 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3017 if (!ACCUMULATE_OUTGOING_ARGS
3018 && must_preallocate == 0 && reg_parm_stack_space > 0)
3019 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3020 #endif
3021
3022 /* Pass the function the address in which to return a
3023 structure value. */
3024 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3025 {
3026 emit_move_insn (struct_value_rtx,
3027 force_reg (Pmode,
3028 force_operand (structure_value_addr,
3029 NULL_RTX)));
3030
3031 /* Mark the memory for the aggregate as write-only. */
3032 if (current_function_check_memory_usage)
3033 emit_library_call (chkr_set_right_libfunc, 1,
3034 VOIDmode, 3,
3035 structure_value_addr, ptr_mode,
3036 GEN_INT (struct_value_size),
3037 TYPE_MODE (sizetype),
3038 GEN_INT (MEMORY_USE_WO),
3039 TYPE_MODE (integer_type_node));
3040
3041 if (GET_CODE (struct_value_rtx) == REG)
3042 use_reg (&call_fusage, struct_value_rtx);
3043 }
3044
3045 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3046 reg_parm_seen);
3047
3048 load_register_parameters (args, num_actuals, &call_fusage, flags);
3049
3050 /* Perform postincrements before actually calling the function. */
3051 emit_queue ();
3052
3053 /* Save a pointer to the last insn before the call, so that we can
3054 later safely search backwards to find the CALL_INSN. */
3055 before_call = get_last_insn ();
3056
3057 /* Set up next argument register. For sibling calls on machines
3058 with register windows this should be the incoming register. */
3059 #ifdef FUNCTION_INCOMING_ARG
3060 if (pass == 0)
3061 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3062 void_type_node, 1);
3063 else
3064 #endif
3065 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3066 void_type_node, 1);
3067
3068 /* All arguments and registers used for the call must be set up by
3069 now! */
3070
3071 #ifdef PREFERRED_STACK_BOUNDARY
3072 /* Stack must be properly aligned now. */
3073 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3074 abort ();
3075 #endif
3076
3077 /* Generate the actual call instruction. */
3078 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3079 adjusted_args_size.constant, struct_value_size,
3080 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3081 flags);
3082
3083 /* Verify that we've deallocated all the stack we used. */
3084 if (pass
3085 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3086 abort ();
3087
3088 /* If call is cse'able, make appropriate pair of reg-notes around it.
3089 Test valreg so we don't crash; may safely ignore `const'
3090 if return type is void. Disable for PARALLEL return values, because
3091 we have no way to move such values into a pseudo register. */
3092 if (pass
3093 && (flags & (ECF_CONST | ECF_PURE))
3094 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
3095 {
3096 rtx note = 0;
3097 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3098 rtx insns;
3099
3100 /* Mark the return value as a pointer if needed. */
3101 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3102 mark_reg_pointer (temp, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3103
3104 /* Construct an "equal form" for the value which mentions all the
3105 arguments in order as well as the function name. */
3106 for (i = 0; i < num_actuals; i++)
3107 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
3108 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3109
3110 insns = get_insns ();
3111 end_sequence ();
3112
3113 if (flags & ECF_PURE)
3114 note = gen_rtx_EXPR_LIST (VOIDmode,
3115 gen_rtx_USE (VOIDmode,
3116 gen_rtx_MEM (BLKmode,
3117 gen_rtx_SCRATCH (VOIDmode))), note);
3118
3119 emit_libcall_block (insns, temp, valreg, note);
3120
3121 valreg = temp;
3122 }
3123 else if (flags & (ECF_CONST | ECF_PURE))
3124 {
3125 /* Otherwise, just write out the sequence without a note. */
3126 rtx insns = get_insns ();
3127
3128 end_sequence ();
3129 emit_insns (insns);
3130 }
3131 else if (flags & ECF_MALLOC)
3132 {
3133 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3134 rtx last, insns;
3135
3136 /* The return value from a malloc-like function is a pointer. */
3137 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3138 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3139
3140 emit_move_insn (temp, valreg);
3141
3142 /* The return value from a malloc-like function cannot alias
3143 anything else. */
3144 last = get_last_insn ();
3145 REG_NOTES (last) =
3146 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3147
3148 /* Write out the sequence. */
3149 insns = get_insns ();
3150 end_sequence ();
3151 emit_insns (insns);
3152 valreg = temp;
3153 }
3154
3155 /* For calls to `setjmp', etc., inform flow.c it should complain
3156 if nonvolatile values are live. For functions that cannot return,
3157 inform flow that control does not fall through. */
3158
3159 if ((flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3160 {
3161 /* The barrier or NOTE_INSN_SETJMP note must be emitted
3162 immediately after the CALL_INSN. Some ports emit more
3163 than just a CALL_INSN above, so we must search for it here. */
3164
3165 rtx last = get_last_insn ();
3166 while (GET_CODE (last) != CALL_INSN)
3167 {
3168 last = PREV_INSN (last);
3169 /* There was no CALL_INSN? */
3170 if (last == before_call)
3171 abort ();
3172 }
3173
3174 if (flags & ECF_RETURNS_TWICE)
3175 {
3176 emit_note_after (NOTE_INSN_SETJMP, last);
3177 current_function_calls_setjmp = 1;
3178 }
3179 else
3180 emit_barrier_after (last);
3181 }
3182
3183 if (flags & ECF_LONGJMP)
3184 current_function_calls_longjmp = 1;
3185
3186 /* If this function is returning into a memory location marked as
3187 readonly, it means it is initializing that location. But we normally
3188 treat functions as not clobbering such locations, so we need to
3189 specify that this one does. */
3190 if (target != 0 && GET_CODE (target) == MEM
3191 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3192 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3193
3194 /* If value type not void, return an rtx for the value. */
3195
3196 /* If there are cleanups to be called, don't use a hard reg as target.
3197 We need to double check this and see if it matters anymore. */
3198 if (any_pending_cleanups (1))
3199 {
3200 if (target && REG_P (target)
3201 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3202 target = 0;
3203 sibcall_failure = 1;
3204 }
3205
3206 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3207 || ignore)
3208 {
3209 target = const0_rtx;
3210 }
3211 else if (structure_value_addr)
3212 {
3213 if (target == 0 || GET_CODE (target) != MEM)
3214 {
3215 target
3216 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3217 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3218 structure_value_addr));
3219 set_mem_attributes (target, exp, 1);
3220 }
3221 }
3222 else if (pcc_struct_value)
3223 {
3224 /* This is the special C++ case where we need to
3225 know what the true target was. We take care to
3226 never use this value more than once in one expression. */
3227 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3228 copy_to_reg (valreg));
3229 set_mem_attributes (target, exp, 1);
3230 }
3231 /* Handle calls that return values in multiple non-contiguous locations.
3232 The Irix 6 ABI has examples of this. */
3233 else if (GET_CODE (valreg) == PARALLEL)
3234 {
3235 int bytes = int_size_in_bytes (TREE_TYPE (exp));
3236
3237 if (target == 0)
3238 {
3239 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
3240 bytes, 0);
3241 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
3242 preserve_temp_slots (target);
3243 }
3244
3245 if (! rtx_equal_p (target, valreg))
3246 emit_group_store (target, valreg, bytes,
3247 TYPE_ALIGN (TREE_TYPE (exp)));
3248
3249 /* We cannot support sibling calls for this case. */
3250 sibcall_failure = 1;
3251 }
3252 else if (target
3253 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3254 && GET_MODE (target) == GET_MODE (valreg))
3255 {
3256 /* TARGET and VALREG cannot be equal at this point because the
3257 latter would not have REG_FUNCTION_VALUE_P true, while the
3258 former would if it were referring to the same register.
3259
3260 If they refer to the same register, this move will be a no-op,
3261 except when function inlining is being done. */
3262 emit_move_insn (target, valreg);
3263 }
3264 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3265 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3266 else
3267 target = copy_to_reg (valreg);
3268
3269 #ifdef PROMOTE_FUNCTION_RETURN
3270 /* If we promoted this return value, make the proper SUBREG. TARGET
3271 might be const0_rtx here, so be careful. */
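  /* Example (hypothetical target): a `short' value promoted to SImode
     comes back in an SImode reg; the SUBREG built below re-narrows it to
     HImode while recording that the upper bits are already extended.  */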
3272 if (GET_CODE (target) == REG
3273 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3274 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3275 {
3276 tree type = TREE_TYPE (exp);
3277 int unsignedp = TREE_UNSIGNED (type);
3278
3279 /* If we don't promote as expected, something is wrong. */
3280 if (GET_MODE (target)
3281 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3282 abort ();
3283
3284 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
3285 SUBREG_PROMOTED_VAR_P (target) = 1;
3286 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
3287 }
3288 #endif
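/* A sketch of the case above, assuming a target that promotes SImode
   return values to DImode: VALREG arrives as a DImode register, and
   TARGET is rewritten as (subreg:SI (reg:DI ...) 0) with
   SUBREG_PROMOTED_VAR_P set, recording that the high part already holds
   the extension named by SUBREG_PROMOTED_UNSIGNED_P.  */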
3289
3290 /* If the size of the args is variable or this was a constructor call for a
3291 stack argument, restore the saved stack-pointer value. */
3292
3293 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3294 {
3295 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3296 pending_stack_adjust = old_pending_adj;
3297 stack_arg_under_construction = old_stack_arg_under_construction;
3298 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3299 stack_usage_map = initial_stack_usage_map;
3300 sibcall_failure = 1;
3301 }
3302 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3303 {
3304 #ifdef REG_PARM_STACK_SPACE
3305 if (save_area)
3306 {
3307 restore_fixed_argument_area (save_area, argblock,
3308 high_to_save, low_to_save);
3309 }
3310 #endif
3311
3312 /* If we saved any argument areas, restore them. */
3313 for (i = 0; i < num_actuals; i++)
3314 if (args[i].save_area)
3315 {
3316 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3317 rtx stack_area
3318 = gen_rtx_MEM (save_mode,
3319 memory_address (save_mode,
3320 XEXP (args[i].stack_slot, 0)));
3321
3322 if (save_mode != BLKmode)
3323 emit_move_insn (stack_area, args[i].save_area);
3324 else
3325 emit_block_move (stack_area,
3326 validize_mem (args[i].save_area),
3327 GEN_INT (args[i].size.constant),
3328 PARM_BOUNDARY);
3329 }
3330
3331 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3332 stack_usage_map = initial_stack_usage_map;
3333 }
3334
3335 /* If this was alloca, record the new stack level for nonlocal gotos.
3336 Check for the handler slots since we might not have a save area
3337 for nonlocal gotos. */
3338
3339 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3340 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3341
3342 pop_temp_slots ();
3343
3344 /* Free up storage we no longer need. */
3345 for (i = 0; i < num_actuals; ++i)
3346 if (args[i].aligned_regs)
3347 free (args[i].aligned_regs);
3348
3349 if (pass == 0)
3350 {
3351 /* Undo the fake expand_start_target_temps we did earlier. If
3352 any cleanups were created, we have already set
3353 sibcall_failure. */
3354 expand_end_target_temps ();
3355 }
3356
3357 insns = get_insns ();
3358 end_sequence ();
3359
3360 if (pass == 0)
3361 {
3362 tail_call_insns = insns;
3363
3364 /* If something prevents making this a sibling call,
3365 zero out the sequence. */
3366 if (sibcall_failure)
3367 tail_call_insns = NULL_RTX;
3368 /* Restore the pending stack adjustment now that we have
3369 finished generating the sibling call sequence. */
3370
3371 pending_stack_adjust = save_pending_stack_adjust;
3372 stack_pointer_delta = save_stack_pointer_delta;
3373
3374 /* Prepare arg structure for next iteration. */
3375 for (i = 0 ; i < num_actuals ; i++)
3376 {
3377 args[i].value = 0;
3378 args[i].aligned_regs = 0;
3379 args[i].stack = 0;
3380 }
3381
3382 sbitmap_free (stored_args_map);
3383 }
3384 else
3385 normal_call_insns = insns;
3386 }
3387
3388 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3389 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3390 can happen if the arguments to this function call an inline
3391 function whose expansion contains another CALL_PLACEHOLDER.
3392
3393 If there are any C_Ps in any of these sequences, replace them
3394 with their normal call. */
3395
3396 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3397 if (GET_CODE (insn) == CALL_INSN
3398 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3399 replace_call_placeholder (insn, sibcall_use_normal);
3400
3401 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3402 if (GET_CODE (insn) == CALL_INSN
3403 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3404 replace_call_placeholder (insn, sibcall_use_normal);
3405
3406 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3407 if (GET_CODE (insn) == CALL_INSN
3408 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3409 replace_call_placeholder (insn, sibcall_use_normal);
3410
3411 /* If this was a potential tail recursion site, then emit a
3412 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3413 One of them will be selected later. */
3414 if (tail_recursion_insns || tail_call_insns)
3415 {
3416 /* The tail recursion label must be kept around. We could expose
3417 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3418 and makes determining true tail recursion sites difficult.
3419
3420 So we set LABEL_PRESERVE_P here, then clear it when we select
3421 one of the call sequences after rtl generation is complete. */
3422 if (tail_recursion_insns)
3423 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3424 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3425 tail_call_insns,
3426 tail_recursion_insns,
3427 tail_recursion_label));
3428 }
3429 else
3430 emit_insns (normal_call_insns);
3431
3432 currently_expanding_call--;
3433
3434 /* If this function returns with the stack pointer depressed, ensure
3435 this block saves and restores the stack pointer, show it was
3436 changed, and adjust for any outgoing arg space. */
3437 if (flags & ECF_SP_DEPRESSED)
3438 {
3439 clear_pending_stack_adjust ();
3440 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3441 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3442 save_stack_pointer ();
3443 }
3444
3445 return target;
3446 }
3447 \f
3448 /* Returns nonzero if FUN is the symbol for a library function which
3449 cannot throw. */
3450
3451 static int
3452 libfunc_nothrow (fun)
3453 rtx fun;
3454 {
3455 if (fun == throw_libfunc
3456 || fun == rethrow_libfunc
3457 || fun == sjthrow_libfunc
3458 || fun == sjpopnthrow_libfunc)
3459 return 0;
3460
3461 return 1;
3462 }
3463 \f
3464 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3465 The RETVAL parameter specifies whether the return value needs to be saved;
3466 the other parameters are documented in the emit_library_call function below. */
3467 static rtx
3468 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3469 int retval;
3470 rtx orgfun;
3471 rtx value;
3472 int fn_type;
3473 enum machine_mode outmode;
3474 int nargs;
3475 va_list p;
3476 {
3477 /* Total size in bytes of all the stack-parms scanned so far. */
3478 struct args_size args_size;
3479 /* Size of arguments before any adjustments (such as rounding). */
3480 struct args_size original_args_size;
3481 register int argnum;
3482 rtx fun;
3483 int inc;
3484 int count;
3485 struct args_size alignment_pad;
3486 rtx argblock = 0;
3487 CUMULATIVE_ARGS args_so_far;
3488 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3489 struct args_size offset; struct args_size size; rtx save_area; };
3490 struct arg *argvec;
3491 int old_inhibit_defer_pop = inhibit_defer_pop;
3492 rtx call_fusage = 0;
3493 rtx mem_value = 0;
3494 rtx valreg;
3495 int pcc_struct_value = 0;
3496 int struct_value_size = 0;
3497 int flags = 0;
3498 int reg_parm_stack_space = 0;
3499 int needed;
3500
3501 #ifdef REG_PARM_STACK_SPACE
3502 /* Define the boundary of the register parm stack space that needs to be
3503 saved, if any. */
3504 int low_to_save = -1, high_to_save = 0;
3505 rtx save_area = 0; /* Place that it is saved */
3506 #endif
3507
3508 /* Size of the stack reserved for parameter registers. */
3509 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3510 char *initial_stack_usage_map = stack_usage_map;
3511
3512 #ifdef REG_PARM_STACK_SPACE
3513 #ifdef MAYBE_REG_PARM_STACK_SPACE
3514 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3515 #else
3516 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3517 #endif
3518 #endif
3519
3520 if (fn_type == 1)
3521 flags |= ECF_CONST;
3522 else if (fn_type == 2)
3523 flags |= ECF_PURE;
3524 fun = orgfun;
3525
3526 if (libfunc_nothrow (fun))
3527 flags |= ECF_NOTHROW;
3528
3529 #ifdef PREFERRED_STACK_BOUNDARY
3530 /* Ensure current function's preferred stack boundary is at least
3531 what we need. */
3532 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3533 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3534 #endif
3535
3536 /* If this kind of value comes back in memory,
3537 decide where in memory it should come back. */
3538 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
3539 {
3540 #ifdef PCC_STATIC_STRUCT_RETURN
3541 rtx pointer_reg
3542 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3543 0, 0);
3544 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3545 pcc_struct_value = 1;
3546 if (value == 0)
3547 value = gen_reg_rtx (outmode);
3548 #else /* not PCC_STATIC_STRUCT_RETURN */
3549 struct_value_size = GET_MODE_SIZE (outmode);
3550 if (value != 0 && GET_CODE (value) == MEM)
3551 mem_value = value;
3552 else
3553 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3554 #endif
3555
3556 /* This call returns a big structure. */
3557 flags &= ~(ECF_CONST | ECF_PURE);
3558 }
3559
3560 /* ??? Unfinished: must pass the memory address as an argument. */
3561
3562 /* Copy all the libcall-arguments out of the varargs data
3563 and into a vector ARGVEC.
3564
3565 Compute how to pass each argument. We only support a very small subset
3566 of the full argument passing conventions to limit complexity here since
3567 library functions shouldn't have many args. */
3568
3569 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3570 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3571
3572 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3573
3574 args_size.constant = 0;
3575 args_size.var = 0;
3576
3577 count = 0;
3578
3579 /* Now we are about to start emitting insns that can be deleted
3580 if a libcall is deleted. */
3581 if (flags & (ECF_CONST | ECF_PURE))
3582 start_sequence ();
3583
3584 push_temp_slots ();
3585
3586 /* If there's a structure value address to be passed,
3587 either pass it in the special place, or pass it as an extra argument. */
3588 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3589 {
3590 rtx addr = XEXP (mem_value, 0);
3591 nargs++;
3592
3593 /* Make sure it is a reasonable operand for a move or push insn. */
3594 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3595 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3596 addr = force_operand (addr, NULL_RTX);
3597
3598 argvec[count].value = addr;
3599 argvec[count].mode = Pmode;
3600 argvec[count].partial = 0;
3601
3602 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3603 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3604 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3605 abort ();
3606 #endif
3607
3608 locate_and_pad_parm (Pmode, NULL_TREE,
3609 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3610 1,
3611 #else
3612 argvec[count].reg != 0,
3613 #endif
3614 NULL_TREE, &args_size, &argvec[count].offset,
3615 &argvec[count].size, &alignment_pad);
3616
3617
3618 if (argvec[count].reg == 0 || argvec[count].partial != 0
3619 || reg_parm_stack_space > 0)
3620 args_size.constant += argvec[count].size.constant;
3621
3622 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3623
3624 count++;
3625 }
3626
3627 for (; count < nargs; count++)
3628 {
3629 rtx val = va_arg (p, rtx);
3630 enum machine_mode mode = va_arg (p, enum machine_mode);
3631
3632 /* We cannot convert the arg value to the mode the library wants here;
3633 must do it earlier where we know the signedness of the arg. */
3634 if (mode == BLKmode
3635 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3636 abort ();
3637
3638 /* On some machines, there's no way to pass a float to a library fcn.
3639 Pass it as a double instead. */
3640 #ifdef LIBGCC_NEEDS_DOUBLE
3641 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3642 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3643 #endif
3644
3645 /* There's no need to call protect_from_queue, because
3646 either emit_move_insn or emit_push_insn will do that. */
3647
3648 /* Make sure it is a reasonable operand for a move or push insn. */
3649 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3650 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3651 val = force_operand (val, NULL_RTX);
3652
3653 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3654 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3655 {
3656 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3657 be viewed as just an efficiency improvement. */
3658 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3659 emit_move_insn (slot, val);
3660 val = force_operand (XEXP (slot, 0), NULL_RTX);
3661 mode = Pmode;
3662 }
3663 #endif
3664
3665 argvec[count].value = val;
3666 argvec[count].mode = mode;
3667
3668 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3669
3670 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3671 argvec[count].partial
3672 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3673 #else
3674 argvec[count].partial = 0;
3675 #endif
3676
3677 locate_and_pad_parm (mode, NULL_TREE,
3678 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3679 1,
3680 #else
3681 argvec[count].reg != 0,
3682 #endif
3683 NULL_TREE, &args_size, &argvec[count].offset,
3684 &argvec[count].size, &alignment_pad);
3685
3686 if (argvec[count].size.var)
3687 abort ();
3688
3689 if (reg_parm_stack_space == 0 && argvec[count].partial)
3690 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3691
3692 if (argvec[count].reg == 0 || argvec[count].partial != 0
3693 || reg_parm_stack_space > 0)
3694 args_size.constant += argvec[count].size.constant;
3695
3696 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3697 }
3698
3699 #ifdef FINAL_REG_PARM_STACK_SPACE
3700 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3701 args_size.var);
3702 #endif
3703 /* If this machine requires an external definition for library
3704 functions, write one out. */
3705 assemble_external_libcall (fun);
3706
3707 original_args_size = args_size;
3708 #ifdef PREFERRED_STACK_BOUNDARY
3709 args_size.constant = (((args_size.constant
3710 + stack_pointer_delta
3711 + STACK_BYTES - 1)
3712 / STACK_BYTES
3713 * STACK_BYTES)
3714 - stack_pointer_delta);
3715 #endif
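/* A worked example with illustrative numbers: if STACK_BYTES is 16,
   stack_pointer_delta is 4 and the raw args_size.constant is 20, the
   expression above yields ((20 + 4 + 15) / 16) * 16 - 4 = 28, so the 28
   bytes of argument space plus the existing 4-byte delta give 32, again
   a multiple of the preferred boundary.  */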
3716
3717 args_size.constant = MAX (args_size.constant,
3718 reg_parm_stack_space);
3719
3720 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3721 args_size.constant -= reg_parm_stack_space;
3722 #endif
3723
3724 if (args_size.constant > current_function_outgoing_args_size)
3725 current_function_outgoing_args_size = args_size.constant;
3726
3727 if (ACCUMULATE_OUTGOING_ARGS)
3728 {
3729 /* Since the stack pointer will never be pushed, it is possible for
3730 the evaluation of a parm to clobber something we have already
3731 written to the stack. Since most function calls on RISC machines
3732 do not use the stack, this is uncommon, but must work correctly.
3733
3734 Therefore, we save any area of the stack that was already written
3735 and that we are using. Here we set up to do this by making a new
3736 stack usage map from the old one.
3737
3738 Another approach might be to try to reorder the argument
3739 evaluations to avoid this conflicting stack usage. */
3740
3741 needed = args_size.constant;
3742
3743 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3744 /* Since we will be writing into the entire argument area, the
3745 map must be allocated for its entire size, not just the part that
3746 is the responsibility of the caller. */
3747 needed += reg_parm_stack_space;
3748 #endif
3749
3750 #ifdef ARGS_GROW_DOWNWARD
3751 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3752 needed + 1);
3753 #else
3754 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3755 needed);
3756 #endif
3757 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3758
3759 if (initial_highest_arg_in_use)
3760 bcopy (initial_stack_usage_map, stack_usage_map,
3761 initial_highest_arg_in_use);
3762
3763 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3764 bzero (&stack_usage_map[initial_highest_arg_in_use],
3765 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3766 needed = 0;
3767
3768 /* The address of the outgoing argument list must not be copied to a
3769 register here, because argblock would be left pointing to the
3770 wrong place after the call to allocate_dynamic_stack_space below.
3771 */
3772
3773 argblock = virtual_outgoing_args_rtx;
3774 }
3775 else
3776 {
3777 if (!PUSH_ARGS)
3778 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3779 }
3780
3781 #ifdef PREFERRED_STACK_BOUNDARY
3782 /* If we push args individually in reverse order, perform stack alignment
3783 before the first push (the last arg). */
3784 if (argblock == 0 && PUSH_ARGS_REVERSED)
3785 anti_adjust_stack (GEN_INT (args_size.constant
3786 - original_args_size.constant));
3787 #endif
3788
3789 if (PUSH_ARGS_REVERSED)
3790 {
3791 inc = -1;
3792 argnum = nargs - 1;
3793 }
3794 else
3795 {
3796 inc = 1;
3797 argnum = 0;
3798 }
3799
3800 #ifdef REG_PARM_STACK_SPACE
3801 if (ACCUMULATE_OUTGOING_ARGS)
3802 {
3803 /* The argument list is the property of the called routine, which
3804 may clobber it. If the fixed area has been used for previous
3805 parameters, we must save and restore it.
3806
3807 Here we compute the boundary of the area that needs to be saved, if any. */
3808
3809 #ifdef ARGS_GROW_DOWNWARD
3810 for (count = 0; count < reg_parm_stack_space + 1; count++)
3811 #else
3812 for (count = 0; count < reg_parm_stack_space; count++)
3813 #endif
3814 {
3815 if (count >= highest_outgoing_arg_in_use
3816 || stack_usage_map[count] == 0)
3817 continue;
3818
3819 if (low_to_save == -1)
3820 low_to_save = count;
3821
3822 high_to_save = count;
3823 }
3824
3825 if (low_to_save >= 0)
3826 {
3827 int num_to_save = high_to_save - low_to_save + 1;
3828 enum machine_mode save_mode
3829 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3830 rtx stack_area;
3831
3832 /* If we don't have the required alignment, we must do this in BLKmode. */
3833 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3834 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3835 save_mode = BLKmode;
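/* E.g. (illustrative values): saving 4 bytes starting at a 4-byte-aligned
   offset lets mode_for_size pick SImode, so a single register copy
   suffices; a misaligned LOW_TO_SAVE forces the BLKmode block-move path
   below.  */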
3836
3837 #ifdef ARGS_GROW_DOWNWARD
3838 stack_area = gen_rtx_MEM (save_mode,
3839 memory_address (save_mode,
3840 plus_constant (argblock,
3841 - high_to_save)));
3842 #else
3843 stack_area = gen_rtx_MEM (save_mode,
3844 memory_address (save_mode,
3845 plus_constant (argblock,
3846 low_to_save)));
3847 #endif
3848 if (save_mode == BLKmode)
3849 {
3850 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3851 emit_block_move (validize_mem (save_area), stack_area,
3852 GEN_INT (num_to_save), PARM_BOUNDARY);
3853 }
3854 else
3855 {
3856 save_area = gen_reg_rtx (save_mode);
3857 emit_move_insn (save_area, stack_area);
3858 }
3859 }
3860 }
3861 #endif
3862
3863 /* Push the args that need to be pushed. */
3864
3865 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3866 are to be pushed. */
3867 for (count = 0; count < nargs; count++, argnum += inc)
3868 {
3869 register enum machine_mode mode = argvec[argnum].mode;
3870 register rtx val = argvec[argnum].value;
3871 rtx reg = argvec[argnum].reg;
3872 int partial = argvec[argnum].partial;
3873 int lower_bound = 0, upper_bound = 0, i;
3874
3875 if (! (reg != 0 && partial == 0))
3876 {
3877 if (ACCUMULATE_OUTGOING_ARGS)
3878 {
3879 /* If this is being stored into a pre-allocated, fixed-size
3880 stack area, save any previous data at that location. */
3881
3882 #ifdef ARGS_GROW_DOWNWARD
3883 /* stack_slot is negative, but we want to index stack_usage_map
3884 with positive values. */
3885 upper_bound = -argvec[argnum].offset.constant + 1;
3886 lower_bound = upper_bound - argvec[argnum].size.constant;
3887 #else
3888 lower_bound = argvec[argnum].offset.constant;
3889 upper_bound = lower_bound + argvec[argnum].size.constant;
3890 #endif
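/* With ARGS_GROW_DOWNWARD and illustrative values, an argument at
   offset -16 of size 8 maps to stack_usage_map indices 9 through 16:
   UPPER_BOUND = 16 + 1 and LOWER_BOUND = 17 - 8, so the byte at
   offset -K is tracked at index K.  */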
3891
3892 for (i = lower_bound; i < upper_bound; i++)
3893 if (stack_usage_map[i]
3894 /* Don't store things in the fixed argument area at this
3895 point; it has already been saved. */
3896 && i > reg_parm_stack_space)
3897 break;
3898
3899 if (i != upper_bound)
3900 {
3901 /* We need to make a save area. See what mode we can make
3902 it in. */
3903 enum machine_mode save_mode
3904 = mode_for_size (argvec[argnum].size.constant
3905 * BITS_PER_UNIT,
3906 MODE_INT, 1);
3907 rtx stack_area
3908 = gen_rtx_MEM
3909 (save_mode,
3910 memory_address
3911 (save_mode,
3912 plus_constant (argblock,
3913 argvec[argnum].offset.constant)));
3914 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3915
3916 emit_move_insn (argvec[argnum].save_area, stack_area);
3917 }
3918 }
3919
3920 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3921 argblock, GEN_INT (argvec[argnum].offset.constant),
3922 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3923
3924 /* Now mark the segment we just used. */
3925 if (ACCUMULATE_OUTGOING_ARGS)
3926 for (i = lower_bound; i < upper_bound; i++)
3927 stack_usage_map[i] = 1;
3928
3929 NO_DEFER_POP;
3930 }
3931 }
3932
3933 #ifdef PREFERRED_STACK_BOUNDARY
3934 /* If we pushed args in forward order, perform stack alignment
3935 after pushing the last arg. */
3936 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3937 anti_adjust_stack (GEN_INT (args_size.constant
3938 - original_args_size.constant));
3939 #endif
3940
3941 if (PUSH_ARGS_REVERSED)
3942 argnum = nargs - 1;
3943 else
3944 argnum = 0;
3945
3946 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3947
3948 /* Now load any reg parms into their regs. */
3949
3950 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3951 are to be pushed. */
3952 for (count = 0; count < nargs; count++, argnum += inc)
3953 {
3954 register rtx val = argvec[argnum].value;
3955 rtx reg = argvec[argnum].reg;
3956 int partial = argvec[argnum].partial;
3957
3958 /* Handle calls that pass values in multiple non-contiguous
3959 locations. The PA64 has examples of this for library calls. */
3960 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3961 emit_group_load (reg, val,
3962 GET_MODE_SIZE (GET_MODE (val)),
3963 GET_MODE_ALIGNMENT (GET_MODE (val)));
3964 else if (reg != 0 && partial == 0)
3965 emit_move_insn (reg, val);
3966
3967 NO_DEFER_POP;
3968 }
3969
3970 /* Any regs containing parms remain in use through the call. */
3971 for (count = 0; count < nargs; count++)
3972 {
3973 rtx reg = argvec[count].reg;
3974 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3975 use_group_regs (&call_fusage, reg);
3976 else if (reg != 0)
3977 use_reg (&call_fusage, reg);
3978 }
3979
3980 /* Pass the function the address in which to return a structure value. */
3981 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3982 {
3983 emit_move_insn (struct_value_rtx,
3984 force_reg (Pmode,
3985 force_operand (XEXP (mem_value, 0),
3986 NULL_RTX)));
3987 if (GET_CODE (struct_value_rtx) == REG)
3988 use_reg (&call_fusage, struct_value_rtx);
3989 }
3990
3991 /* Don't allow popping to be deferred, since then
3992 cse'ing of library calls could delete a call and leave the pop. */
3993 NO_DEFER_POP;
3994 valreg = (mem_value == 0 && outmode != VOIDmode
3995 ? hard_libcall_value (outmode) : NULL_RTX);
3996
3997 #ifdef PREFERRED_STACK_BOUNDARY
3998 /* Stack must be properly aligned now. */
3999 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4000 abort ();
4001 #endif
4002
4003 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4004 will set inhibit_defer_pop to that value. */
4005 /* The return type is needed to decide how many bytes the function pops.
4006 Signedness plays no role in that, so for simplicity, we pretend it's
4007 always signed. We also assume that the list of arguments passed has
4008 no impact, so we pretend it is unknown. */
4009
4010 emit_call_1 (fun,
4011 get_identifier (XSTR (orgfun, 0)),
4012 build_function_type (outmode == VOIDmode ? void_type_node
4013 : type_for_mode (outmode, 0), NULL_TREE),
4014 original_args_size.constant, args_size.constant,
4015 struct_value_size,
4016 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4017 valreg,
4018 old_inhibit_defer_pop + 1, call_fusage, flags);
4019
4020 /* Now restore inhibit_defer_pop to its actual original value. */
4021 OK_DEFER_POP;
4022
4023 /* If the call is cse'able, make appropriate pair of reg-notes around it.
4024 Test valreg so we don't crash; may safely ignore `const'
4025 if return type is void. Disable for PARALLEL return values, because
4026 we have no way to move such values into a pseudo register. */
4027 if ((flags & (ECF_CONST | ECF_PURE))
4028 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
4029 {
4030 rtx note = 0;
4031 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4032 rtx insns;
4033 int i;
4034
4035 /* Construct an "equal form" for the value which mentions all the
4036 arguments in order as well as the function name. */
4037 for (i = 0; i < nargs; i++)
4038 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4039 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
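/* The finished note is a chain of EXPR_LISTs headed by the function's
   SYMBOL_REF and carrying every argument value; emit_libcall_block
   attaches it as a REG_EQUAL note so that CSE can unify repeated
   libcalls with identical operands.  */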
4040
4041 insns = get_insns ();
4042 end_sequence ();
4043
4044 if (flags & ECF_PURE)
4045 note = gen_rtx_EXPR_LIST (VOIDmode,
4046 gen_rtx_USE (VOIDmode,
4047 gen_rtx_MEM (BLKmode,
4048 gen_rtx_SCRATCH (VOIDmode))), note);
4049
4050 emit_libcall_block (insns, temp, valreg, note);
4051
4052 valreg = temp;
4053 }
4054 else if (flags & (ECF_CONST | ECF_PURE))
4055 {
4056 /* Otherwise, just write out the sequence without a note. */
4057 rtx insns = get_insns ();
4058
4059 end_sequence ();
4060 emit_insns (insns);
4061 }
4062 pop_temp_slots ();
4063
4064 /* Copy the value to the right place. */
4065 if (outmode != VOIDmode && retval)
4066 {
4067 if (mem_value)
4068 {
4069 if (value == 0)
4070 value = mem_value;
4071 if (value != mem_value)
4072 emit_move_insn (value, mem_value);
4073 }
4074 else if (value != 0)
4075 emit_move_insn (value, hard_libcall_value (outmode));
4076 else
4077 value = hard_libcall_value (outmode);
4078 }
4079
4080 if (ACCUMULATE_OUTGOING_ARGS)
4081 {
4082 #ifdef REG_PARM_STACK_SPACE
4083 if (save_area)
4084 {
4085 enum machine_mode save_mode = GET_MODE (save_area);
4086 #ifdef ARGS_GROW_DOWNWARD
4087 rtx stack_area
4088 = gen_rtx_MEM (save_mode,
4089 memory_address (save_mode,
4090 plus_constant (argblock,
4091 - high_to_save)));
4092 #else
4093 rtx stack_area
4094 = gen_rtx_MEM (save_mode,
4095 memory_address (save_mode,
4096 plus_constant (argblock, low_to_save)));
4097 #endif
4098 if (save_mode != BLKmode)
4099 emit_move_insn (stack_area, save_area);
4100 else
4101 emit_block_move (stack_area, validize_mem (save_area),
4102 GEN_INT (high_to_save - low_to_save + 1),
4103 PARM_BOUNDARY);
4104 }
4105 #endif
4106
4107 /* If we saved any argument areas, restore them. */
4108 for (count = 0; count < nargs; count++)
4109 if (argvec[count].save_area)
4110 {
4111 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4112 rtx stack_area
4113 = gen_rtx_MEM (save_mode,
4114 memory_address
4115 (save_mode,
4116 plus_constant (argblock,
4117 argvec[count].offset.constant)));
4118
4119 emit_move_insn (stack_area, argvec[count].save_area);
4120 }
4121
4122 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4123 stack_usage_map = initial_stack_usage_map;
4124 }
4125
4126 return value;
4127
4128 }
4129 \f
4130 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4131 with FN_TYPE as described below,
4132 for a value of mode OUTMODE,
4133 with NARGS different arguments, passed as alternating rtx values
4134 and machine_modes to convert them to.
4135 The rtx values should have been passed through protect_from_queue already.
4136
4137 FN_TYPE is zero for `normal' calls, one for `const' calls, which
4138 will be enclosed in REG_LIBCALL/REG_RETVAL notes, and two for `pure'
4139 calls, which are handled like `const' calls with an extra
4140 (use (memory (scratch))). */
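/* A typical use appears in store_one_arg below, where the memory checker
   emits

      emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                         addr, Pmode, size, TYPE_MODE (sizetype),
                         GEN_INT (MEMORY_USE_RW), TYPE_MODE (integer_type_node));

   i.e. a `const' call (FN_TYPE 1) of three rtx/mode pairs whose return
   value is ignored.  (ADDR and SIZE here stand for the actual operand
   expressions used at that call site.)  */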
4141
4142 void
4143 emit_library_call VPARAMS((rtx orgfun, int fn_type, enum machine_mode outmode,
4144 int nargs, ...))
4145 {
4146 #ifndef ANSI_PROTOTYPES
4147 rtx orgfun;
4148 int fn_type;
4149 enum machine_mode outmode;
4150 int nargs;
4151 #endif
4152 va_list p;
4153
4154 VA_START (p, nargs);
4155
4156 #ifndef ANSI_PROTOTYPES
4157 orgfun = va_arg (p, rtx);
4158 fn_type = va_arg (p, int);
4159 outmode = va_arg (p, enum machine_mode);
4160 nargs = va_arg (p, int);
4161 #endif
4162
4163 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4164
4165 va_end (p);
4166 }
4167 \f
4168 /* Like emit_library_call except that an extra argument, VALUE,
4169 comes second and says where to store the result.
4170 (If VALUE is zero, this function chooses a convenient way
4171 to return the value.)
4172
4173 This function returns an rtx for where the value is to be found.
4174 If VALUE is nonzero, VALUE is returned. */
4175
4176 rtx
4177 emit_library_call_value VPARAMS((rtx orgfun, rtx value, int fn_type,
4178 enum machine_mode outmode, int nargs, ...))
4179 {
4180 #ifndef ANSI_PROTOTYPES
4181 rtx orgfun;
4182 rtx value;
4183 int fn_type;
4184 enum machine_mode outmode;
4185 int nargs;
4186 #endif
4187 va_list p;
4188
4189 VA_START (p, nargs);
4190
4191 #ifndef ANSI_PROTOTYPES
4192 orgfun = va_arg (p, rtx);
4193 value = va_arg (p, rtx);
4194 fn_type = va_arg (p, int);
4195 outmode = va_arg (p, enum machine_mode);
4196 nargs = va_arg (p, int);
4197 #endif
4198
4199 value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);
4200
4201 va_end (p);
4202
4203 return value;
4204 }
4205 \f
4206 #if 0
4207 /* Return an rtx which represents a suitable home on the stack
4208 given TYPE, the type of the argument looking for a home.
4209 This is called only for BLKmode arguments.
4210
4211 SIZE is the size needed for this target.
4212 ARGS_ADDR is the address of the bottom of the argument block for this call.
4213 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
4214 if this machine uses push insns. */
4215
4216 static rtx
4217 target_for_arg (type, size, args_addr, offset)
4218 tree type;
4219 rtx size;
4220 rtx args_addr;
4221 struct args_size offset;
4222 {
4223 rtx target;
4224 rtx offset_rtx = ARGS_SIZE_RTX (offset);
4225
4226 /* We avoid calling memory_address when possible,
4227 because we want to address as close to the stack
4228 as possible. For non-variable sized arguments,
4229 this will be stack-pointer relative addressing. */
4230 if (GET_CODE (offset_rtx) == CONST_INT)
4231 target = plus_constant (args_addr, INTVAL (offset_rtx));
4232 else
4233 {
4234 /* I have no idea how to guarantee that this
4235 will work in the presence of register parameters. */
4236 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
4237 target = memory_address (QImode, target);
4238 }
4239
4240 return gen_rtx_MEM (BLKmode, target);
4241 }
4242 #endif
4243 \f
4244 /* Store a single argument for a function call
4245 into the register or memory area where it must be passed.
4246 *ARG describes the argument value and where to pass it.
4247
4248 ARGBLOCK is the address of the stack-block for all the arguments,
4249 or 0 on a machine where arguments are pushed individually.
4250
4251 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4252 so must be careful about how the stack is used.
4253
4254 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4255 argument area. This is used, if ACCUMULATE_OUTGOING_ARGS, to indicate
4256 that we need not worry about saving and restoring the stack.
4257
4258 FNDECL is the declaration of the function we are calling.
4259
4260 Return non-zero if this arg should cause sibcall failure,
4261 zero otherwise. */
4262
4263 static int
4264 store_one_arg (arg, argblock, flags, variable_size,
4265 reg_parm_stack_space)
4266 struct arg_data *arg;
4267 rtx argblock;
4268 int flags;
4269 int variable_size ATTRIBUTE_UNUSED;
4270 int reg_parm_stack_space;
4271 {
4272 register tree pval = arg->tree_value;
4273 rtx reg = 0;
4274 int partial = 0;
4275 int used = 0;
4276 int i, lower_bound = 0, upper_bound = 0;
4277 int sibcall_failure = 0;
4278
4279 if (TREE_CODE (pval) == ERROR_MARK)
4280 return 1;
4281
4282 /* Push a new temporary level for any temporaries we make for
4283 this argument. */
4284 push_temp_slots ();
4285
4286 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4287 {
4288 /* If this is being stored into a pre-allocated, fixed-size stack area,
4289 save any previous data at that location. */
4290 if (argblock && ! variable_size && arg->stack)
4291 {
4292 #ifdef ARGS_GROW_DOWNWARD
4293 /* stack_slot is negative, but we want to index stack_usage_map
4294 with positive values. */
4295 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4296 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4297 else
4298 upper_bound = 0;
4299
4300 lower_bound = upper_bound - arg->size.constant;
4301 #else
4302 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4303 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4304 else
4305 lower_bound = 0;
4306
4307 upper_bound = lower_bound + arg->size.constant;
4308 #endif
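/* As in emit_library_call_value_1 above, [LOWER_BOUND, UPPER_BOUND)
   now delimits the bytes of stack_usage_map that this argument's stack
   slot occupies.  */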
4309
4310 for (i = lower_bound; i < upper_bound; i++)
4311 if (stack_usage_map[i]
4312 /* Don't store things in the fixed argument area at this point;
4313 it has already been saved. */
4314 && i > reg_parm_stack_space)
4315 break;
4316
4317 if (i != upper_bound)
4318 {
4319 /* We need to make a save area. See what mode we can make it in. */
4320 enum machine_mode save_mode
4321 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4322 rtx stack_area
4323 = gen_rtx_MEM (save_mode,
4324 memory_address (save_mode,
4325 XEXP (arg->stack_slot, 0)));
4326
4327 if (save_mode == BLKmode)
4328 {
4329 arg->save_area = assign_stack_temp (BLKmode,
4330 arg->size.constant, 0);
4331 MEM_SET_IN_STRUCT_P (arg->save_area,
4332 AGGREGATE_TYPE_P (TREE_TYPE
4333 (arg->tree_value)));
4334 preserve_temp_slots (arg->save_area);
4335 emit_block_move (validize_mem (arg->save_area), stack_area,
4336 GEN_INT (arg->size.constant),
4337 PARM_BOUNDARY);
4338 }
4339 else
4340 {
4341 arg->save_area = gen_reg_rtx (save_mode);
4342 emit_move_insn (arg->save_area, stack_area);
4343 }
4344 }
4345 }
4346 /* Now that we have saved any slots that will be overwritten by this
4347 store, mark all slots this store will use. We must do this before
4348 we actually expand the argument since the expansion itself may
4349 trigger library calls which might need to use the same stack slot. */
4350 if (argblock && ! variable_size && arg->stack)
4351 for (i = lower_bound; i < upper_bound; i++)
4352 stack_usage_map[i] = 1;
4353 }
4354
4355 /* If this isn't going to be placed on both the stack and in registers,
4356 set up the register and number of words. */
4357 if (! arg->pass_on_stack)
4358 reg = arg->reg, partial = arg->partial;
4359
4360 if (reg != 0 && partial == 0)
4361 /* Being passed entirely in a register. We shouldn't be called in
4362 this case. */
4363 abort ();
4364
4365 /* If this arg needs special alignment, don't load the registers
4366 here. */
4367 if (arg->n_aligned_regs != 0)
4368 reg = 0;
4369
4370 /* If this is being passed partially in a register, we can't evaluate
4371 it directly into its stack slot. Otherwise, we can. */
4372 if (arg->value == 0)
4373 {
4374 /* stack_arg_under_construction is nonzero if a function argument is
4375 being evaluated directly into the outgoing argument list and
4376 expand_call must take special action to preserve the argument list
4377 if it is called recursively.
4378
4379 For scalar function arguments stack_usage_map is sufficient to
4380 determine which stack slots must be saved and restored. Scalar
4381 arguments in general have pass_on_stack == 0.
4382
4383 If this argument is initialized by a function which takes the
4384 address of the argument (a C++ constructor or a C function
4385 returning a BLKmode structure), then stack_usage_map is
4386 insufficient and expand_call must push the stack around the
4387 function call. Such arguments have pass_on_stack == 1.
4388
4389 Note that it is always safe to set stack_arg_under_construction,
4390 but this generates suboptimal code if set when not needed. */
4391
4392 if (arg->pass_on_stack)
4393 stack_arg_under_construction++;
4394
4395 arg->value = expand_expr (pval,
4396 (partial
4397 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4398 ? NULL_RTX : arg->stack,
4399 VOIDmode, 0);
4400
4401 /* If we are promoting the object (or if for any other reason the mode
4402 doesn't agree), convert the mode. */
4403
4404 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4405 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4406 arg->value, arg->unsignedp);
4407
4408 if (arg->pass_on_stack)
4409 stack_arg_under_construction--;
4410 }
4411
4412 /* Don't leave anything on the stack from the computation
4413 of an argument to alloca. */
4414 if (flags & ECF_MAY_BE_ALLOCA)
4415 do_pending_stack_adjust ();
4416
4417 if (arg->value == arg->stack)
4418 {
4419 /* If the value is already in the stack slot, we are done. */
4420 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
4421 {
4422 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4423 XEXP (arg->stack, 0), Pmode,
4424 ARGS_SIZE_RTX (arg->size),
4425 TYPE_MODE (sizetype),
4426 GEN_INT (MEMORY_USE_RW),
4427 TYPE_MODE (integer_type_node));
4428 }
4429 }
4430 else if (arg->mode != BLKmode)
4431 {
4432 register int size;
4433
4434 /* Argument is a scalar, not entirely passed in registers.
4435 (If part is passed in registers, arg->partial says how much
4436 and emit_push_insn will take care of putting it there.)
4437
4438 Push it, and if its size is less than the
4439 amount of space allocated to it,
4440 also bump stack pointer by the additional space.
4441 Note that in C the default argument promotions
4442 will prevent such mismatches. */
4443
4444 size = GET_MODE_SIZE (arg->mode);
4445 /* Compute how much space the push instruction will push.
4446 On many machines, pushing a byte will advance the stack
4447 pointer by a halfword. */
4448 #ifdef PUSH_ROUNDING
4449 size = PUSH_ROUNDING (size);
4450 #endif
4451 used = size;
4452
4453 /* Compute how much space the argument should get:
4454 round up to a multiple of the alignment for arguments. */
4455 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4456 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4457 / (PARM_BOUNDARY / BITS_PER_UNIT))
4458 * (PARM_BOUNDARY / BITS_PER_UNIT));
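/* For instance (illustrative numbers): with a 32-bit PARM_BOUNDARY, a
   2-byte HImode argument that the target says to pad is given USED = 4,
   and the USED - SIZE = 2 surplus bytes are handled by emit_push_insn
   below.  */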
4459
4460 /* This isn't already where we want it on the stack, so put it there.
4461 This can either be done with push or copy insns. */
4462 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4463 partial, reg, used - size, argblock,
4464 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4465 ARGS_SIZE_RTX (arg->alignment_pad));
4466 }
4467 else
4468 {
4469 /* BLKmode, at least partly to be pushed. */
4470
4471 register int excess;
4472 rtx size_rtx;
4473
4474 /* Pushing a nonscalar.
4475 If part is passed in registers, PARTIAL says how much
4476 and emit_push_insn will take care of putting it there. */
4477
4478 /* Round its size up to a multiple
4479 of the allocation unit for arguments. */
4480
4481 if (arg->size.var != 0)
4482 {
4483 excess = 0;
4484 size_rtx = ARGS_SIZE_RTX (arg->size);
4485 }
4486 else
4487 {
4488 /* PUSH_ROUNDING has no effect on us, because
4489 emit_push_insn for BLKmode is careful to avoid it. */
4490 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4491 + partial * UNITS_PER_WORD);
4492 size_rtx = expr_size (pval);
4493 }
4494
4495 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4496 {
4497 /* emit_push_insn might not work properly if arg->value and
4498 argblock + arg->offset areas overlap. */
4499 rtx x = arg->value;
4500 int i = 0;
4501
4502 if (XEXP (x, 0) == current_function_internal_arg_pointer
4503 || (GET_CODE (XEXP (x, 0)) == PLUS
4504 && XEXP (XEXP (x, 0), 0) ==
4505 current_function_internal_arg_pointer
4506 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4507 {
4508 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4509 i = INTVAL (XEXP (XEXP (x, 0), 1));
4510
4511 /* expand_call should ensure this */
4512 if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
4513 abort ();
4514
4515 if (arg->offset.constant > i)
4516 {
4517 if (arg->offset.constant < i + INTVAL (size_rtx))
4518 sibcall_failure = 1;
4519 }
4520 else if (arg->offset.constant < i)
4521 {
4522 if (i < arg->offset.constant + INTVAL (size_rtx))
4523 sibcall_failure = 1;
4524 }
4525 }
4526 }
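/* In other words, with I the offset of the incoming slot holding
   ARG->value and SIZE its length, the two tests above flag a sibcall
   failure exactly when the source interval [I, I + SIZE) and the
   destination interval [ARG->offset.constant, ARG->offset.constant + SIZE)
   partially overlap; coinciding slots are left alone, since the push then
   copies a slot onto itself.  */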
4527
4528 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4529 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4530 argblock, ARGS_SIZE_RTX (arg->offset),
4531 reg_parm_stack_space,
4532 ARGS_SIZE_RTX (arg->alignment_pad));
4533 }
4534
4535
4536 /* Unless this is a partially-in-register argument, the argument is now
4537 in the stack.
4538
4539 ??? Note that this can change arg->value from arg->stack to
4540 arg->stack_slot and it matters when they are not the same.
4541 It isn't totally clear that this is correct in all cases. */
4542 if (partial == 0)
4543 arg->value = arg->stack_slot;
4544
4545 /* Once we have pushed something, pops can't safely
4546 be deferred during the rest of the arguments. */
4547 NO_DEFER_POP;
4548
4549 /* ANSI doesn't require a sequence point here,
4550 but PCC has one, so this will avoid some problems. */
4551 emit_queue ();
4552
4553 /* Free any temporary slots made in processing this argument. Show
4554 that we might have taken the address of something and pushed that
4555 as an operand. */
4556 preserve_temp_slots (NULL_RTX);
4557 free_temp_slots ();
4558 pop_temp_slots ();
4559
4560 return sibcall_failure;
4561 }