1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
39 #include "langhooks.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
47 /* Data structure and subroutines used within expand_call. */
51 /* Tree node for this argument. */
53 /* Mode for value; TYPE_MODE unless promoted. */
54 enum machine_mode mode
;
55 /* Current RTL value for argument, or 0 if it isn't precomputed. */
57 /* Initially-compute RTL value for argument; only for const functions. */
59 /* Register to pass this argument in, 0 if passed on stack, or an
60 PARALLEL if the arg is to be copied into multiple non-contiguous
63 /* Register to pass this argument in when generating tail call sequence.
64 This is not the same register as for normal calls on machines with
67 /* If REG was promoted from the actual mode of the argument expression,
68 indicates whether the promotion is sign- or zero-extended. */
70 /* Number of registers to use. 0 means put the whole arg in registers.
71 Also 0 if not passed in registers. */
73 /* Nonzero if argument must be passed on stack.
74 Note that some arguments may be passed on the stack
75 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
76 pass_on_stack identifies arguments that *cannot* go in registers. */
78 /* Some fields packaged up for locate_and_pad_parm. */
79 struct locate_and_pad_arg_data locate
;
80 /* Location on the stack at which parameter should be stored. The store
81 has already been done if STACK == VALUE. */
83 /* Location on the stack of the start of this argument slot. This can
84 differ from STACK if this arg pads downward. This location is known
85 to be aligned to FUNCTION_ARG_BOUNDARY. */
87 /* Place that this stack area has been saved, if needed. */
89 /* If an argument's alignment does not permit direct copying into registers,
90 copy in smaller-sized pieces into pseudos. These are stored in a
91 block pointed to by this field. The next field says how many
92 word-sized pseudos we made. */
97 /* A vector of one char per byte of stack space. A byte if nonzero if
98 the corresponding stack location has been used.
99 This vector is used to prevent a function call within an argument from
100 clobbering any stack already set up. */
101 static char *stack_usage_map
;
103 /* Size of STACK_USAGE_MAP. */
104 static int highest_outgoing_arg_in_use
;
106 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
107 stack location's tail call argument has been already stored into the stack.
108 This bitmap is used to prevent sibling call optimization if function tries
109 to use parent's incoming argument slots when they have been already
110 overwritten with tail call arguments. */
111 static sbitmap stored_args_map
;
113 /* stack_arg_under_construction is nonzero when an argument may be
114 initialized with a constructor call (including a C function that
115 returns a BLKmode struct) and expand_call must take special action
116 to make sure the object being constructed does not overlap the
117 argument list for the constructor call. */
118 int stack_arg_under_construction
;
120 static void emit_call_1 (rtx
, tree
, tree
, tree
, HOST_WIDE_INT
, HOST_WIDE_INT
,
121 HOST_WIDE_INT
, rtx
, rtx
, int, rtx
, int,
123 static void precompute_register_parameters (int, struct arg_data
*, int *);
124 static int store_one_arg (struct arg_data
*, rtx
, int, int, int);
125 static void store_unaligned_arguments_into_pseudos (struct arg_data
*, int);
126 static int finalize_must_preallocate (int, int, struct arg_data
*,
128 static void precompute_arguments (int, int, struct arg_data
*);
129 static int compute_argument_block_size (int, struct args_size
*, int);
130 static void initialize_argument_information (int, struct arg_data
*,
131 struct args_size
*, int, tree
,
132 tree
, CUMULATIVE_ARGS
*, int,
133 rtx
*, int *, int *, int *,
135 static void compute_argument_addresses (struct arg_data
*, rtx
, int);
136 static rtx
rtx_for_function_call (tree
, tree
);
137 static void load_register_parameters (struct arg_data
*, int, rtx
*, int,
139 static rtx
emit_library_call_value_1 (int, rtx
, rtx
, enum libcall_type
,
140 enum machine_mode
, int, va_list);
141 static int special_function_p (tree
, int);
142 static int check_sibcall_argument_overlap_1 (rtx
);
143 static int check_sibcall_argument_overlap (rtx
, struct arg_data
*, int);
145 static int combine_pending_stack_adjustment_and_call (int, struct args_size
*,
147 static tree
fix_unsafe_tree (tree
);
148 static bool shift_returned_value (tree
, rtx
*);
150 #ifdef REG_PARM_STACK_SPACE
151 static rtx
save_fixed_argument_area (int, rtx
, int *, int *);
152 static void restore_fixed_argument_area (rtx
, rtx
, int, int);
155 /* Force FUNEXP into a form suitable for the address of a CALL,
156 and return that as an rtx. Also load the static chain register
157 if FNDECL is a nested function.
159 CALL_FUSAGE points to a variable holding the prospective
160 CALL_INSN_FUNCTION_USAGE information. */
163 prepare_call_address (rtx funexp
, rtx static_chain_value
,
164 rtx
*call_fusage
, int reg_parm_seen
, int sibcallp
)
166 funexp
= protect_from_queue (funexp
, 0);
168 /* Make a valid memory address and copy constants through pseudo-regs,
169 but not for a constant address if -fno-function-cse. */
170 if (GET_CODE (funexp
) != SYMBOL_REF
)
171 /* If we are using registers for parameters, force the
172 function address into a register now. */
173 funexp
= ((SMALL_REGISTER_CLASSES
&& reg_parm_seen
)
174 ? force_not_mem (memory_address (FUNCTION_MODE
, funexp
))
175 : memory_address (FUNCTION_MODE
, funexp
));
178 #ifndef NO_FUNCTION_CSE
179 if (optimize
&& ! flag_no_function_cse
)
180 funexp
= force_reg (Pmode
, funexp
);
184 if (static_chain_value
!= 0)
186 static_chain_value
= convert_memory_address (Pmode
, static_chain_value
);
187 emit_move_insn (static_chain_rtx
, static_chain_value
);
189 if (REG_P (static_chain_rtx
))
190 use_reg (call_fusage
, static_chain_rtx
);
196 /* Generate instructions to call function FUNEXP,
197 and optionally pop the results.
198 The CALL_INSN is the first insn generated.
200 FNDECL is the declaration node of the function. This is given to the
201 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
203 FUNTYPE is the data type of the function. This is given to the macro
204 RETURN_POPS_ARGS to determine whether this function pops its own args.
205 We used to allow an identifier for library functions, but that doesn't
206 work when the return type is an aggregate type and the calling convention
207 says that the pointer to this aggregate is to be popped by the callee.
209 STACK_SIZE is the number of bytes of arguments on the stack,
210 ROUNDED_STACK_SIZE is that number rounded up to
211 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
212 both to put into the call insn and to generate explicit popping
215 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
216 It is zero if this call doesn't want a structure value.
218 NEXT_ARG_REG is the rtx that results from executing
219 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
220 just after all the args have had their registers assigned.
221 This could be whatever you like, but normally it is the first
222 arg-register beyond those used for args in this call,
223 or 0 if all the arg-registers are used in this call.
224 It is passed on to `gen_call' so you can put this info in the call insn.
226 VALREG is a hard register in which a value is returned,
227 or 0 if the call does not return a value.
229 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
230 the args to this call were processed.
231 We restore `inhibit_defer_pop' to that value.
233 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
234 denote registers used by the called function. */
237 emit_call_1 (rtx funexp
, tree fntree
, tree fndecl ATTRIBUTE_UNUSED
,
238 tree funtype ATTRIBUTE_UNUSED
,
239 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED
,
240 HOST_WIDE_INT rounded_stack_size
,
241 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED
,
242 rtx next_arg_reg ATTRIBUTE_UNUSED
, rtx valreg
,
243 int old_inhibit_defer_pop
, rtx call_fusage
, int ecf_flags
,
244 CUMULATIVE_ARGS
*args_so_far ATTRIBUTE_UNUSED
)
246 rtx rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
248 int already_popped
= 0;
249 HOST_WIDE_INT n_popped
= RETURN_POPS_ARGS (fndecl
, funtype
, stack_size
);
250 #if defined (HAVE_call) && defined (HAVE_call_value)
251 rtx struct_value_size_rtx
;
252 struct_value_size_rtx
= GEN_INT (struct_value_size
);
255 #ifdef CALL_POPS_ARGS
256 n_popped
+= CALL_POPS_ARGS (* args_so_far
);
259 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
260 and we don't want to load it into a register as an optimization,
261 because prepare_call_address already did it if it should be done. */
262 if (GET_CODE (funexp
) != SYMBOL_REF
)
263 funexp
= memory_address (FUNCTION_MODE
, funexp
);
265 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
266 if ((ecf_flags
& ECF_SIBCALL
)
267 && HAVE_sibcall_pop
&& HAVE_sibcall_value_pop
268 && (n_popped
> 0 || stack_size
== 0))
270 rtx n_pop
= GEN_INT (n_popped
);
273 /* If this subroutine pops its own args, record that in the call insn
274 if possible, for the sake of frame pointer elimination. */
277 pat
= GEN_SIBCALL_VALUE_POP (valreg
,
278 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
279 rounded_stack_size_rtx
, next_arg_reg
,
282 pat
= GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
283 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
285 emit_call_insn (pat
);
291 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
292 /* If the target has "call" or "call_value" insns, then prefer them
293 if no arguments are actually popped. If the target does not have
294 "call" or "call_value" insns, then we must use the popping versions
295 even if the call has no arguments to pop. */
296 #if defined (HAVE_call) && defined (HAVE_call_value)
297 if (HAVE_call
&& HAVE_call_value
&& HAVE_call_pop
&& HAVE_call_value_pop
298 && n_popped
> 0 && ! (ecf_flags
& ECF_SP_DEPRESSED
))
300 if (HAVE_call_pop
&& HAVE_call_value_pop
)
303 rtx n_pop
= GEN_INT (n_popped
);
306 /* If this subroutine pops its own args, record that in the call insn
307 if possible, for the sake of frame pointer elimination. */
310 pat
= GEN_CALL_VALUE_POP (valreg
,
311 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
312 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
314 pat
= GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
315 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
317 emit_call_insn (pat
);
323 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
324 if ((ecf_flags
& ECF_SIBCALL
)
325 && HAVE_sibcall
&& HAVE_sibcall_value
)
328 emit_call_insn (GEN_SIBCALL_VALUE (valreg
,
329 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
330 rounded_stack_size_rtx
,
331 next_arg_reg
, NULL_RTX
));
333 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
334 rounded_stack_size_rtx
, next_arg_reg
,
335 struct_value_size_rtx
));
340 #if defined (HAVE_call) && defined (HAVE_call_value)
341 if (HAVE_call
&& HAVE_call_value
)
344 emit_call_insn (GEN_CALL_VALUE (valreg
,
345 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
346 rounded_stack_size_rtx
, next_arg_reg
,
349 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
350 rounded_stack_size_rtx
, next_arg_reg
,
351 struct_value_size_rtx
));
357 /* Find the call we just emitted. */
358 call_insn
= last_call_insn ();
360 /* Mark memory as used for "pure" function call. */
361 if (ecf_flags
& ECF_PURE
)
365 gen_rtx_USE (VOIDmode
,
366 gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
))),
369 /* Put the register usage information there. */
370 add_function_usage_to (call_insn
, call_fusage
);
372 /* If this is a const call, then set the insn's unchanging bit. */
373 if (ecf_flags
& (ECF_CONST
| ECF_PURE
))
374 CONST_OR_PURE_CALL_P (call_insn
) = 1;
376 /* If this call can't throw, attach a REG_EH_REGION reg note to that
378 if (ecf_flags
& ECF_NOTHROW
)
379 REG_NOTES (call_insn
) = gen_rtx_EXPR_LIST (REG_EH_REGION
, const0_rtx
,
380 REG_NOTES (call_insn
));
383 int rn
= lookup_stmt_eh_region (fntree
);
385 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
386 throw, which we already took care of. */
388 REG_NOTES (call_insn
) = gen_rtx_EXPR_LIST (REG_EH_REGION
, GEN_INT (rn
),
389 REG_NOTES (call_insn
));
390 note_current_region_may_contain_throw ();
393 if (ecf_flags
& ECF_NORETURN
)
394 REG_NOTES (call_insn
) = gen_rtx_EXPR_LIST (REG_NORETURN
, const0_rtx
,
395 REG_NOTES (call_insn
));
396 if (ecf_flags
& ECF_ALWAYS_RETURN
)
397 REG_NOTES (call_insn
) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN
, const0_rtx
,
398 REG_NOTES (call_insn
));
400 if (ecf_flags
& ECF_RETURNS_TWICE
)
402 REG_NOTES (call_insn
) = gen_rtx_EXPR_LIST (REG_SETJMP
, const0_rtx
,
403 REG_NOTES (call_insn
));
404 current_function_calls_setjmp
= 1;
407 SIBLING_CALL_P (call_insn
) = ((ecf_flags
& ECF_SIBCALL
) != 0);
409 /* Restore this now, so that we do defer pops for this call's args
410 if the context of the call as a whole permits. */
411 inhibit_defer_pop
= old_inhibit_defer_pop
;
416 CALL_INSN_FUNCTION_USAGE (call_insn
)
417 = gen_rtx_EXPR_LIST (VOIDmode
,
418 gen_rtx_CLOBBER (VOIDmode
, stack_pointer_rtx
),
419 CALL_INSN_FUNCTION_USAGE (call_insn
));
420 rounded_stack_size
-= n_popped
;
421 rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
422 stack_pointer_delta
-= n_popped
;
425 if (!ACCUMULATE_OUTGOING_ARGS
)
427 /* If returning from the subroutine does not automatically pop the args,
428 we need an instruction to pop them sooner or later.
429 Perhaps do it now; perhaps just record how much space to pop later.
431 If returning from the subroutine does pop the args, indicate that the
432 stack pointer will be changed. */
434 if (rounded_stack_size
!= 0)
436 if (ecf_flags
& (ECF_SP_DEPRESSED
| ECF_NORETURN
| ECF_LONGJMP
))
437 /* Just pretend we did the pop. */
438 stack_pointer_delta
-= rounded_stack_size
;
439 else if (flag_defer_pop
&& inhibit_defer_pop
== 0
440 && ! (ecf_flags
& (ECF_CONST
| ECF_PURE
)))
441 pending_stack_adjust
+= rounded_stack_size
;
443 adjust_stack (rounded_stack_size_rtx
);
446 /* When we accumulate outgoing args, we must avoid any stack manipulations.
447 Restore the stack pointer to its original value now. Usually
448 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
449 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
450 popping variants of functions exist as well.
452 ??? We may optimize similar to defer_pop above, but it is
453 probably not worthwhile.
455 ??? It will be worthwhile to enable combine_stack_adjustments even for
458 anti_adjust_stack (GEN_INT (n_popped
));
461 /* Determine if the function identified by NAME and FNDECL is one with
462 special properties we wish to know about.
464 For example, if the function might return more than one time (setjmp), then
465 set RETURNS_TWICE to a nonzero value.
467 Similarly set LONGJMP for if the function is in the longjmp family.
469 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
470 space from the stack such as alloca. */
473 special_function_p (tree fndecl
, int flags
)
475 if (fndecl
&& DECL_NAME (fndecl
)
476 && IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) <= 17
477 /* Exclude functions not at the file scope, or not `extern',
478 since they are not the magic functions we would otherwise
480 FIXME: this should be handled with attributes, not with this
481 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
482 because you can declare fork() inside a function if you
484 && (DECL_CONTEXT (fndecl
) == NULL_TREE
485 || TREE_CODE (DECL_CONTEXT (fndecl
)) == TRANSLATION_UNIT_DECL
)
486 && TREE_PUBLIC (fndecl
))
488 const char *name
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
489 const char *tname
= name
;
491 /* We assume that alloca will always be called by name. It
492 makes no sense to pass it as a pointer-to-function to
493 anything that does not understand its behavior. */
494 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 6
496 && ! strcmp (name
, "alloca"))
497 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 16
499 && ! strcmp (name
, "__builtin_alloca"))))
500 flags
|= ECF_MAY_BE_ALLOCA
;
502 /* Disregard prefix _, __ or __x. */
505 if (name
[1] == '_' && name
[2] == 'x')
507 else if (name
[1] == '_')
516 && (! strcmp (tname
, "setjmp")
517 || ! strcmp (tname
, "setjmp_syscall")))
519 && ! strcmp (tname
, "sigsetjmp"))
521 && ! strcmp (tname
, "savectx")))
522 flags
|= ECF_RETURNS_TWICE
;
525 && ! strcmp (tname
, "siglongjmp"))
526 flags
|= ECF_LONGJMP
;
528 else if ((tname
[0] == 'q' && tname
[1] == 's'
529 && ! strcmp (tname
, "qsetjmp"))
530 || (tname
[0] == 'v' && tname
[1] == 'f'
531 && ! strcmp (tname
, "vfork")))
532 flags
|= ECF_RETURNS_TWICE
;
534 else if (tname
[0] == 'l' && tname
[1] == 'o'
535 && ! strcmp (tname
, "longjmp"))
536 flags
|= ECF_LONGJMP
;
542 /* Return nonzero when tree represent call to longjmp. */
545 setjmp_call_p (tree fndecl
)
547 return special_function_p (fndecl
, 0) & ECF_RETURNS_TWICE
;
550 /* Return true when exp contains alloca call. */
552 alloca_call_p (tree exp
)
554 if (TREE_CODE (exp
) == CALL_EXPR
555 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
556 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
558 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
559 0) & ECF_MAY_BE_ALLOCA
))
564 /* Detect flags (function attributes) from the function decl or type node. */
567 flags_from_decl_or_type (tree exp
)
574 struct cgraph_rtl_info
*i
= cgraph_rtl_info (exp
);
575 type
= TREE_TYPE (exp
);
579 if (i
->pure_function
)
580 flags
|= ECF_PURE
| ECF_LIBCALL_BLOCK
;
581 if (i
->const_function
)
582 flags
|= ECF_CONST
| ECF_LIBCALL_BLOCK
;
585 /* The function exp may have the `malloc' attribute. */
586 if (DECL_IS_MALLOC (exp
))
589 /* The function exp may have the `pure' attribute. */
590 if (DECL_IS_PURE (exp
))
591 flags
|= ECF_PURE
| ECF_LIBCALL_BLOCK
;
593 if (TREE_NOTHROW (exp
))
594 flags
|= ECF_NOTHROW
;
596 if (TREE_READONLY (exp
) && ! TREE_THIS_VOLATILE (exp
))
597 flags
|= ECF_LIBCALL_BLOCK
| ECF_CONST
;
599 flags
= special_function_p (exp
, flags
);
601 else if (TYPE_P (exp
) && TYPE_READONLY (exp
) && ! TREE_THIS_VOLATILE (exp
))
604 if (TREE_THIS_VOLATILE (exp
))
605 flags
|= ECF_NORETURN
;
607 /* Mark if the function returns with the stack pointer depressed. We
608 cannot consider it pure or constant in that case. */
609 if (TREE_CODE (type
) == FUNCTION_TYPE
&& TYPE_RETURNS_STACK_DEPRESSED (type
))
611 flags
|= ECF_SP_DEPRESSED
;
612 flags
&= ~(ECF_PURE
| ECF_CONST
| ECF_LIBCALL_BLOCK
);
618 /* Detect flags from a CALL_EXPR. */
621 call_expr_flags (tree t
)
624 tree decl
= get_callee_fndecl (t
);
627 flags
= flags_from_decl_or_type (decl
);
630 t
= TREE_TYPE (TREE_OPERAND (t
, 0));
631 if (t
&& TREE_CODE (t
) == POINTER_TYPE
)
632 flags
= flags_from_decl_or_type (TREE_TYPE (t
));
640 /* Precompute all register parameters as described by ARGS, storing values
641 into fields within the ARGS array.
643 NUM_ACTUALS indicates the total number elements in the ARGS array.
645 Set REG_PARM_SEEN if we encounter a register parameter. */
648 precompute_register_parameters (int num_actuals
, struct arg_data
*args
, int *reg_parm_seen
)
654 for (i
= 0; i
< num_actuals
; i
++)
655 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
)
659 if (args
[i
].value
== 0)
662 args
[i
].value
= expand_expr (args
[i
].tree_value
, NULL_RTX
,
664 preserve_temp_slots (args
[i
].value
);
667 /* ANSI doesn't require a sequence point here,
668 but PCC has one, so this will avoid some problems. */
672 /* If the value is a non-legitimate constant, force it into a
673 pseudo now. TLS symbols sometimes need a call to resolve. */
674 if (CONSTANT_P (args
[i
].value
)
675 && !LEGITIMATE_CONSTANT_P (args
[i
].value
))
676 args
[i
].value
= force_reg (args
[i
].mode
, args
[i
].value
);
678 /* If we are to promote the function arg to a wider mode,
681 if (args
[i
].mode
!= TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)))
683 = convert_modes (args
[i
].mode
,
684 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
685 args
[i
].value
, args
[i
].unsignedp
);
687 /* If the value is expensive, and we are inside an appropriately
688 short loop, put the value into a pseudo and then put the pseudo
691 For small register classes, also do this if this call uses
692 register parameters. This is to avoid reload conflicts while
693 loading the parameters registers. */
695 if ((! (REG_P (args
[i
].value
)
696 || (GET_CODE (args
[i
].value
) == SUBREG
697 && REG_P (SUBREG_REG (args
[i
].value
)))))
698 && args
[i
].mode
!= BLKmode
699 && rtx_cost (args
[i
].value
, SET
) > COSTS_N_INSNS (1)
700 && ((SMALL_REGISTER_CLASSES
&& *reg_parm_seen
)
701 || preserve_subexpressions_p ()))
702 args
[i
].value
= copy_to_mode_reg (args
[i
].mode
, args
[i
].value
);
706 #ifdef REG_PARM_STACK_SPACE
708 /* The argument list is the property of the called routine and it
709 may clobber it. If the fixed area has been used for previous
710 parameters, we must save and restore it. */
713 save_fixed_argument_area (int reg_parm_stack_space
, rtx argblock
, int *low_to_save
, int *high_to_save
)
718 /* Compute the boundary of the area that needs to be saved, if any. */
719 high
= reg_parm_stack_space
;
720 #ifdef ARGS_GROW_DOWNWARD
723 if (high
> highest_outgoing_arg_in_use
)
724 high
= highest_outgoing_arg_in_use
;
726 for (low
= 0; low
< high
; low
++)
727 if (stack_usage_map
[low
] != 0)
730 enum machine_mode save_mode
;
735 while (stack_usage_map
[--high
] == 0)
739 *high_to_save
= high
;
741 num_to_save
= high
- low
+ 1;
742 save_mode
= mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
744 /* If we don't have the required alignment, must do this
746 if ((low
& (MIN (GET_MODE_SIZE (save_mode
),
747 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
750 #ifdef ARGS_GROW_DOWNWARD
755 stack_area
= gen_rtx_MEM (save_mode
,
756 memory_address (save_mode
,
757 plus_constant (argblock
,
760 set_mem_align (stack_area
, PARM_BOUNDARY
);
761 if (save_mode
== BLKmode
)
763 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
764 emit_block_move (validize_mem (save_area
), stack_area
,
765 GEN_INT (num_to_save
), BLOCK_OP_CALL_PARM
);
769 save_area
= gen_reg_rtx (save_mode
);
770 emit_move_insn (save_area
, stack_area
);
780 restore_fixed_argument_area (rtx save_area
, rtx argblock
, int high_to_save
, int low_to_save
)
782 enum machine_mode save_mode
= GET_MODE (save_area
);
786 #ifdef ARGS_GROW_DOWNWARD
787 delta
= -high_to_save
;
791 stack_area
= gen_rtx_MEM (save_mode
,
792 memory_address (save_mode
,
793 plus_constant (argblock
, delta
)));
794 set_mem_align (stack_area
, PARM_BOUNDARY
);
796 if (save_mode
!= BLKmode
)
797 emit_move_insn (stack_area
, save_area
);
799 emit_block_move (stack_area
, validize_mem (save_area
),
800 GEN_INT (high_to_save
- low_to_save
+ 1),
803 #endif /* REG_PARM_STACK_SPACE */
805 /* If any elements in ARGS refer to parameters that are to be passed in
806 registers, but not in memory, and whose alignment does not permit a
807 direct copy into registers. Copy the values into a group of pseudos
808 which we will later copy into the appropriate hard registers.
810 Pseudos for each unaligned argument will be stored into the array
811 args[argnum].aligned_regs. The caller is responsible for deallocating
812 the aligned_regs array if it is nonzero. */
815 store_unaligned_arguments_into_pseudos (struct arg_data
*args
, int num_actuals
)
819 for (i
= 0; i
< num_actuals
; i
++)
820 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
821 && args
[i
].mode
== BLKmode
822 && (TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
))
823 < (unsigned int) MIN (BIGGEST_ALIGNMENT
, BITS_PER_WORD
)))
825 int bytes
= int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
826 int nregs
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
827 int endian_correction
= 0;
829 args
[i
].n_aligned_regs
= args
[i
].partial
? args
[i
].partial
: nregs
;
830 args
[i
].aligned_regs
= xmalloc (sizeof (rtx
) * args
[i
].n_aligned_regs
);
832 /* Structures smaller than a word are normally aligned to the
833 least significant byte. On a BYTES_BIG_ENDIAN machine,
834 this means we must skip the empty high order bytes when
835 calculating the bit offset. */
836 if (bytes
< UNITS_PER_WORD
837 #ifdef BLOCK_REG_PADDING
838 && (BLOCK_REG_PADDING (args
[i
].mode
,
839 TREE_TYPE (args
[i
].tree_value
), 1)
845 endian_correction
= BITS_PER_WORD
- bytes
* BITS_PER_UNIT
;
847 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
849 rtx reg
= gen_reg_rtx (word_mode
);
850 rtx word
= operand_subword_force (args
[i
].value
, j
, BLKmode
);
851 int bitsize
= MIN (bytes
* BITS_PER_UNIT
, BITS_PER_WORD
);
853 args
[i
].aligned_regs
[j
] = reg
;
854 word
= extract_bit_field (word
, bitsize
, 0, 1, NULL_RTX
,
855 word_mode
, word_mode
);
857 /* There is no need to restrict this code to loading items
858 in TYPE_ALIGN sized hunks. The bitfield instructions can
859 load up entire word sized registers efficiently.
861 ??? This may not be needed anymore.
862 We use to emit a clobber here but that doesn't let later
863 passes optimize the instructions we emit. By storing 0 into
864 the register later passes know the first AND to zero out the
865 bitfield being set in the register is unnecessary. The store
866 of 0 will be deleted as will at least the first AND. */
868 emit_move_insn (reg
, const0_rtx
);
870 bytes
-= bitsize
/ BITS_PER_UNIT
;
871 store_bit_field (reg
, bitsize
, endian_correction
, word_mode
,
877 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
880 NUM_ACTUALS is the total number of parameters.
882 N_NAMED_ARGS is the total number of named arguments.
884 FNDECL is the tree code for the target of this call (if known)
886 ARGS_SO_FAR holds state needed by the target to know where to place
889 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
890 for arguments which are passed in registers.
892 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
893 and may be modified by this routine.
895 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
896 flags which may may be modified by this routine.
898 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
899 that requires allocation of stack space.
901 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
902 the thunked-to function. */
905 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED
,
906 struct arg_data
*args
,
907 struct args_size
*args_size
,
908 int n_named_args ATTRIBUTE_UNUSED
,
909 tree actparms
, tree fndecl
,
910 CUMULATIVE_ARGS
*args_so_far
,
911 int reg_parm_stack_space
,
912 rtx
*old_stack_level
, int *old_pending_adj
,
913 int *must_preallocate
, int *ecf_flags
,
914 bool *may_tailcall
, bool call_from_thunk_p
)
916 /* 1 if scanning parms front to back, -1 if scanning back to front. */
919 /* Count arg position in order args appear. */
925 args_size
->constant
= 0;
928 /* In this loop, we consider args in the order they are written.
929 We fill up ARGS from the front or from the back if necessary
930 so that in any case the first arg to be pushed ends up at the front. */
932 if (PUSH_ARGS_REVERSED
)
934 i
= num_actuals
- 1, inc
= -1;
935 /* In this case, must reverse order of args
936 so that we compute and push the last arg first. */
943 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
944 for (p
= actparms
, argpos
= 0; p
; p
= TREE_CHAIN (p
), i
+= inc
, argpos
++)
946 tree type
= TREE_TYPE (TREE_VALUE (p
));
948 enum machine_mode mode
;
950 args
[i
].tree_value
= TREE_VALUE (p
);
952 /* Replace erroneous argument with constant zero. */
953 if (type
== error_mark_node
|| !COMPLETE_TYPE_P (type
))
954 args
[i
].tree_value
= integer_zero_node
, type
= integer_type_node
;
956 /* If TYPE is a transparent union, pass things the way we would
957 pass the first field of the union. We have already verified that
958 the modes are the same. */
959 if (TREE_CODE (type
) == UNION_TYPE
&& TYPE_TRANSPARENT_UNION (type
))
960 type
= TREE_TYPE (TYPE_FIELDS (type
));
962 /* Decide where to pass this arg.
964 args[i].reg is nonzero if all or part is passed in registers.
966 args[i].partial is nonzero if part but not all is passed in registers,
967 and the exact value says how many words are passed in registers.
969 args[i].pass_on_stack is nonzero if the argument must at least be
970 computed on the stack. It may then be loaded back into registers
971 if args[i].reg is nonzero.
973 These decisions are driven by the FUNCTION_... macros and must agree
974 with those made by function.c. */
976 /* See if this argument should be passed by invisible reference. */
977 if (pass_by_reference (args_so_far
, TYPE_MODE (type
),
978 type
, argpos
< n_named_args
))
980 /* If we're compiling a thunk, pass through invisible
981 references instead of making a copy. */
982 if (call_from_thunk_p
983 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far
, TYPE_MODE (type
),
984 type
, argpos
< n_named_args
)
985 /* If it's in a register, we must make a copy of it too. */
986 /* ??? Is this a sufficient test? Is there a better one? */
987 && !(TREE_CODE (args
[i
].tree_value
) == VAR_DECL
988 && REG_P (DECL_RTL (args
[i
].tree_value
)))
989 && ! TREE_ADDRESSABLE (type
))
992 /* C++ uses a TARGET_EXPR to indicate that we want to make a
993 new object from the argument. If we are passing by
994 invisible reference, the callee will do that for us, so we
995 can strip off the TARGET_EXPR. This is not always safe,
996 but it is safe in the only case where this is a useful
997 optimization; namely, when the argument is a plain object.
998 In that case, the frontend is just asking the backend to
999 make a bitwise copy of the argument. */
1001 if (TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
1002 && (DECL_P (TREE_OPERAND (args
[i
].tree_value
, 1)))
1003 && ! REG_P (DECL_RTL (TREE_OPERAND (args
[i
].tree_value
, 1))))
1004 args
[i
].tree_value
= TREE_OPERAND (args
[i
].tree_value
, 1);
1006 /* We can't use sibcalls if a callee-copied argument is stored
1007 in the current function's frame. */
1008 if (!call_from_thunk_p
1009 && (!DECL_P (args
[i
].tree_value
)
1010 || !TREE_STATIC (args
[i
].tree_value
)))
1011 *may_tailcall
= false;
1013 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1014 build_pointer_type (type
),
1015 args
[i
].tree_value
);
1016 type
= build_pointer_type (type
);
1018 else if (TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
)
1020 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1021 We implement this by passing the address of the temporary
1022 rather than expanding it into another allocated slot. */
1023 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1024 build_pointer_type (type
),
1025 args
[i
].tree_value
);
1026 type
= build_pointer_type (type
);
1027 *may_tailcall
= false;
1031 /* We make a copy of the object and pass the address to the
1032 function being called. */
1035 if (!COMPLETE_TYPE_P (type
)
1036 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
1037 || (flag_stack_check
&& ! STACK_CHECK_BUILTIN
1038 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type
),
1039 STACK_CHECK_MAX_VAR_SIZE
))))
1041 /* This is a variable-sized object. Make space on the stack
1043 rtx size_rtx
= expr_size (TREE_VALUE (p
));
1045 if (*old_stack_level
== 0)
1047 emit_stack_save (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1048 *old_pending_adj
= pending_stack_adjust
;
1049 pending_stack_adjust
= 0;
1052 copy
= gen_rtx_MEM (BLKmode
,
1053 allocate_dynamic_stack_space
1054 (size_rtx
, NULL_RTX
, TYPE_ALIGN (type
)));
1055 set_mem_attributes (copy
, type
, 1);
1058 copy
= assign_temp (type
, 0, 1, 0);
1060 store_expr (args
[i
].tree_value
, copy
, 0);
1061 *ecf_flags
&= ~(ECF_CONST
| ECF_PURE
| ECF_LIBCALL_BLOCK
);
1063 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1064 build_pointer_type (type
),
1065 make_tree (type
, copy
));
1066 type
= build_pointer_type (type
);
1067 *may_tailcall
= false;
1071 mode
= TYPE_MODE (type
);
1072 unsignedp
= TYPE_UNSIGNED (type
);
1074 if (targetm
.calls
.promote_function_args (fndecl
? TREE_TYPE (fndecl
) : 0))
1075 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
1077 args
[i
].unsignedp
= unsignedp
;
1078 args
[i
].mode
= mode
;
1080 args
[i
].reg
= FUNCTION_ARG (*args_so_far
, mode
, type
,
1081 argpos
< n_named_args
);
1082 #ifdef FUNCTION_INCOMING_ARG
1083 /* If this is a sibling call and the machine has register windows, the
1084 register window has to be unwinded before calling the routine, so
1085 arguments have to go into the incoming registers. */
1086 args
[i
].tail_call_reg
= FUNCTION_INCOMING_ARG (*args_so_far
, mode
, type
,
1087 argpos
< n_named_args
);
1089 args
[i
].tail_call_reg
= args
[i
].reg
;
1094 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far
, mode
, type
,
1095 argpos
< n_named_args
);
1097 args
[i
].pass_on_stack
= targetm
.calls
.must_pass_in_stack (mode
, type
);
1099 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1100 it means that we are to pass this arg in the register(s) designated
1101 by the PARALLEL, but also to pass it in the stack. */
1102 if (args
[i
].reg
&& GET_CODE (args
[i
].reg
) == PARALLEL
1103 && XEXP (XVECEXP (args
[i
].reg
, 0, 0), 0) == 0)
1104 args
[i
].pass_on_stack
= 1;
1106 /* If this is an addressable type, we must preallocate the stack
1107 since we must evaluate the object into its final location.
1109 If this is to be passed in both registers and the stack, it is simpler
1111 if (TREE_ADDRESSABLE (type
)
1112 || (args
[i
].pass_on_stack
&& args
[i
].reg
!= 0))
1113 *must_preallocate
= 1;
1115 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1116 we cannot consider this function call constant. */
1117 if (TREE_ADDRESSABLE (type
))
1118 *ecf_flags
&= ~ECF_LIBCALL_BLOCK
;
1120 /* Compute the stack-size of this argument. */
1121 if (args
[i
].reg
== 0 || args
[i
].partial
!= 0
1122 || reg_parm_stack_space
> 0
1123 || args
[i
].pass_on_stack
)
1124 locate_and_pad_parm (mode
, type
,
1125 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1130 args
[i
].pass_on_stack
? 0 : args
[i
].partial
,
1131 fndecl
, args_size
, &args
[i
].locate
);
1132 #ifdef BLOCK_REG_PADDING
1134 /* The argument is passed entirely in registers. See at which
1135 end it should be padded. */
1136 args
[i
].locate
.where_pad
=
1137 BLOCK_REG_PADDING (mode
, type
,
1138 int_size_in_bytes (type
) <= UNITS_PER_WORD
);
1141 /* Update ARGS_SIZE, the total stack space for args so far. */
1143 args_size
->constant
+= args
[i
].locate
.size
.constant
;
1144 if (args
[i
].locate
.size
.var
)
1145 ADD_PARM_SIZE (*args_size
, args
[i
].locate
.size
.var
);
1147 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1148 have been used, etc. */
1150 FUNCTION_ARG_ADVANCE (*args_so_far
, TYPE_MODE (type
), type
,
1151 argpos
< n_named_args
);
1155 /* Update ARGS_SIZE to contain the total size for the argument block.
1156 Return the original constant component of the argument block's size.
1158 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1159 for arguments passed in registers. */
1162 compute_argument_block_size (int reg_parm_stack_space
,
1163 struct args_size
*args_size
,
1164 int preferred_stack_boundary ATTRIBUTE_UNUSED
)
1166 int unadjusted_args_size
= args_size
->constant
;
1168 /* For accumulate outgoing args mode we don't need to align, since the frame
1169 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1170 backends from generating misaligned frame sizes. */
1171 if (ACCUMULATE_OUTGOING_ARGS
&& preferred_stack_boundary
> STACK_BOUNDARY
)
1172 preferred_stack_boundary
= STACK_BOUNDARY
;
1174 /* Compute the actual size of the argument block required. The variable
1175 and constant sizes must be combined, the size may have to be rounded,
1176 and there may be a minimum required size. */
1180 args_size
->var
= ARGS_SIZE_TREE (*args_size
);
1181 args_size
->constant
= 0;
1183 preferred_stack_boundary
/= BITS_PER_UNIT
;
1184 if (preferred_stack_boundary
> 1)
1186 /* We don't handle this case yet. To handle it correctly we have
1187 to add the delta, round and subtract the delta.
1188 Currently no machine description requires this support. */
1189 if (stack_pointer_delta
& (preferred_stack_boundary
- 1))
1191 args_size
->var
= round_up (args_size
->var
, preferred_stack_boundary
);
1194 if (reg_parm_stack_space
> 0)
1197 = size_binop (MAX_EXPR
, args_size
->var
,
1198 ssize_int (reg_parm_stack_space
));
1200 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1201 /* The area corresponding to register parameters is not to count in
1202 the size of the block we need. So make the adjustment. */
1204 = size_binop (MINUS_EXPR
, args_size
->var
,
1205 ssize_int (reg_parm_stack_space
));
1211 preferred_stack_boundary
/= BITS_PER_UNIT
;
1212 if (preferred_stack_boundary
< 1)
1213 preferred_stack_boundary
= 1;
1214 args_size
->constant
= (((args_size
->constant
1215 + stack_pointer_delta
1216 + preferred_stack_boundary
- 1)
1217 / preferred_stack_boundary
1218 * preferred_stack_boundary
)
1219 - stack_pointer_delta
);
1221 args_size
->constant
= MAX (args_size
->constant
,
1222 reg_parm_stack_space
);
1224 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1225 args_size
->constant
-= reg_parm_stack_space
;
1228 return unadjusted_args_size
;
1231 /* Precompute parameters as needed for a function call.
1233 FLAGS is mask of ECF_* constants.
1235 NUM_ACTUALS is the number of arguments.
1237 ARGS is an array containing information for each argument; this
1238 routine fills in the INITIAL_VALUE and VALUE fields for each
1239 precomputed argument. */
1242 precompute_arguments (int flags
, int num_actuals
, struct arg_data
*args
)
1246 /* If this is a libcall, then precompute all arguments so that we do not
1247 get extraneous instructions emitted as part of the libcall sequence. */
1248 if ((flags
& ECF_LIBCALL_BLOCK
) == 0)
1251 for (i
= 0; i
< num_actuals
; i
++)
1253 enum machine_mode mode
;
1255 /* If this is an addressable type, we cannot pre-evaluate it. */
1256 if (TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
)))
1260 = expand_expr (args
[i
].tree_value
, NULL_RTX
, VOIDmode
, 0);
1262 /* ANSI doesn't require a sequence point here,
1263 but PCC has one, so this will avoid some problems. */
1266 args
[i
].initial_value
= args
[i
].value
1267 = protect_from_queue (args
[i
].value
, 0);
1269 mode
= TYPE_MODE (TREE_TYPE (args
[i
].tree_value
));
1270 if (mode
!= args
[i
].mode
)
1273 = convert_modes (args
[i
].mode
, mode
,
1274 args
[i
].value
, args
[i
].unsignedp
);
1275 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1276 /* CSE will replace this only if it contains args[i].value
1277 pseudo, so convert it down to the declared mode using
1279 if (REG_P (args
[i
].value
)
1280 && GET_MODE_CLASS (args
[i
].mode
) == MODE_INT
)
1282 args
[i
].initial_value
1283 = gen_lowpart_SUBREG (mode
, args
[i
].value
);
1284 SUBREG_PROMOTED_VAR_P (args
[i
].initial_value
) = 1;
1285 SUBREG_PROMOTED_UNSIGNED_SET (args
[i
].initial_value
,
1293 /* Given the current state of MUST_PREALLOCATE and information about
1294 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1295 compute and return the final value for MUST_PREALLOCATE. */
1298 finalize_must_preallocate (int must_preallocate
, int num_actuals
, struct arg_data
*args
, struct args_size
*args_size
)
1300 /* See if we have or want to preallocate stack space.
1302 If we would have to push a partially-in-regs parm
1303 before other stack parms, preallocate stack space instead.
1305 If the size of some parm is not a multiple of the required stack
1306 alignment, we must preallocate.
1308 If the total size of arguments that would otherwise create a copy in
1309 a temporary (such as a CALL) is more than half the total argument list
1310 size, preallocation is faster.
1312 Another reason to preallocate is if we have a machine (like the m88k)
1313 where stack alignment is required to be maintained between every
1314 pair of insns, not just when the call is made. However, we assume here
1315 that such machines either do not have push insns (and hence preallocation
1316 would occur anyway) or the problem is taken care of with
1319 if (! must_preallocate
)
1321 int partial_seen
= 0;
1322 int copy_to_evaluate_size
= 0;
1325 for (i
= 0; i
< num_actuals
&& ! must_preallocate
; i
++)
1327 if (args
[i
].partial
> 0 && ! args
[i
].pass_on_stack
)
1329 else if (partial_seen
&& args
[i
].reg
== 0)
1330 must_preallocate
= 1;
1332 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
1333 && (TREE_CODE (args
[i
].tree_value
) == CALL_EXPR
1334 || TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
1335 || TREE_CODE (args
[i
].tree_value
) == COND_EXPR
1336 || TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
))))
1337 copy_to_evaluate_size
1338 += int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
1341 if (copy_to_evaluate_size
* 2 >= args_size
->constant
1342 && args_size
->constant
> 0)
1343 must_preallocate
= 1;
1345 return must_preallocate
;
1348 /* If we preallocated stack space, compute the address of each argument
1349 and store it into the ARGS array.
1351 We need not ensure it is a valid memory address here; it will be
1352 validized when it is used.
1354 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1357 compute_argument_addresses (struct arg_data
*args
, rtx argblock
, int num_actuals
)
1361 rtx arg_reg
= argblock
;
1362 int i
, arg_offset
= 0;
1364 if (GET_CODE (argblock
) == PLUS
)
1365 arg_reg
= XEXP (argblock
, 0), arg_offset
= INTVAL (XEXP (argblock
, 1));
1367 for (i
= 0; i
< num_actuals
; i
++)
1369 rtx offset
= ARGS_SIZE_RTX (args
[i
].locate
.offset
);
1370 rtx slot_offset
= ARGS_SIZE_RTX (args
[i
].locate
.slot_offset
);
1373 /* Skip this parm if it will not be passed on the stack. */
1374 if (! args
[i
].pass_on_stack
&& args
[i
].reg
!= 0)
1377 if (GET_CODE (offset
) == CONST_INT
)
1378 addr
= plus_constant (arg_reg
, INTVAL (offset
));
1380 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, offset
);
1382 addr
= plus_constant (addr
, arg_offset
);
1383 args
[i
].stack
= gen_rtx_MEM (args
[i
].mode
, addr
);
1384 set_mem_align (args
[i
].stack
, PARM_BOUNDARY
);
1385 set_mem_attributes (args
[i
].stack
,
1386 TREE_TYPE (args
[i
].tree_value
), 1);
1388 if (GET_CODE (slot_offset
) == CONST_INT
)
1389 addr
= plus_constant (arg_reg
, INTVAL (slot_offset
));
1391 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, slot_offset
);
1393 addr
= plus_constant (addr
, arg_offset
);
1394 args
[i
].stack_slot
= gen_rtx_MEM (args
[i
].mode
, addr
);
1395 set_mem_align (args
[i
].stack_slot
, PARM_BOUNDARY
);
1396 set_mem_attributes (args
[i
].stack_slot
,
1397 TREE_TYPE (args
[i
].tree_value
), 1);
1399 /* Function incoming arguments may overlap with sibling call
1400 outgoing arguments and we cannot allow reordering of reads
1401 from function arguments with stores to outgoing arguments
1402 of sibling calls. */
1403 set_mem_alias_set (args
[i
].stack
, 0);
1404 set_mem_alias_set (args
[i
].stack_slot
, 0);
1409 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1410 in a call instruction.
1412 FNDECL is the tree node for the target function. For an indirect call
1413 FNDECL will be NULL_TREE.
1415 ADDR is the operand 0 of CALL_EXPR for this call. */
1418 rtx_for_function_call (tree fndecl
, tree addr
)
1422 /* Get the function to call, in the form of RTL. */
1425 /* If this is the first use of the function, see if we need to
1426 make an external definition for it. */
1427 if (! TREE_USED (fndecl
))
1429 assemble_external (fndecl
);
1430 TREE_USED (fndecl
) = 1;
1433 /* Get a SYMBOL_REF rtx for the function address. */
1434 funexp
= XEXP (DECL_RTL (fndecl
), 0);
1437 /* Generate an rtx (probably a pseudo-register) for the address. */
1440 funexp
= expand_expr (addr
, NULL_RTX
, VOIDmode
, 0);
1441 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1447 /* Do the register loads required for any wholly-register parms or any
1448 parms which are passed both on the stack and in a register. Their
1449 expressions were already evaluated.
1451 Mark all register-parms as living through the call, putting these USE
1452 insns in the CALL_INSN_FUNCTION_USAGE field.
1454 When IS_SIBCALL, perform the check_sibcall_overlap_argument_overlap
1455 checking, setting *SIBCALL_FAILURE if appropriate. */
1458 load_register_parameters (struct arg_data
*args
, int num_actuals
,
1459 rtx
*call_fusage
, int flags
, int is_sibcall
,
1460 int *sibcall_failure
)
1464 for (i
= 0; i
< num_actuals
; i
++)
1466 rtx reg
= ((flags
& ECF_SIBCALL
)
1467 ? args
[i
].tail_call_reg
: args
[i
].reg
);
1470 int partial
= args
[i
].partial
;
1473 rtx before_arg
= get_last_insn ();
1474 /* Set to non-negative if must move a word at a time, even if just
1475 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1476 we just use a normal move insn. This value can be zero if the
1477 argument is a zero size structure with no fields. */
1481 else if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
)
1483 size
= int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
1484 nregs
= (size
+ (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
1487 size
= GET_MODE_SIZE (args
[i
].mode
);
1489 /* Handle calls that pass values in multiple non-contiguous
1490 locations. The Irix 6 ABI has examples of this. */
1492 if (GET_CODE (reg
) == PARALLEL
)
1494 tree type
= TREE_TYPE (args
[i
].tree_value
);
1495 emit_group_load (reg
, args
[i
].value
, type
,
1496 int_size_in_bytes (type
));
1499 /* If simple case, just do move. If normal partial, store_one_arg
1500 has already loaded the register for us. In all other cases,
1501 load the register(s) from memory. */
1503 else if (nregs
== -1)
1505 emit_move_insn (reg
, args
[i
].value
);
1506 #ifdef BLOCK_REG_PADDING
1507 /* Handle case where we have a value that needs shifting
1508 up to the msb. eg. a QImode value and we're padding
1509 upward on a BYTES_BIG_ENDIAN machine. */
1510 if (size
< UNITS_PER_WORD
1511 && (args
[i
].locate
.where_pad
1512 == (BYTES_BIG_ENDIAN
? upward
: downward
)))
1515 int shift
= (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
1517 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1518 report the whole reg as used. Strictly speaking, the
1519 call only uses SIZE bytes at the msb end, but it doesn't
1520 seem worth generating rtl to say that. */
1521 reg
= gen_rtx_REG (word_mode
, REGNO (reg
));
1522 x
= expand_shift (LSHIFT_EXPR
, word_mode
, reg
,
1523 build_int_2 (shift
, 0), reg
, 1);
1525 emit_move_insn (reg
, x
);
1530 /* If we have pre-computed the values to put in the registers in
1531 the case of non-aligned structures, copy them in now. */
1533 else if (args
[i
].n_aligned_regs
!= 0)
1534 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
1535 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
) + j
),
1536 args
[i
].aligned_regs
[j
]);
1538 else if (partial
== 0 || args
[i
].pass_on_stack
)
1540 rtx mem
= validize_mem (args
[i
].value
);
1542 /* Handle a BLKmode that needs shifting. */
1543 if (nregs
== 1 && size
< UNITS_PER_WORD
1544 #ifdef BLOCK_REG_PADDING
1545 && args
[i
].locate
.where_pad
== downward
1551 rtx tem
= operand_subword_force (mem
, 0, args
[i
].mode
);
1552 rtx ri
= gen_rtx_REG (word_mode
, REGNO (reg
));
1553 rtx x
= gen_reg_rtx (word_mode
);
1554 int shift
= (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
1555 enum tree_code dir
= BYTES_BIG_ENDIAN
? RSHIFT_EXPR
1558 emit_move_insn (x
, tem
);
1559 x
= expand_shift (dir
, word_mode
, x
,
1560 build_int_2 (shift
, 0), ri
, 1);
1562 emit_move_insn (ri
, x
);
1565 move_block_to_reg (REGNO (reg
), mem
, nregs
, args
[i
].mode
);
1568 /* When a parameter is a block, and perhaps in other cases, it is
1569 possible that it did a load from an argument slot that was
1570 already clobbered. */
1572 && check_sibcall_argument_overlap (before_arg
, &args
[i
], 0))
1573 *sibcall_failure
= 1;
1575 /* Handle calls that pass values in multiple non-contiguous
1576 locations. The Irix 6 ABI has examples of this. */
1577 if (GET_CODE (reg
) == PARALLEL
)
1578 use_group_regs (call_fusage
, reg
);
1579 else if (nregs
== -1)
1580 use_reg (call_fusage
, reg
);
1582 use_regs (call_fusage
, REGNO (reg
), nregs
== 0 ? 1 : nregs
);
1587 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1588 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1589 bytes, then we would need to push some additional bytes to pad the
1590 arguments. So, we compute an adjust to the stack pointer for an
1591 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1592 bytes. Then, when the arguments are pushed the stack will be perfectly
1593 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1594 be popped after the call. Returns the adjustment. */
1597 combine_pending_stack_adjustment_and_call (int unadjusted_args_size
,
1598 struct args_size
*args_size
,
1599 int preferred_unit_stack_boundary
)
1601 /* The number of bytes to pop so that the stack will be
1602 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1603 HOST_WIDE_INT adjustment
;
1604 /* The alignment of the stack after the arguments are pushed, if we
1605 just pushed the arguments without adjust the stack here. */
1606 HOST_WIDE_INT unadjusted_alignment
;
1608 unadjusted_alignment
1609 = ((stack_pointer_delta
+ unadjusted_args_size
)
1610 % preferred_unit_stack_boundary
);
1612 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1613 as possible -- leaving just enough left to cancel out the
1614 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1615 PENDING_STACK_ADJUST is non-negative, and congruent to
1616 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1618 /* Begin by trying to pop all the bytes. */
1619 unadjusted_alignment
1620 = (unadjusted_alignment
1621 - (pending_stack_adjust
% preferred_unit_stack_boundary
));
1622 adjustment
= pending_stack_adjust
;
1623 /* Push enough additional bytes that the stack will be aligned
1624 after the arguments are pushed. */
1625 if (preferred_unit_stack_boundary
> 1)
1627 if (unadjusted_alignment
> 0)
1628 adjustment
-= preferred_unit_stack_boundary
- unadjusted_alignment
;
1630 adjustment
+= unadjusted_alignment
;
1633 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
1634 bytes after the call. The right number is the entire
1635 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1636 by the arguments in the first place. */
1638 = pending_stack_adjust
- adjustment
+ unadjusted_args_size
;
1643 /* Scan X expression if it does not dereference any argument slots
1644 we already clobbered by tail call arguments (as noted in stored_args_map
1646 Return nonzero if X expression dereferences such argument slots,
1650 check_sibcall_argument_overlap_1 (rtx x
)
1660 code
= GET_CODE (x
);
1664 if (XEXP (x
, 0) == current_function_internal_arg_pointer
)
1666 else if (GET_CODE (XEXP (x
, 0)) == PLUS
1667 && XEXP (XEXP (x
, 0), 0) ==
1668 current_function_internal_arg_pointer
1669 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
)
1670 i
= INTVAL (XEXP (XEXP (x
, 0), 1));
1674 #ifdef ARGS_GROW_DOWNWARD
1675 i
= -i
- GET_MODE_SIZE (GET_MODE (x
));
1678 for (k
= 0; k
< GET_MODE_SIZE (GET_MODE (x
)); k
++)
1679 if (i
+ k
< stored_args_map
->n_bits
1680 && TEST_BIT (stored_args_map
, i
+ k
))
1686 /* Scan all subexpressions. */
1687 fmt
= GET_RTX_FORMAT (code
);
1688 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
1692 if (check_sibcall_argument_overlap_1 (XEXP (x
, i
)))
1695 else if (*fmt
== 'E')
1697 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1698 if (check_sibcall_argument_overlap_1 (XVECEXP (x
, i
, j
)))
1705 /* Scan sequence after INSN if it does not dereference any argument slots
1706 we already clobbered by tail call arguments (as noted in stored_args_map
1707 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
1708 stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
1709 should be 0). Return nonzero if sequence after INSN dereferences such argument
1710 slots, zero otherwise. */
1713 check_sibcall_argument_overlap (rtx insn
, struct arg_data
*arg
, int mark_stored_args_map
)
1717 if (insn
== NULL_RTX
)
1718 insn
= get_insns ();
1720 insn
= NEXT_INSN (insn
);
1722 for (; insn
; insn
= NEXT_INSN (insn
))
1724 && check_sibcall_argument_overlap_1 (PATTERN (insn
)))
1727 if (mark_stored_args_map
)
1729 #ifdef ARGS_GROW_DOWNWARD
1730 low
= -arg
->locate
.slot_offset
.constant
- arg
->locate
.size
.constant
;
1732 low
= arg
->locate
.slot_offset
.constant
;
1735 for (high
= low
+ arg
->locate
.size
.constant
; low
< high
; low
++)
1736 SET_BIT (stored_args_map
, low
);
1738 return insn
!= NULL_RTX
;
1742 fix_unsafe_tree (tree t
)
1744 switch (unsafe_for_reeval (t
))
1749 case 1: /* Mildly unsafe. */
1750 t
= unsave_expr (t
);
1753 case 2: /* Wildly unsafe. */
1755 tree var
= build_decl (VAR_DECL
, NULL_TREE
,
1758 expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
));
1770 /* If function value *VALUE was returned at the most significant end of a
1771 register, shift it towards the least significant end and convert it to
1772 TYPE's mode. Return true and update *VALUE if some action was needed.
1774 TYPE is the type of the function's return value, which is known not
1775 to have mode BLKmode. */
1778 shift_returned_value (tree type
, rtx
*value
)
1780 if (targetm
.calls
.return_in_msb (type
))
1782 HOST_WIDE_INT shift
;
1784 shift
= (GET_MODE_BITSIZE (GET_MODE (*value
))
1785 - BITS_PER_UNIT
* int_size_in_bytes (type
));
1788 /* Shift the value into the low part of the register. */
1789 *value
= expand_binop (GET_MODE (*value
), lshr_optab
, *value
,
1790 GEN_INT (shift
), 0, 1, OPTAB_WIDEN
);
1792 /* Truncate it to the type's mode, or its integer equivalent.
1793 This is subject to TRULY_NOOP_TRUNCATION. */
1794 *value
= convert_to_mode (int_mode_for_mode (TYPE_MODE (type
)),
1797 /* Now convert it to the final form. */
1798 *value
= gen_lowpart (TYPE_MODE (type
), *value
);
1805 /* Remove all REG_EQUIV notes found in the insn chain. */
1808 purge_reg_equiv_notes (void)
1812 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1816 rtx note
= find_reg_note (insn
, REG_EQUIV
, 0);
1819 /* Remove the note and keep looking at the notes for
1821 remove_note (insn
, note
);
1829 /* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
1832 purge_mem_unchanging_flag (rtx x
)
1841 code
= GET_CODE (x
);
1845 if (RTX_UNCHANGING_P (x
)
1846 && (XEXP (x
, 0) == current_function_internal_arg_pointer
1847 || (GET_CODE (XEXP (x
, 0)) == PLUS
1848 && XEXP (XEXP (x
, 0), 0) ==
1849 current_function_internal_arg_pointer
1850 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
)))
1851 RTX_UNCHANGING_P (x
) = 0;
1855 /* Scan all subexpressions. */
1856 fmt
= GET_RTX_FORMAT (code
);
1857 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
1860 purge_mem_unchanging_flag (XEXP (x
, i
));
1861 else if (*fmt
== 'E')
1862 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1863 purge_mem_unchanging_flag (XVECEXP (x
, i
, j
));
1868 /* Generate all the code for a function call
1869 and return an rtx for its value.
1870 Store the value in TARGET (specified as an rtx) if convenient.
1871 If the value is stored in TARGET then TARGET is returned.
1872 If IGNORE is nonzero, then we ignore the value of the function call. */
1875 expand_call (tree exp
, rtx target
, int ignore
)
1877 /* Nonzero if we are currently expanding a call. */
1878 static int currently_expanding_call
= 0;
1880 /* List of actual parameters. */
1881 tree actparms
= TREE_OPERAND (exp
, 1);
1882 /* RTX for the function to be called. */
1884 /* Sequence of insns to perform a normal "call". */
1885 rtx normal_call_insns
= NULL_RTX
;
1886 /* Sequence of insns to perform a tail "call". */
1887 rtx tail_call_insns
= NULL_RTX
;
1888 /* Data type of the function. */
1890 tree type_arg_types
;
1891 /* Declaration of the function being called,
1892 or 0 if the function is computed (not known by name). */
1894 /* The type of the function being called. */
1896 bool try_tail_call
= CALL_EXPR_TAILCALL (exp
);
1899 /* Register in which non-BLKmode value will be returned,
1900 or 0 if no value or if value is BLKmode. */
1902 /* Address where we should return a BLKmode value;
1903 0 if value not BLKmode. */
1904 rtx structure_value_addr
= 0;
1905 /* Nonzero if that address is being passed by treating it as
1906 an extra, implicit first parameter. Otherwise,
1907 it is passed by being copied directly into struct_value_rtx. */
1908 int structure_value_addr_parm
= 0;
1909 /* Size of aggregate value wanted, or zero if none wanted
1910 or if we are using the non-reentrant PCC calling convention
1911 or expecting the value in registers. */
1912 HOST_WIDE_INT struct_value_size
= 0;
1913 /* Nonzero if called function returns an aggregate in memory PCC style,
1914 by returning the address of where to find it. */
1915 int pcc_struct_value
= 0;
1916 rtx struct_value
= 0;
1918 /* Number of actual parameters in this call, including struct value addr. */
1920 /* Number of named args. Args after this are anonymous ones
1921 and they must all go on the stack. */
1924 /* Vector of information about each argument.
1925 Arguments are numbered in the order they will be pushed,
1926 not the order they are written. */
1927 struct arg_data
*args
;
1929 /* Total size in bytes of all the stack-parms scanned so far. */
1930 struct args_size args_size
;
1931 struct args_size adjusted_args_size
;
1932 /* Size of arguments before any adjustments (such as rounding). */
1933 int unadjusted_args_size
;
1934 /* Data on reg parms scanned so far. */
1935 CUMULATIVE_ARGS args_so_far
;
1936 /* Nonzero if a reg parm has been scanned. */
1938 /* Nonzero if this is an indirect function call. */
1940 /* Nonzero if we must avoid push-insns in the args for this call.
1941 If stack space is allocated for register parameters, but not by the
1942 caller, then it is preallocated in the fixed part of the stack frame.
1943 So the entire argument block must then be preallocated (i.e., we
1944 ignore PUSH_ROUNDING in that case). */
1946 int must_preallocate
= !PUSH_ARGS
;
1948 /* Size of the stack reserved for parameter registers. */
1949 int reg_parm_stack_space
= 0;
1951 /* Address of space preallocated for stack parms
1952 (on machines that lack push insns), or 0 if space not preallocated. */
1955 /* Mask of ECF_ flags. */
1957 #ifdef REG_PARM_STACK_SPACE
1958 /* Define the boundary of the register parm stack space that needs to be
1960 int low_to_save
, high_to_save
;
1961 rtx save_area
= 0; /* Place that it is saved */
1964 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
1965 char *initial_stack_usage_map
= stack_usage_map
;
1967 int old_stack_allocated
;
1969 /* State variables to track stack modifications. */
1970 rtx old_stack_level
= 0;
1971 int old_stack_arg_under_construction
= 0;
1972 int old_pending_adj
= 0;
1973 int old_inhibit_defer_pop
= inhibit_defer_pop
;
1975 /* Some stack pointer alterations we make are performed via
1976 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1977 which we then also need to save/restore along the way. */
1978 int old_stack_pointer_delta
= 0;
1981 tree p
= TREE_OPERAND (exp
, 0);
1982 tree addr
= TREE_OPERAND (exp
, 0);
1984 /* The alignment of the stack, in bits. */
1985 HOST_WIDE_INT preferred_stack_boundary
;
1986 /* The alignment of the stack, in bytes. */
1987 HOST_WIDE_INT preferred_unit_stack_boundary
;
1988 /* The static chain value to use for this call. */
1989 rtx static_chain_value
;
1990 /* See if this is "nothrow" function call. */
1991 if (TREE_NOTHROW (exp
))
1992 flags
|= ECF_NOTHROW
;
1994 /* See if we can find a DECL-node for the actual function, and get the
1995 function attributes (flags) from the function decl or type node. */
1996 fndecl
= get_callee_fndecl (exp
);
1999 fntype
= TREE_TYPE (fndecl
);
2000 flags
|= flags_from_decl_or_type (fndecl
);
2004 fntype
= TREE_TYPE (TREE_TYPE (p
));
2005 flags
|= flags_from_decl_or_type (fntype
);
2008 struct_value
= targetm
.calls
.struct_value_rtx (fntype
, 0);
2010 /* Warn if this value is an aggregate type,
2011 regardless of which calling convention we are using for it. */
2012 if (warn_aggregate_return
&& AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
2013 warning ("function call has aggregate value");
2015 /* If the result of a pure or const function call is ignored (or void),
2016 and none of its arguments are volatile, we can avoid expanding the
2017 call and just evaluate the arguments for side-effects. */
2018 if ((flags
& (ECF_CONST
| ECF_PURE
))
2019 && (ignore
|| target
== const0_rtx
2020 || TYPE_MODE (TREE_TYPE (exp
)) == VOIDmode
))
2022 bool volatilep
= false;
2025 for (arg
= actparms
; arg
; arg
= TREE_CHAIN (arg
))
2026 if (TREE_THIS_VOLATILE (TREE_VALUE (arg
)))
2034 for (arg
= actparms
; arg
; arg
= TREE_CHAIN (arg
))
2035 expand_expr (TREE_VALUE (arg
), const0_rtx
,
2036 VOIDmode
, EXPAND_NORMAL
);
2041 #ifdef REG_PARM_STACK_SPACE
2042 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
2045 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2046 if (reg_parm_stack_space
> 0 && PUSH_ARGS
)
2047 must_preallocate
= 1;
2050 /* Set up a place to return a structure. */
2052 /* Cater to broken compilers. */
2053 if (aggregate_value_p (exp
, fndecl
))
2055 /* This call returns a big structure. */
2056 flags
&= ~(ECF_CONST
| ECF_PURE
| ECF_LIBCALL_BLOCK
);
2058 #ifdef PCC_STATIC_STRUCT_RETURN
2060 pcc_struct_value
= 1;
2062 #else /* not PCC_STATIC_STRUCT_RETURN */
2064 struct_value_size
= int_size_in_bytes (TREE_TYPE (exp
));
2066 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp
))
2068 /* The structure value address arg is already in actparms.
2069 Pull it out. It might be nice to just leave it there, but
2070 we need to set structure_value_addr. */
2071 tree return_arg
= TREE_VALUE (actparms
);
2072 actparms
= TREE_CHAIN (actparms
);
2073 structure_value_addr
= expand_expr (return_arg
, NULL_RTX
,
2074 VOIDmode
, EXPAND_NORMAL
);
2076 else if (target
&& MEM_P (target
))
2077 structure_value_addr
= XEXP (target
, 0);
2080 /* For variable-sized objects, we must be called with a target
2081 specified. If we were to allocate space on the stack here,
2082 we would have no way of knowing when to free it. */
2083 rtx d
= assign_temp (TREE_TYPE (exp
), 1, 1, 1);
2085 mark_temp_addr_taken (d
);
2086 structure_value_addr
= XEXP (d
, 0);
2090 #endif /* not PCC_STATIC_STRUCT_RETURN */
2093 /* Figure out the amount to which the stack should be aligned. */
2094 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
2097 struct cgraph_rtl_info
*i
= cgraph_rtl_info (fndecl
);
2098 if (i
&& i
->preferred_incoming_stack_boundary
)
2099 preferred_stack_boundary
= i
->preferred_incoming_stack_boundary
;
2102 /* Operand 0 is a pointer-to-function; get the type of the function. */
2103 funtype
= TREE_TYPE (addr
);
2104 if (! POINTER_TYPE_P (funtype
))
2106 funtype
= TREE_TYPE (funtype
);
2108 /* Munge the tree to split complex arguments into their imaginary
2110 if (targetm
.calls
.split_complex_arg
)
2112 type_arg_types
= split_complex_types (TYPE_ARG_TYPES (funtype
));
2113 actparms
= split_complex_values (actparms
);
2116 type_arg_types
= TYPE_ARG_TYPES (funtype
);
2118 if (flags
& ECF_MAY_BE_ALLOCA
)
2119 current_function_calls_alloca
= 1;
2121 /* If struct_value_rtx is 0, it means pass the address
2122 as if it were an extra parameter. */
2123 if (structure_value_addr
&& struct_value
== 0)
2125 /* If structure_value_addr is a REG other than
2126 virtual_outgoing_args_rtx, we can use always use it. If it
2127 is not a REG, we must always copy it into a register.
2128 If it is virtual_outgoing_args_rtx, we must copy it to another
2129 register in some cases. */
2130 rtx temp
= (!REG_P (structure_value_addr
)
2131 || (ACCUMULATE_OUTGOING_ARGS
2132 && stack_arg_under_construction
2133 && structure_value_addr
== virtual_outgoing_args_rtx
)
2134 ? copy_addr_to_reg (convert_memory_address
2135 (Pmode
, structure_value_addr
))
2136 : structure_value_addr
);
2139 = tree_cons (error_mark_node
,
2140 make_tree (build_pointer_type (TREE_TYPE (funtype
)),
2143 structure_value_addr_parm
= 1;
2146 /* Count the arguments and set NUM_ACTUALS. */
2147 for (p
= actparms
, num_actuals
= 0; p
; p
= TREE_CHAIN (p
))
2150 /* Compute number of named args.
2151 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2153 if (type_arg_types
!= 0)
2155 = (list_length (type_arg_types
)
2156 /* Count the struct value address, if it is passed as a parm. */
2157 + structure_value_addr_parm
);
2159 /* If we know nothing, treat all args as named. */
2160 n_named_args
= num_actuals
;
2162 /* Start updating where the next arg would go.
2164 On some machines (such as the PA) indirect calls have a different
2165 calling convention than normal calls. The fourth argument in
2166 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2168 INIT_CUMULATIVE_ARGS (args_so_far
, funtype
, NULL_RTX
, fndecl
, n_named_args
);
2170 /* Now possibly adjust the number of named args.
2171 Normally, don't include the last named arg if anonymous args follow.
2172 We do include the last named arg if
2173 targetm.calls.strict_argument_naming() returns nonzero.
2174 (If no anonymous args follow, the result of list_length is actually
2175 one too large. This is harmless.)
2177 If targetm.calls.pretend_outgoing_varargs_named() returns
2178 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2179 this machine will be able to place unnamed args that were passed
2180 in registers into the stack. So treat all args as named. This
2181 allows the insns emitting for a specific argument list to be
2182 independent of the function declaration.
2184 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2185 we do not have any reliable way to pass unnamed args in
2186 registers, so we must force them into memory. */
2188 if (type_arg_types
!= 0
2189 && targetm
.calls
.strict_argument_naming (&args_so_far
))
2191 else if (type_arg_types
!= 0
2192 && ! targetm
.calls
.pretend_outgoing_varargs_named (&args_so_far
))
2193 /* Don't include the last named arg. */
2196 /* Treat all args as named. */
2197 n_named_args
= num_actuals
;
2199 /* Make a vector to hold all the information about each arg. */
2200 args
= alloca (num_actuals
* sizeof (struct arg_data
));
2201 memset (args
, 0, num_actuals
* sizeof (struct arg_data
));
2203 /* Build up entries in the ARGS array, compute the size of the
2204 arguments into ARGS_SIZE, etc. */
2205 initialize_argument_information (num_actuals
, args
, &args_size
,
2206 n_named_args
, actparms
, fndecl
,
2207 &args_so_far
, reg_parm_stack_space
,
2208 &old_stack_level
, &old_pending_adj
,
2209 &must_preallocate
, &flags
,
2210 &try_tail_call
, CALL_FROM_THUNK_P (exp
));
2214 /* If this function requires a variable-sized argument list, don't
2215 try to make a cse'able block for this call. We may be able to
2216 do this eventually, but it is too complicated to keep track of
2217 what insns go in the cse'able block and which don't. */
2219 flags
&= ~ECF_LIBCALL_BLOCK
;
2220 must_preallocate
= 1;
2223 /* Now make final decision about preallocating stack space. */
2224 must_preallocate
= finalize_must_preallocate (must_preallocate
,
2228 /* If the structure value address will reference the stack pointer, we
2229 must stabilize it. We don't need to do this if we know that we are
2230 not going to adjust the stack pointer in processing this call. */
2232 if (structure_value_addr
2233 && (reg_mentioned_p (virtual_stack_dynamic_rtx
, structure_value_addr
)
2234 || reg_mentioned_p (virtual_outgoing_args_rtx
,
2235 structure_value_addr
))
2237 || (!ACCUMULATE_OUTGOING_ARGS
&& args_size
.constant
)))
2238 structure_value_addr
= copy_to_reg (structure_value_addr
);
2240 /* Tail calls can make things harder to debug, and we're traditionally
2241 pushed these optimizations into -O2. Don't try if we're already
2242 expanding a call, as that means we're an argument. Don't try if
2243 there's cleanups, as we know there's code to follow the call.
2245 If rtx_equal_function_value_matters is false, that means we've
2246 finished with regular parsing. Which means that some of the
2247 machinery we use to generate tail-calls is no longer in place.
2248 This is most often true of sjlj-exceptions, which we couldn't
2249 tail-call to anyway.
2251 If current_nesting_level () == 0, we're being called after
2252 the function body has been expanded. This can happen when
2253 setting up trampolines in expand_function_end. */
2254 if (currently_expanding_call
++ != 0
2255 || !flag_optimize_sibling_calls
2256 || !rtx_equal_function_value_matters
2257 || current_nesting_level () == 0
2259 || lookup_stmt_eh_region (exp
) >= 0)
2262 /* Rest of purposes for tail call optimizations to fail. */
2264 #ifdef HAVE_sibcall_epilogue
2265 !HAVE_sibcall_epilogue
2270 /* Doing sibling call optimization needs some work, since
2271 structure_value_addr can be allocated on the stack.
2272 It does not seem worth the effort since few optimizable
2273 sibling calls will return a structure. */
2274 || structure_value_addr
!= NULL_RTX
2275 /* Check whether the target is able to optimize the call
2277 || !targetm
.function_ok_for_sibcall (fndecl
, exp
)
2278 /* Functions that do not return exactly once may not be sibcall
2280 || (flags
& (ECF_RETURNS_TWICE
| ECF_LONGJMP
| ECF_NORETURN
))
2281 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr
)))
2282 /* If the called function is nested in the current one, it might access
2283 some of the caller's arguments, but could clobber them beforehand if
2284 the argument areas are shared. */
2285 || (fndecl
&& decl_function_context (fndecl
) == current_function_decl
)
2286 /* If this function requires more stack slots than the current
2287 function, we cannot change it into a sibling call. */
2288 || args_size
.constant
> current_function_args_size
2289 /* If the callee pops its own arguments, then it must pop exactly
2290 the same number of arguments as the current function. */
2291 || (RETURN_POPS_ARGS (fndecl
, funtype
, args_size
.constant
)
2292 != RETURN_POPS_ARGS (current_function_decl
,
2293 TREE_TYPE (current_function_decl
),
2294 current_function_args_size
))
2295 || !lang_hooks
.decls
.ok_for_sibcall (fndecl
))
2301 actparms
= NULL_TREE
;
2302 /* Ok, we're going to give the tail call the old college try.
2303 This means we're going to evaluate the function arguments
2304 up to three times. There are two degrees of badness we can
2305 encounter, those that can be unsaved and those that can't.
2306 (See unsafe_for_reeval commentary for details.)
2308 Generate a new argument list. Pass safe arguments through
2309 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2310 For hard badness, evaluate them now and put their resulting
2311 rtx in a temporary VAR_DECL.
2313 initialize_argument_information has ordered the array for the
2314 order to be pushed, and we must remember this when reconstructing
2315 the original argument order. */
2317 if (PUSH_ARGS_REVERSED
)
2326 i
= num_actuals
- 1;
2330 for (; i
!= end
; i
+= inc
)
2332 args
[i
].tree_value
= fix_unsafe_tree (args
[i
].tree_value
);
2334 /* Do the same for the function address if it is an expression. */
2336 addr
= fix_unsafe_tree (addr
);
2340 /* Ensure current function's preferred stack boundary is at least
2341 what we need. We don't have to increase alignment for recursive
2343 if (cfun
->preferred_stack_boundary
< preferred_stack_boundary
2344 && fndecl
!= current_function_decl
)
2345 cfun
->preferred_stack_boundary
= preferred_stack_boundary
;
2346 if (fndecl
== current_function_decl
)
2347 cfun
->recursive_call_emit
= true;
2349 preferred_unit_stack_boundary
= preferred_stack_boundary
/ BITS_PER_UNIT
;
2351 /* We want to make two insn chains; one for a sibling call, the other
2352 for a normal call. We will select one of the two chains after
2353 initial RTL generation is complete. */
2354 for (pass
= try_tail_call
? 0 : 1; pass
< 2; pass
++)
2356 int sibcall_failure
= 0;
2357 /* We want to emit any pending stack adjustments before the tail
2358 recursion "call". That way we know any adjustment after the tail
2359 recursion call can be ignored if we indeed use the tail
2361 int save_pending_stack_adjust
= 0;
2362 int save_stack_pointer_delta
= 0;
2364 rtx before_call
, next_arg_reg
;
2368 /* Emit any queued insns now; otherwise they would end up in
2369 only one of the alternates. */
2372 /* State variables we need to save and restore between
2374 save_pending_stack_adjust
= pending_stack_adjust
;
2375 save_stack_pointer_delta
= stack_pointer_delta
;
2378 flags
&= ~ECF_SIBCALL
;
2380 flags
|= ECF_SIBCALL
;
2382 /* Other state variables that we must reinitialize each time
2383 through the loop (that are not initialized by the loop itself). */
2387 /* Start a new sequence for the normal call case.
2389 From this point on, if the sibling call fails, we want to set
2390 sibcall_failure instead of continuing the loop. */
2395 /* We know at this point that there are not currently any
2396 pending cleanups. If, however, in the process of evaluating
2397 the arguments we were to create some, we'll need to be
2398 able to get rid of them. */
2399 expand_start_target_temps ();
2402 /* Don't let pending stack adjusts add up to too much.
2403 Also, do all pending adjustments now if there is any chance
2404 this might be a call to alloca or if we are expanding a sibling
2405 call sequence or if we are calling a function that is to return
2406 with stack pointer depressed. */
2407 if (pending_stack_adjust
>= 32
2408 || (pending_stack_adjust
> 0
2409 && (flags
& (ECF_MAY_BE_ALLOCA
| ECF_SP_DEPRESSED
)))
2411 do_pending_stack_adjust ();
2413 /* When calling a const function, we must pop the stack args right away,
2414 so that the pop is deleted or moved with the call. */
2415 if (pass
&& (flags
& ECF_LIBCALL_BLOCK
))
2418 /* Precompute any arguments as needed. */
2420 precompute_arguments (flags
, num_actuals
, args
);
2422 /* Now we are about to start emitting insns that can be deleted
2423 if a libcall is deleted. */
2424 if (pass
&& (flags
& (ECF_LIBCALL_BLOCK
| ECF_MALLOC
)))
2427 adjusted_args_size
= args_size
;
2428 /* Compute the actual size of the argument block required. The variable
2429 and constant sizes must be combined, the size may have to be rounded,
2430 and there may be a minimum required size. When generating a sibcall
2431 pattern, do not round up, since we'll be re-using whatever space our
2433 unadjusted_args_size
2434 = compute_argument_block_size (reg_parm_stack_space
,
2435 &adjusted_args_size
,
2437 : preferred_stack_boundary
));
2439 old_stack_allocated
= stack_pointer_delta
- pending_stack_adjust
;
2441 /* The argument block when performing a sibling call is the
2442 incoming argument block. */
2445 argblock
= virtual_incoming_args_rtx
;
2447 #ifdef STACK_GROWS_DOWNWARD
2448 = plus_constant (argblock
, current_function_pretend_args_size
);
2450 = plus_constant (argblock
, -current_function_pretend_args_size
);
2452 stored_args_map
= sbitmap_alloc (args_size
.constant
);
2453 sbitmap_zero (stored_args_map
);
2456 /* If we have no actual push instructions, or shouldn't use them,
2457 make space for all args right now. */
2458 else if (adjusted_args_size
.var
!= 0)
2460 if (old_stack_level
== 0)
2462 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
2463 old_stack_pointer_delta
= stack_pointer_delta
;
2464 old_pending_adj
= pending_stack_adjust
;
2465 pending_stack_adjust
= 0;
2466 /* stack_arg_under_construction says whether a stack arg is
2467 being constructed at the old stack level. Pushing the stack
2468 gets a clean outgoing argument block. */
2469 old_stack_arg_under_construction
= stack_arg_under_construction
;
2470 stack_arg_under_construction
= 0;
2472 argblock
= push_block (ARGS_SIZE_RTX (adjusted_args_size
), 0, 0);
2476 /* Note that we must go through the motions of allocating an argument
2477 block even if the size is zero because we may be storing args
2478 in the area reserved for register arguments, which may be part of
2481 int needed
= adjusted_args_size
.constant
;
2483 /* Store the maximum argument space used. It will be pushed by
2484 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2487 if (needed
> current_function_outgoing_args_size
)
2488 current_function_outgoing_args_size
= needed
;
2490 if (must_preallocate
)
2492 if (ACCUMULATE_OUTGOING_ARGS
)
2494 /* Since the stack pointer will never be pushed, it is
2495 possible for the evaluation of a parm to clobber
2496 something we have already written to the stack.
2497 Since most function calls on RISC machines do not use
2498 the stack, this is uncommon, but must work correctly.
2500 Therefore, we save any area of the stack that was already
2501 written and that we are using. Here we set up to do this
2502 by making a new stack usage map from the old one. The
2503 actual save will be done by store_one_arg.
2505 Another approach might be to try to reorder the argument
2506 evaluations to avoid this conflicting stack usage. */
2508 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2509 /* Since we will be writing into the entire argument area,
2510 the map must be allocated for its entire size, not just
2511 the part that is the responsibility of the caller. */
2512 needed
+= reg_parm_stack_space
;
2515 #ifdef ARGS_GROW_DOWNWARD
2516 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2519 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2522 stack_usage_map
= alloca (highest_outgoing_arg_in_use
);
2524 if (initial_highest_arg_in_use
)
2525 memcpy (stack_usage_map
, initial_stack_usage_map
,
2526 initial_highest_arg_in_use
);
2528 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
2529 memset (&stack_usage_map
[initial_highest_arg_in_use
], 0,
2530 (highest_outgoing_arg_in_use
2531 - initial_highest_arg_in_use
));
2534 /* The address of the outgoing argument list must not be
2535 copied to a register here, because argblock would be left
2536 pointing to the wrong place after the call to
2537 allocate_dynamic_stack_space below. */
2539 argblock
= virtual_outgoing_args_rtx
;
2543 if (inhibit_defer_pop
== 0)
2545 /* Try to reuse some or all of the pending_stack_adjust
2546 to get this space. */
2548 = (combine_pending_stack_adjustment_and_call
2549 (unadjusted_args_size
,
2550 &adjusted_args_size
,
2551 preferred_unit_stack_boundary
));
2553 /* combine_pending_stack_adjustment_and_call computes
2554 an adjustment before the arguments are allocated.
2555 Account for them and see whether or not the stack
2556 needs to go up or down. */
2557 needed
= unadjusted_args_size
- needed
;
2561 /* We're releasing stack space. */
2562 /* ??? We can avoid any adjustment at all if we're
2563 already aligned. FIXME. */
2564 pending_stack_adjust
= -needed
;
2565 do_pending_stack_adjust ();
2569 /* We need to allocate space. We'll do that in
2570 push_block below. */
2571 pending_stack_adjust
= 0;
2574 /* Special case this because overhead of `push_block' in
2575 this case is non-trivial. */
2577 argblock
= virtual_outgoing_args_rtx
;
2580 argblock
= push_block (GEN_INT (needed
), 0, 0);
2581 #ifdef ARGS_GROW_DOWNWARD
2582 argblock
= plus_constant (argblock
, needed
);
2586 /* We only really need to call `copy_to_reg' in the case
2587 where push insns are going to be used to pass ARGBLOCK
2588 to a function call in ARGS. In that case, the stack
2589 pointer changes value from the allocation point to the
2590 call point, and hence the value of
2591 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2592 as well always do it. */
2593 argblock
= copy_to_reg (argblock
);
2598 if (ACCUMULATE_OUTGOING_ARGS
)
2600 /* The save/restore code in store_one_arg handles all
2601 cases except one: a constructor call (including a C
2602 function returning a BLKmode struct) to initialize
2604 if (stack_arg_under_construction
)
2606 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2607 rtx push_size
= GEN_INT (reg_parm_stack_space
2608 + adjusted_args_size
.constant
);
2610 rtx push_size
= GEN_INT (adjusted_args_size
.constant
);
2612 if (old_stack_level
== 0)
2614 emit_stack_save (SAVE_BLOCK
, &old_stack_level
,
2616 old_stack_pointer_delta
= stack_pointer_delta
;
2617 old_pending_adj
= pending_stack_adjust
;
2618 pending_stack_adjust
= 0;
2619 /* stack_arg_under_construction says whether a stack
2620 arg is being constructed at the old stack level.
2621 Pushing the stack gets a clean outgoing argument
2623 old_stack_arg_under_construction
2624 = stack_arg_under_construction
;
2625 stack_arg_under_construction
= 0;
2626 /* Make a new map for the new argument list. */
2627 stack_usage_map
= alloca (highest_outgoing_arg_in_use
);
2628 memset (stack_usage_map
, 0, highest_outgoing_arg_in_use
);
2629 highest_outgoing_arg_in_use
= 0;
2631 allocate_dynamic_stack_space (push_size
, NULL_RTX
,
2635 /* If argument evaluation might modify the stack pointer,
2636 copy the address of the argument list to a register. */
2637 for (i
= 0; i
< num_actuals
; i
++)
2638 if (args
[i
].pass_on_stack
)
2640 argblock
= copy_addr_to_reg (argblock
);
2645 compute_argument_addresses (args
, argblock
, num_actuals
);
2647 /* If we push args individually in reverse order, perform stack alignment
2648 before the first push (the last arg). */
2649 if (PUSH_ARGS_REVERSED
&& argblock
== 0
2650 && adjusted_args_size
.constant
!= unadjusted_args_size
)
2652 /* When the stack adjustment is pending, we get better code
2653 by combining the adjustments. */
2654 if (pending_stack_adjust
2655 && ! (flags
& ECF_LIBCALL_BLOCK
)
2656 && ! inhibit_defer_pop
)
2658 pending_stack_adjust
2659 = (combine_pending_stack_adjustment_and_call
2660 (unadjusted_args_size
,
2661 &adjusted_args_size
,
2662 preferred_unit_stack_boundary
));
2663 do_pending_stack_adjust ();
2665 else if (argblock
== 0)
2666 anti_adjust_stack (GEN_INT (adjusted_args_size
.constant
2667 - unadjusted_args_size
));
2669 /* Now that the stack is properly aligned, pops can't safely
2670 be deferred during the evaluation of the arguments. */
2673 funexp
= rtx_for_function_call (fndecl
, addr
);
2675 /* Figure out the register where the value, if any, will come back. */
2677 if (TYPE_MODE (TREE_TYPE (exp
)) != VOIDmode
2678 && ! structure_value_addr
)
2680 if (pcc_struct_value
)
2681 valreg
= hard_function_value (build_pointer_type (TREE_TYPE (exp
)),
2682 fndecl
, (pass
== 0));
2684 valreg
= hard_function_value (TREE_TYPE (exp
), fndecl
, (pass
== 0));
2687 /* Precompute all register parameters. It isn't safe to compute anything
2688 once we have started filling any specific hard regs. */
2689 precompute_register_parameters (num_actuals
, args
, ®_parm_seen
);
2691 if (TREE_OPERAND (exp
, 2))
2692 static_chain_value
= expand_expr (TREE_OPERAND (exp
, 2),
2693 NULL_RTX
, VOIDmode
, 0);
2695 static_chain_value
= 0;
2697 #ifdef REG_PARM_STACK_SPACE
2698 /* Save the fixed argument area if it's part of the caller's frame and
2699 is clobbered by argument setup for this call. */
2700 if (ACCUMULATE_OUTGOING_ARGS
&& pass
)
2701 save_area
= save_fixed_argument_area (reg_parm_stack_space
, argblock
,
2702 &low_to_save
, &high_to_save
);
2705 /* Now store (and compute if necessary) all non-register parms.
2706 These come before register parms, since they can require block-moves,
2707 which could clobber the registers used for register parms.
2708 Parms which have partial registers are not stored here,
2709 but we do preallocate space here if they want that. */
2711 for (i
= 0; i
< num_actuals
; i
++)
2712 if (args
[i
].reg
== 0 || args
[i
].pass_on_stack
)
2714 rtx before_arg
= get_last_insn ();
2716 if (store_one_arg (&args
[i
], argblock
, flags
,
2717 adjusted_args_size
.var
!= 0,
2718 reg_parm_stack_space
)
2720 && check_sibcall_argument_overlap (before_arg
,
2722 sibcall_failure
= 1;
2724 if (flags
& ECF_CONST
2726 && args
[i
].value
== args
[i
].stack
)
2727 call_fusage
= gen_rtx_EXPR_LIST (VOIDmode
,
2728 gen_rtx_USE (VOIDmode
,
2733 /* If we have a parm that is passed in registers but not in memory
2734 and whose alignment does not permit a direct copy into registers,
2735 make a group of pseudos that correspond to each register that we
2737 if (STRICT_ALIGNMENT
)
2738 store_unaligned_arguments_into_pseudos (args
, num_actuals
);
2740 /* Now store any partially-in-registers parm.
2741 This is the last place a block-move can happen. */
2743 for (i
= 0; i
< num_actuals
; i
++)
2744 if (args
[i
].partial
!= 0 && ! args
[i
].pass_on_stack
)
2746 rtx before_arg
= get_last_insn ();
2748 if (store_one_arg (&args
[i
], argblock
, flags
,
2749 adjusted_args_size
.var
!= 0,
2750 reg_parm_stack_space
)
2752 && check_sibcall_argument_overlap (before_arg
,
2754 sibcall_failure
= 1;
2757 /* If we pushed args in forward order, perform stack alignment
2758 after pushing the last arg. */
2759 if (!PUSH_ARGS_REVERSED
&& argblock
== 0)
2760 anti_adjust_stack (GEN_INT (adjusted_args_size
.constant
2761 - unadjusted_args_size
));
2763 /* If register arguments require space on the stack and stack space
2764 was not preallocated, allocate stack space here for arguments
2765 passed in registers. */
2766 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2767 if (!ACCUMULATE_OUTGOING_ARGS
2768 && must_preallocate
== 0 && reg_parm_stack_space
> 0)
2769 anti_adjust_stack (GEN_INT (reg_parm_stack_space
));
2772 /* Pass the function the address in which to return a
2774 if (pass
!= 0 && structure_value_addr
&& ! structure_value_addr_parm
)
2776 structure_value_addr
2777 = convert_memory_address (Pmode
, structure_value_addr
);
2778 emit_move_insn (struct_value
,
2780 force_operand (structure_value_addr
,
2783 if (REG_P (struct_value
))
2784 use_reg (&call_fusage
, struct_value
);
2787 funexp
= prepare_call_address (funexp
, static_chain_value
,
2788 &call_fusage
, reg_parm_seen
, pass
== 0);
2790 load_register_parameters (args
, num_actuals
, &call_fusage
, flags
,
2791 pass
== 0, &sibcall_failure
);
2793 /* Perform postincrements before actually calling the function. */
2796 /* Save a pointer to the last insn before the call, so that we can
2797 later safely search backwards to find the CALL_INSN. */
2798 before_call
= get_last_insn ();
2800 /* Set up next argument register. For sibling calls on machines
2801 with register windows this should be the incoming register. */
2802 #ifdef FUNCTION_INCOMING_ARG
2804 next_arg_reg
= FUNCTION_INCOMING_ARG (args_so_far
, VOIDmode
,
2808 next_arg_reg
= FUNCTION_ARG (args_so_far
, VOIDmode
,
2811 /* All arguments and registers used for the call must be set up by
2814 /* Stack must be properly aligned now. */
2815 if (pass
&& stack_pointer_delta
% preferred_unit_stack_boundary
)
2818 /* Generate the actual call instruction. */
2819 emit_call_1 (funexp
, exp
, fndecl
, funtype
, unadjusted_args_size
,
2820 adjusted_args_size
.constant
, struct_value_size
,
2821 next_arg_reg
, valreg
, old_inhibit_defer_pop
, call_fusage
,
2822 flags
, & args_so_far
);
2824 /* If call is cse'able, make appropriate pair of reg-notes around it.
2825 Test valreg so we don't crash; may safely ignore `const'
2826 if return type is void. Disable for PARALLEL return values, because
2827 we have no way to move such values into a pseudo register. */
2828 if (pass
&& (flags
& ECF_LIBCALL_BLOCK
))
2832 bool failed
= valreg
== 0 || GET_CODE (valreg
) == PARALLEL
;
2834 insns
= get_insns ();
2836 /* Expansion of block moves possibly introduced a loop that may
2837 not appear inside libcall block. */
2838 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2850 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2852 /* Mark the return value as a pointer if needed. */
2853 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2854 mark_reg_pointer (temp
,
2855 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
))));
2858 if (flag_unsafe_math_optimizations
2860 && DECL_BUILT_IN (fndecl
)
2861 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_SQRT
2862 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_SQRTF
2863 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_SQRTL
))
2864 note
= gen_rtx_fmt_e (SQRT
,
2866 args
[0].initial_value
);
2869 /* Construct an "equal form" for the value which
2870 mentions all the arguments in order as well as
2871 the function name. */
2872 for (i
= 0; i
< num_actuals
; i
++)
2873 note
= gen_rtx_EXPR_LIST (VOIDmode
,
2874 args
[i
].initial_value
, note
);
2875 note
= gen_rtx_EXPR_LIST (VOIDmode
, funexp
, note
);
2877 if (flags
& ECF_PURE
)
2878 note
= gen_rtx_EXPR_LIST (VOIDmode
,
2879 gen_rtx_USE (VOIDmode
,
2880 gen_rtx_MEM (BLKmode
,
2881 gen_rtx_SCRATCH (VOIDmode
))),
2884 emit_libcall_block (insns
, temp
, valreg
, note
);
2889 else if (pass
&& (flags
& ECF_MALLOC
))
2891 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2894 /* The return value from a malloc-like function is a pointer. */
2895 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2896 mark_reg_pointer (temp
, BIGGEST_ALIGNMENT
);
2898 emit_move_insn (temp
, valreg
);
2900 /* The return value from a malloc-like function can not alias
2902 last
= get_last_insn ();
2904 gen_rtx_EXPR_LIST (REG_NOALIAS
, temp
, REG_NOTES (last
));
2906 /* Write out the sequence. */
2907 insns
= get_insns ();
2913 /* For calls to `setjmp', etc., inform flow.c it should complain
2914 if nonvolatile values are live. For functions that cannot return,
2915 inform flow that control does not fall through. */
2917 if ((flags
& (ECF_NORETURN
| ECF_LONGJMP
)) || pass
== 0)
2919 /* The barrier must be emitted
2920 immediately after the CALL_INSN. Some ports emit more
2921 than just a CALL_INSN above, so we must search for it here. */
2923 rtx last
= get_last_insn ();
2924 while (!CALL_P (last
))
2926 last
= PREV_INSN (last
);
2927 /* There was no CALL_INSN? */
2928 if (last
== before_call
)
2932 emit_barrier_after (last
);
2934 /* Stack adjustments after a noreturn call are dead code.
2935 However when NO_DEFER_POP is in effect, we must preserve
2936 stack_pointer_delta. */
2937 if (inhibit_defer_pop
== 0)
2939 stack_pointer_delta
= old_stack_allocated
;
2940 pending_stack_adjust
= 0;
2944 if (flags
& ECF_LONGJMP
)
2945 current_function_calls_longjmp
= 1;
2947 /* If value type not void, return an rtx for the value. */
2949 if (TYPE_MODE (TREE_TYPE (exp
)) == VOIDmode
2951 target
= const0_rtx
;
2952 else if (structure_value_addr
)
2954 if (target
== 0 || !MEM_P (target
))
2957 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
2958 memory_address (TYPE_MODE (TREE_TYPE (exp
)),
2959 structure_value_addr
));
2960 set_mem_attributes (target
, exp
, 1);
2963 else if (pcc_struct_value
)
2965 /* This is the special C++ case where we need to
2966 know what the true target was. We take care to
2967 never use this value more than once in one expression. */
2968 target
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
2969 copy_to_reg (valreg
));
2970 set_mem_attributes (target
, exp
, 1);
2972 /* Handle calls that return values in multiple non-contiguous locations.
2973 The Irix 6 ABI has examples of this. */
2974 else if (GET_CODE (valreg
) == PARALLEL
)
2978 /* This will only be assigned once, so it can be readonly. */
2979 tree nt
= build_qualified_type (TREE_TYPE (exp
),
2980 (TYPE_QUALS (TREE_TYPE (exp
))
2981 | TYPE_QUAL_CONST
));
2983 target
= assign_temp (nt
, 0, 1, 1);
2984 preserve_temp_slots (target
);
2987 if (! rtx_equal_p (target
, valreg
))
2988 emit_group_store (target
, valreg
, TREE_TYPE (exp
),
2989 int_size_in_bytes (TREE_TYPE (exp
)));
2991 /* We can not support sibling calls for this case. */
2992 sibcall_failure
= 1;
2995 && GET_MODE (target
) == TYPE_MODE (TREE_TYPE (exp
))
2996 && GET_MODE (target
) == GET_MODE (valreg
))
2998 /* TARGET and VALREG cannot be equal at this point because the
2999 latter would not have REG_FUNCTION_VALUE_P true, while the
3000 former would if it were referring to the same register.
3002 If they refer to the same register, this move will be a no-op,
3003 except when function inlining is being done. */
3004 emit_move_insn (target
, valreg
);
3006 /* If we are setting a MEM, this code must be executed. Since it is
3007 emitted after the call insn, sibcall optimization cannot be
3008 performed in that case. */
3010 sibcall_failure
= 1;
3012 else if (TYPE_MODE (TREE_TYPE (exp
)) == BLKmode
)
3014 target
= copy_blkmode_from_reg (target
, valreg
, TREE_TYPE (exp
));
3016 /* We can not support sibling calls for this case. */
3017 sibcall_failure
= 1;
3021 if (shift_returned_value (TREE_TYPE (exp
), &valreg
))
3022 sibcall_failure
= 1;
3024 target
= copy_to_reg (valreg
);
3027 if (targetm
.calls
.promote_function_return(funtype
))
3029 /* If we promoted this return value, make the proper SUBREG. TARGET
3030 might be const0_rtx here, so be careful. */
3032 && TYPE_MODE (TREE_TYPE (exp
)) != BLKmode
3033 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3035 tree type
= TREE_TYPE (exp
);
3036 int unsignedp
= TYPE_UNSIGNED (type
);
3039 /* If we don't promote as expected, something is wrong. */
3040 if (GET_MODE (target
)
3041 != promote_mode (type
, TYPE_MODE (type
), &unsignedp
, 1))
3044 if ((WORDS_BIG_ENDIAN
|| BYTES_BIG_ENDIAN
)
3045 && GET_MODE_SIZE (GET_MODE (target
))
3046 > GET_MODE_SIZE (TYPE_MODE (type
)))
3048 offset
= GET_MODE_SIZE (GET_MODE (target
))
3049 - GET_MODE_SIZE (TYPE_MODE (type
));
3050 if (! BYTES_BIG_ENDIAN
)
3051 offset
= (offset
/ UNITS_PER_WORD
) * UNITS_PER_WORD
;
3052 else if (! WORDS_BIG_ENDIAN
)
3053 offset
%= UNITS_PER_WORD
;
3055 target
= gen_rtx_SUBREG (TYPE_MODE (type
), target
, offset
);
3056 SUBREG_PROMOTED_VAR_P (target
) = 1;
3057 SUBREG_PROMOTED_UNSIGNED_SET (target
, unsignedp
);
3061 /* If size of args is variable or this was a constructor call for a stack
3062 argument, restore saved stack-pointer value. */
3064 if (old_stack_level
&& ! (flags
& ECF_SP_DEPRESSED
))
3066 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
3067 stack_pointer_delta
= old_stack_pointer_delta
;
3068 pending_stack_adjust
= old_pending_adj
;
3069 stack_arg_under_construction
= old_stack_arg_under_construction
;
3070 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3071 stack_usage_map
= initial_stack_usage_map
;
3072 sibcall_failure
= 1;
3074 else if (ACCUMULATE_OUTGOING_ARGS
&& pass
)
3076 #ifdef REG_PARM_STACK_SPACE
3078 restore_fixed_argument_area (save_area
, argblock
,
3079 high_to_save
, low_to_save
);
3082 /* If we saved any argument areas, restore them. */
3083 for (i
= 0; i
< num_actuals
; i
++)
3084 if (args
[i
].save_area
)
3086 enum machine_mode save_mode
= GET_MODE (args
[i
].save_area
);
3088 = gen_rtx_MEM (save_mode
,
3089 memory_address (save_mode
,
3090 XEXP (args
[i
].stack_slot
, 0)));
3092 if (save_mode
!= BLKmode
)
3093 emit_move_insn (stack_area
, args
[i
].save_area
);
3095 emit_block_move (stack_area
, args
[i
].save_area
,
3096 GEN_INT (args
[i
].locate
.size
.constant
),
3097 BLOCK_OP_CALL_PARM
);
3100 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3101 stack_usage_map
= initial_stack_usage_map
;
3104 /* If this was alloca, record the new stack level for nonlocal gotos.
3105 Check for the handler slots since we might not have a save area
3106 for non-local gotos. */
3108 if ((flags
& ECF_MAY_BE_ALLOCA
) && cfun
->nonlocal_goto_save_area
!= 0)
3109 update_nonlocal_goto_save_area ();
3111 /* Free up storage we no longer need. */
3112 for (i
= 0; i
< num_actuals
; ++i
)
3113 if (args
[i
].aligned_regs
)
3114 free (args
[i
].aligned_regs
);
3118 /* Undo the fake expand_start_target_temps we did earlier. If
3119 there had been any cleanups created, we've already set
3121 expand_end_target_temps ();
3124 /* If this function is returning into a memory location marked as
3125 readonly, it means it is initializing that location. We normally treat
3126 functions as not clobbering such locations, so we need to specify that
3127 this one does. We do this by adding the appropriate CLOBBER to the
3128 CALL_INSN function usage list. This cannot be done by emitting a
3129 standalone CLOBBER after the call because the latter would be ignored
3130 by at least the delay slot scheduling pass. We do this now instead of
3131 adding to call_fusage before the call to emit_call_1 because TARGET
3132 may be modified in the meantime. */
3133 if (structure_value_addr
!= 0 && target
!= 0
3134 && MEM_P (target
) && RTX_UNCHANGING_P (target
))
3135 add_function_usage_to
3137 gen_rtx_EXPR_LIST (VOIDmode
, gen_rtx_CLOBBER (VOIDmode
, target
),
3140 insns
= get_insns ();
3145 tail_call_insns
= insns
;
3147 /* Restore the pending stack adjustment now that we have
3148 finished generating the sibling call sequence. */
3150 pending_stack_adjust
= save_pending_stack_adjust
;
3151 stack_pointer_delta
= save_stack_pointer_delta
;
3153 /* Prepare arg structure for next iteration. */
3154 for (i
= 0; i
< num_actuals
; i
++)
3157 args
[i
].aligned_regs
= 0;
3161 sbitmap_free (stored_args_map
);
3165 normal_call_insns
= insns
;
3167 /* Verify that we've deallocated all the stack we used. */
3168 if (! (flags
& (ECF_NORETURN
| ECF_LONGJMP
))
3169 && old_stack_allocated
!= stack_pointer_delta
3170 - pending_stack_adjust
)
3174 /* If something prevents making this a sibling call,
3175 zero out the sequence. */
3176 if (sibcall_failure
)
3177 tail_call_insns
= NULL_RTX
;
3182 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3183 arguments too, as argument area is now clobbered by the call. */
3184 if (tail_call_insns
)
3186 emit_insn (tail_call_insns
);
3187 cfun
->tail_call_emit
= true;
3190 emit_insn (normal_call_insns
);
3192 currently_expanding_call
--;
3194 /* If this function returns with the stack pointer depressed, ensure
3195 this block saves and restores the stack pointer, show it was
3196 changed, and adjust for any outgoing arg space. */
3197 if (flags
& ECF_SP_DEPRESSED
)
3199 clear_pending_stack_adjust ();
3200 emit_insn (gen_rtx_CLOBBER (VOIDmode
, stack_pointer_rtx
));
3201 emit_move_insn (virtual_stack_dynamic_rtx
, stack_pointer_rtx
);
3207 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3208 this function's incoming arguments.
3210 At the start of RTL generation we know the only REG_EQUIV notes
3211 in the rtl chain are those for incoming arguments, so we can safely
3212 flush any REG_EQUIV note.
3214 This is (slight) overkill. We could keep track of the highest
3215 argument we clobber and be more selective in removing notes, but it
3216 does not seem to be worth the effort. */
3218 fixup_tail_calls (void)
3223 purge_reg_equiv_notes ();
3225 /* A sibling call sequence also may invalidate RTX_UNCHANGING_P
3226 flag of some incoming arguments MEM RTLs, because it can write into
3227 those slots. We clear all those bits now.
3229 This is (slight) overkill, we could keep track of which arguments
3230 we actually write into. */
3231 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3234 purge_mem_unchanging_flag (PATTERN (insn
));
3237 /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
3238 arguments passed in registers. */
3239 for (arg
= DECL_ARGUMENTS (current_function_decl
);
3241 arg
= TREE_CHAIN (arg
))
3243 if (REG_P (DECL_RTL (arg
)))
3244 RTX_UNCHANGING_P (DECL_RTL (arg
)) = false;
3248 /* Traverse an argument list in VALUES and expand all complex
3249 arguments into their components. */
3251 split_complex_values (tree values
)
3255 /* Before allocating memory, check for the common case of no complex. */
3256 for (p
= values
; p
; p
= TREE_CHAIN (p
))
3258 tree type
= TREE_TYPE (TREE_VALUE (p
));
3259 if (type
&& TREE_CODE (type
) == COMPLEX_TYPE
3260 && targetm
.calls
.split_complex_arg (type
))
3266 values
= copy_list (values
);
3268 for (p
= values
; p
; p
= TREE_CHAIN (p
))
3270 tree complex_value
= TREE_VALUE (p
);
3273 complex_type
= TREE_TYPE (complex_value
);
3277 if (TREE_CODE (complex_type
) == COMPLEX_TYPE
3278 && targetm
.calls
.split_complex_arg (complex_type
))
3281 tree real
, imag
, next
;
3283 subtype
= TREE_TYPE (complex_type
);
3284 complex_value
= save_expr (complex_value
);
3285 real
= build1 (REALPART_EXPR
, subtype
, complex_value
);
3286 imag
= build1 (IMAGPART_EXPR
, subtype
, complex_value
);
3288 TREE_VALUE (p
) = real
;
3289 next
= TREE_CHAIN (p
);
3290 imag
= build_tree_list (NULL_TREE
, imag
);
3291 TREE_CHAIN (p
) = imag
;
3292 TREE_CHAIN (imag
) = next
;
3294 /* Skip the newly created node. */
3302 /* Traverse a list of TYPES and expand all complex types into their
3305 split_complex_types (tree types
)
3309 /* Before allocating memory, check for the common case of no complex. */
3310 for (p
= types
; p
; p
= TREE_CHAIN (p
))
3312 tree type
= TREE_VALUE (p
);
3313 if (TREE_CODE (type
) == COMPLEX_TYPE
3314 && targetm
.calls
.split_complex_arg (type
))
3320 types
= copy_list (types
);
3322 for (p
= types
; p
; p
= TREE_CHAIN (p
))
3324 tree complex_type
= TREE_VALUE (p
);
3326 if (TREE_CODE (complex_type
) == COMPLEX_TYPE
3327 && targetm
.calls
.split_complex_arg (complex_type
))
3331 /* Rewrite complex type with component type. */
3332 TREE_VALUE (p
) = TREE_TYPE (complex_type
);
3333 next
= TREE_CHAIN (p
);
3335 /* Add another component type for the imaginary part. */
3336 imag
= build_tree_list (NULL_TREE
, TREE_VALUE (p
));
3337 TREE_CHAIN (p
) = imag
;
3338 TREE_CHAIN (imag
) = next
;
3340 /* Skip the newly created node. */
3348 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3349 The RETVAL parameter specifies whether return value needs to be saved, other
3350 parameters are documented in the emit_library_call function below. */
3353 emit_library_call_value_1 (int retval
, rtx orgfun
, rtx value
,
3354 enum libcall_type fn_type
,
3355 enum machine_mode outmode
, int nargs
, va_list p
)
3357 /* Total size in bytes of all the stack-parms scanned so far. */
3358 struct args_size args_size
;
3359 /* Size of arguments before any adjustments (such as rounding). */
3360 struct args_size original_args_size
;
3366 CUMULATIVE_ARGS args_so_far
;
3370 enum machine_mode mode
;
3373 struct locate_and_pad_arg_data locate
;
3377 int old_inhibit_defer_pop
= inhibit_defer_pop
;
3378 rtx call_fusage
= 0;
3381 int pcc_struct_value
= 0;
3382 int struct_value_size
= 0;
3384 int reg_parm_stack_space
= 0;
3387 tree tfom
; /* type_for_mode (outmode, 0) */
3389 #ifdef REG_PARM_STACK_SPACE
3390 /* Define the boundary of the register parm stack space that needs to be
3392 int low_to_save
, high_to_save
;
3393 rtx save_area
= 0; /* Place that it is saved. */
3396 /* Size of the stack reserved for parameter registers. */
3397 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
3398 char *initial_stack_usage_map
= stack_usage_map
;
3400 rtx struct_value
= targetm
.calls
.struct_value_rtx (0, 0);
3402 #ifdef REG_PARM_STACK_SPACE
3403 reg_parm_stack_space
= REG_PARM_STACK_SPACE ((tree
) 0);
3406 /* By default, library functions can not throw. */
3407 flags
= ECF_NOTHROW
;
3419 case LCT_CONST_MAKE_BLOCK
:
3420 flags
|= ECF_CONST
| ECF_LIBCALL_BLOCK
;
3422 case LCT_PURE_MAKE_BLOCK
:
3423 flags
|= ECF_PURE
| ECF_LIBCALL_BLOCK
;
3426 flags
|= ECF_NORETURN
;
3429 flags
= ECF_NORETURN
;
3431 case LCT_ALWAYS_RETURN
:
3432 flags
= ECF_ALWAYS_RETURN
;
3434 case LCT_RETURNS_TWICE
:
3435 flags
= ECF_RETURNS_TWICE
;
3440 /* Ensure current function's preferred stack boundary is at least
3442 if (cfun
->preferred_stack_boundary
< PREFERRED_STACK_BOUNDARY
)
3443 cfun
->preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
3445 /* If this kind of value comes back in memory,
3446 decide where in memory it should come back. */
3447 if (outmode
!= VOIDmode
)
3449 tfom
= lang_hooks
.types
.type_for_mode (outmode
, 0);
3450 if (aggregate_value_p (tfom
, 0))
3452 #ifdef PCC_STATIC_STRUCT_RETURN
3454 = hard_function_value (build_pointer_type (tfom
), 0, 0);
3455 mem_value
= gen_rtx_MEM (outmode
, pointer_reg
);
3456 pcc_struct_value
= 1;
3458 value
= gen_reg_rtx (outmode
);
3459 #else /* not PCC_STATIC_STRUCT_RETURN */
3460 struct_value_size
= GET_MODE_SIZE (outmode
);
3461 if (value
!= 0 && MEM_P (value
))
3464 mem_value
= assign_temp (tfom
, 0, 1, 1);
3466 /* This call returns a big structure. */
3467 flags
&= ~(ECF_CONST
| ECF_PURE
| ECF_LIBCALL_BLOCK
);
3471 tfom
= void_type_node
;
3473 /* ??? Unfinished: must pass the memory address as an argument. */
3475 /* Copy all the libcall-arguments out of the varargs data
3476 and into a vector ARGVEC.
3478 Compute how to pass each argument. We only support a very small subset
3479 of the full argument passing conventions to limit complexity here since
3480 library functions shouldn't have many args. */
3482 argvec
= alloca ((nargs
+ 1) * sizeof (struct arg
));
3483 memset (argvec
, 0, (nargs
+ 1) * sizeof (struct arg
));
3485 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3486 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far
, outmode
, fun
);
3488 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
, 0, nargs
);
3491 args_size
.constant
= 0;
3496 /* Now we are about to start emitting insns that can be deleted
3497 if a libcall is deleted. */
3498 if (flags
& ECF_LIBCALL_BLOCK
)
3503 /* If there's a structure value address to be passed,
3504 either pass it in the special place, or pass it as an extra argument. */
3505 if (mem_value
&& struct_value
== 0 && ! pcc_struct_value
)
3507 rtx addr
= XEXP (mem_value
, 0);
3510 /* Make sure it is a reasonable operand for a move or push insn. */
3511 if (!REG_P (addr
) && !MEM_P (addr
)
3512 && ! (CONSTANT_P (addr
) && LEGITIMATE_CONSTANT_P (addr
)))
3513 addr
= force_operand (addr
, NULL_RTX
);
3515 argvec
[count
].value
= addr
;
3516 argvec
[count
].mode
= Pmode
;
3517 argvec
[count
].partial
= 0;
3519 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, Pmode
, NULL_TREE
, 1);
3520 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, Pmode
, NULL_TREE
, 1))
3523 locate_and_pad_parm (Pmode
, NULL_TREE
,
3524 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3527 argvec
[count
].reg
!= 0,
3529 0, NULL_TREE
, &args_size
, &argvec
[count
].locate
);
3531 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3532 || reg_parm_stack_space
> 0)
3533 args_size
.constant
+= argvec
[count
].locate
.size
.constant
;
3535 FUNCTION_ARG_ADVANCE (args_so_far
, Pmode
, (tree
) 0, 1);
3540 for (; count
< nargs
; count
++)
3542 rtx val
= va_arg (p
, rtx
);
3543 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
3545 /* We cannot convert the arg value to the mode the library wants here;
3546 must do it earlier where we know the signedness of the arg. */
3548 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
3551 /* There's no need to call protect_from_queue, because
3552 either emit_move_insn or emit_push_insn will do that. */
3554 /* Make sure it is a reasonable operand for a move or push insn. */
3555 if (!REG_P (val
) && !MEM_P (val
)
3556 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
3557 val
= force_operand (val
, NULL_RTX
);
3559 if (pass_by_reference (&args_so_far
, mode
, NULL_TREE
, 1))
3562 int must_copy
= ! FUNCTION_ARG_CALLEE_COPIES (args_so_far
, mode
,
3565 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3566 functions, so we have to pretend this isn't such a function. */
3567 if (flags
& ECF_LIBCALL_BLOCK
)
3569 rtx insns
= get_insns ();
3573 flags
&= ~(ECF_CONST
| ECF_PURE
| ECF_LIBCALL_BLOCK
);
3575 /* If this was a CONST function, it is now PURE since
3576 it now reads memory. */
3577 if (flags
& ECF_CONST
)
3579 flags
&= ~ECF_CONST
;
3583 if (GET_MODE (val
) == MEM
&& ! must_copy
)
3587 slot
= assign_temp (lang_hooks
.types
.type_for_mode (mode
, 0),
3589 emit_move_insn (slot
, val
);
3593 tree type
= lang_hooks
.types
.type_for_mode (mode
, 0);
3596 = gen_rtx_MEM (mode
,
3597 expand_expr (build1 (ADDR_EXPR
,
3598 build_pointer_type (type
),
3599 make_tree (type
, val
)),
3600 NULL_RTX
, VOIDmode
, 0));
3603 call_fusage
= gen_rtx_EXPR_LIST (VOIDmode
,
3604 gen_rtx_USE (VOIDmode
, slot
),
3607 call_fusage
= gen_rtx_EXPR_LIST (VOIDmode
,
3608 gen_rtx_CLOBBER (VOIDmode
,
3613 val
= force_operand (XEXP (slot
, 0), NULL_RTX
);
3616 argvec
[count
].value
= val
;
3617 argvec
[count
].mode
= mode
;
3619 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
3621 argvec
[count
].partial
3622 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
3624 locate_and_pad_parm (mode
, NULL_TREE
,
3625 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3628 argvec
[count
].reg
!= 0,
3630 argvec
[count
].partial
,
3631 NULL_TREE
, &args_size
, &argvec
[count
].locate
);
3633 if (argvec
[count
].locate
.size
.var
)
3636 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3637 || reg_parm_stack_space
> 0)
3638 args_size
.constant
+= argvec
[count
].locate
.size
.constant
;
3640 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
) 0, 1);
3643 /* If this machine requires an external definition for library
3644 functions, write one out. */
3645 assemble_external_libcall (fun
);
3647 original_args_size
= args_size
;
3648 args_size
.constant
= (((args_size
.constant
3649 + stack_pointer_delta
3653 - stack_pointer_delta
);
3655 args_size
.constant
= MAX (args_size
.constant
,
3656 reg_parm_stack_space
);
3658 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3659 args_size
.constant
-= reg_parm_stack_space
;
3662 if (args_size
.constant
> current_function_outgoing_args_size
)
3663 current_function_outgoing_args_size
= args_size
.constant
;
3665 if (ACCUMULATE_OUTGOING_ARGS
)
3667 /* Since the stack pointer will never be pushed, it is possible for
3668 the evaluation of a parm to clobber something we have already
3669 written to the stack. Since most function calls on RISC machines
3670 do not use the stack, this is uncommon, but must work correctly.
3672 Therefore, we save any area of the stack that was already written
3673 and that we are using. Here we set up to do this by making a new
3674 stack usage map from the old one.
3676 Another approach might be to try to reorder the argument
3677 evaluations to avoid this conflicting stack usage. */
3679 needed
= args_size
.constant
;
3681 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3682 /* Since we will be writing into the entire argument area, the
3683 map must be allocated for its entire size, not just the part that
3684 is the responsibility of the caller. */
3685 needed
+= reg_parm_stack_space
;
3688 #ifdef ARGS_GROW_DOWNWARD
3689 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3692 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3695 stack_usage_map
= alloca (highest_outgoing_arg_in_use
);
3697 if (initial_highest_arg_in_use
)
3698 memcpy (stack_usage_map
, initial_stack_usage_map
,
3699 initial_highest_arg_in_use
);
3701 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
3702 memset (&stack_usage_map
[initial_highest_arg_in_use
], 0,
3703 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
3706 /* We must be careful to use virtual regs before they're instantiated,
3707 and real regs afterwards. Loop optimization, for example, can create
3708 new libcalls after we've instantiated the virtual regs, and if we
3709 use virtuals anyway, they won't match the rtl patterns. */
3711 if (virtuals_instantiated
)
3712 argblock
= plus_constant (stack_pointer_rtx
, STACK_POINTER_OFFSET
);
3714 argblock
= virtual_outgoing_args_rtx
;
3719 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
3722 /* If we push args individually in reverse order, perform stack alignment
3723 before the first push (the last arg). */
3724 if (argblock
== 0 && PUSH_ARGS_REVERSED
)
3725 anti_adjust_stack (GEN_INT (args_size
.constant
3726 - original_args_size
.constant
));
3728 if (PUSH_ARGS_REVERSED
)
3739 #ifdef REG_PARM_STACK_SPACE
3740 if (ACCUMULATE_OUTGOING_ARGS
)
3742 /* The argument list is the property of the called routine and it
3743 may clobber it. If the fixed area has been used for previous
3744 parameters, we must save and restore it. */
3745 save_area
= save_fixed_argument_area (reg_parm_stack_space
, argblock
,
3746 &low_to_save
, &high_to_save
);
3750 /* Push the args that need to be pushed. */
3752 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3753 are to be pushed. */
3754 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
3756 enum machine_mode mode
= argvec
[argnum
].mode
;
3757 rtx val
= argvec
[argnum
].value
;
3758 rtx reg
= argvec
[argnum
].reg
;
3759 int partial
= argvec
[argnum
].partial
;
3760 int lower_bound
= 0, upper_bound
= 0, i
;
3762 if (! (reg
!= 0 && partial
== 0))
3764 if (ACCUMULATE_OUTGOING_ARGS
)
3766 /* If this is being stored into a pre-allocated, fixed-size,
3767 stack area, save any previous data at that location. */
3769 #ifdef ARGS_GROW_DOWNWARD
3770 /* stack_slot is negative, but we want to index stack_usage_map
3771 with positive values. */
3772 upper_bound
= -argvec
[argnum
].locate
.offset
.constant
+ 1;
3773 lower_bound
= upper_bound
- argvec
[argnum
].locate
.size
.constant
;
3775 lower_bound
= argvec
[argnum
].locate
.offset
.constant
;
3776 upper_bound
= lower_bound
+ argvec
[argnum
].locate
.size
.constant
;
3780 /* Don't worry about things in the fixed argument area;
3781 it has already been saved. */
3782 if (i
< reg_parm_stack_space
)
3783 i
= reg_parm_stack_space
;
3784 while (i
< upper_bound
&& stack_usage_map
[i
] == 0)
3787 if (i
< upper_bound
)
3789 /* We need to make a save area. */
3791 = argvec
[argnum
].locate
.size
.constant
* BITS_PER_UNIT
;
3792 enum machine_mode save_mode
3793 = mode_for_size (size
, MODE_INT
, 1);
3795 = plus_constant (argblock
,
3796 argvec
[argnum
].locate
.offset
.constant
);
3798 = gen_rtx_MEM (save_mode
, memory_address (save_mode
, adr
));
3800 if (save_mode
== BLKmode
)
3802 argvec
[argnum
].save_area
3803 = assign_stack_temp (BLKmode
,
3804 argvec
[argnum
].locate
.size
.constant
,
3807 emit_block_move (validize_mem (argvec
[argnum
].save_area
),
3809 GEN_INT (argvec
[argnum
].locate
.size
.constant
),
3810 BLOCK_OP_CALL_PARM
);
3814 argvec
[argnum
].save_area
= gen_reg_rtx (save_mode
);
3816 emit_move_insn (argvec
[argnum
].save_area
, stack_area
);
3821 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, PARM_BOUNDARY
,
3822 partial
, reg
, 0, argblock
,
3823 GEN_INT (argvec
[argnum
].locate
.offset
.constant
),
3824 reg_parm_stack_space
,
3825 ARGS_SIZE_RTX (argvec
[argnum
].locate
.alignment_pad
));
3827 /* Now mark the segment we just used. */
3828 if (ACCUMULATE_OUTGOING_ARGS
)
3829 for (i
= lower_bound
; i
< upper_bound
; i
++)
3830 stack_usage_map
[i
] = 1;
3836 /* If we pushed args in forward order, perform stack alignment
3837 after pushing the last arg. */
3838 if (argblock
== 0 && !PUSH_ARGS_REVERSED
)
3839 anti_adjust_stack (GEN_INT (args_size
.constant
3840 - original_args_size
.constant
));
3842 if (PUSH_ARGS_REVERSED
)
3847 fun
= prepare_call_address (fun
, NULL
, &call_fusage
, 0, 0);
3849 /* Now load any reg parms into their regs. */
3851 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3852 are to be pushed. */
3853 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
3855 enum machine_mode mode
= argvec
[argnum
].mode
;
3856 rtx val
= argvec
[argnum
].value
;
3857 rtx reg
= argvec
[argnum
].reg
;
3858 int partial
= argvec
[argnum
].partial
;
3860 /* Handle calls that pass values in multiple non-contiguous
3861 locations. The PA64 has examples of this for library calls. */
3862 if (reg
!= 0 && GET_CODE (reg
) == PARALLEL
)
3863 emit_group_load (reg
, val
, NULL_TREE
, GET_MODE_SIZE (mode
));
3864 else if (reg
!= 0 && partial
== 0)
3865 emit_move_insn (reg
, val
);
3870 /* Any regs containing parms remain in use through the call. */
3871 for (count
= 0; count
< nargs
; count
++)
3873 rtx reg
= argvec
[count
].reg
;
3874 if (reg
!= 0 && GET_CODE (reg
) == PARALLEL
)
3875 use_group_regs (&call_fusage
, reg
);
3877 use_reg (&call_fusage
, reg
);
3880 /* Pass the function the address in which to return a structure value. */
3881 if (mem_value
!= 0 && struct_value
!= 0 && ! pcc_struct_value
)
3883 emit_move_insn (struct_value
,
3885 force_operand (XEXP (mem_value
, 0),
3887 if (REG_P (struct_value
))
3888 use_reg (&call_fusage
, struct_value
);
3891 /* Don't allow popping to be deferred, since then
3892 cse'ing of library calls could delete a call and leave the pop. */
3894 valreg
= (mem_value
== 0 && outmode
!= VOIDmode
3895 ? hard_libcall_value (outmode
) : NULL_RTX
);
3897 /* Stack must be properly aligned now. */
3898 if (stack_pointer_delta
& (PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
- 1))
3901 before_call
= get_last_insn ();
3903 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3904 will set inhibit_defer_pop to that value. */
3905 /* The return type is needed to decide how many bytes the function pops.
3906 Signedness plays no role in that, so for simplicity, we pretend it's
3907 always signed. We also assume that the list of arguments passed has
3908 no impact, so we pretend it is unknown. */
3910 emit_call_1 (fun
, NULL
,
3911 get_identifier (XSTR (orgfun
, 0)),
3912 build_function_type (tfom
, NULL_TREE
),
3913 original_args_size
.constant
, args_size
.constant
,
3915 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
3917 old_inhibit_defer_pop
+ 1, call_fusage
, flags
, & args_so_far
);
3919 /* For calls to `setjmp', etc., inform flow.c it should complain
3920 if nonvolatile values are live. For functions that cannot return,
3921 inform flow that control does not fall through. */
3923 if (flags
& (ECF_NORETURN
| ECF_LONGJMP
))
3925 /* The barrier note must be emitted
3926 immediately after the CALL_INSN. Some ports emit more than
3927 just a CALL_INSN above, so we must search for it here. */
3929 rtx last
= get_last_insn ();
3930 while (!CALL_P (last
))
3932 last
= PREV_INSN (last
);
3933 /* There was no CALL_INSN? */
3934 if (last
== before_call
)
3938 emit_barrier_after (last
);
3941 /* Now restore inhibit_defer_pop to its actual original value. */
3944 /* If call is cse'able, make appropriate pair of reg-notes around it.
3945 Test valreg so we don't crash; may safely ignore `const'
3946 if return type is void. Disable for PARALLEL return values, because
3947 we have no way to move such values into a pseudo register. */
3948 if (flags
& ECF_LIBCALL_BLOCK
)
3954 insns
= get_insns ();
3964 if (GET_CODE (valreg
) == PARALLEL
)
3966 temp
= gen_reg_rtx (outmode
);
3967 emit_group_store (temp
, valreg
, NULL_TREE
,
3968 GET_MODE_SIZE (outmode
));
3972 temp
= gen_reg_rtx (GET_MODE (valreg
));
3974 /* Construct an "equal form" for the value which mentions all the
3975 arguments in order as well as the function name. */
3976 for (i
= 0; i
< nargs
; i
++)
3977 note
= gen_rtx_EXPR_LIST (VOIDmode
, argvec
[i
].value
, note
);
3978 note
= gen_rtx_EXPR_LIST (VOIDmode
, fun
, note
);
3980 insns
= get_insns ();
3983 if (flags
& ECF_PURE
)
3984 note
= gen_rtx_EXPR_LIST (VOIDmode
,
3985 gen_rtx_USE (VOIDmode
,
3986 gen_rtx_MEM (BLKmode
,
3987 gen_rtx_SCRATCH (VOIDmode
))),
3990 emit_libcall_block (insns
, temp
, valreg
, note
);
3997 /* Copy the value to the right place. */
3998 if (outmode
!= VOIDmode
&& retval
)
4004 if (value
!= mem_value
)
4005 emit_move_insn (value
, mem_value
);
4007 else if (GET_CODE (valreg
) == PARALLEL
)
4010 value
= gen_reg_rtx (outmode
);
4011 emit_group_store (value
, valreg
, NULL_TREE
, GET_MODE_SIZE (outmode
));
4013 else if (value
!= 0)
4014 emit_move_insn (value
, valreg
);
4019 if (ACCUMULATE_OUTGOING_ARGS
)
4021 #ifdef REG_PARM_STACK_SPACE
4023 restore_fixed_argument_area (save_area
, argblock
,
4024 high_to_save
, low_to_save
);
4027 /* If we saved any argument areas, restore them. */
4028 for (count
= 0; count
< nargs
; count
++)
4029 if (argvec
[count
].save_area
)
4031 enum machine_mode save_mode
= GET_MODE (argvec
[count
].save_area
);
4032 rtx adr
= plus_constant (argblock
,
4033 argvec
[count
].locate
.offset
.constant
);
4034 rtx stack_area
= gen_rtx_MEM (save_mode
,
4035 memory_address (save_mode
, adr
));
4037 if (save_mode
== BLKmode
)
4038 emit_block_move (stack_area
,
4039 validize_mem (argvec
[count
].save_area
),
4040 GEN_INT (argvec
[count
].locate
.size
.constant
),
4041 BLOCK_OP_CALL_PARM
);
4043 emit_move_insn (stack_area
, argvec
[count
].save_area
);
4046 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
4047 stack_usage_map
= initial_stack_usage_map
;
4054 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4055 (emitting the queue unless NO_QUEUE is nonzero),
4056 for a value of mode OUTMODE,
4057 with NARGS different arguments, passed as alternating rtx values
4058 and machine_modes to convert them to.
4059 The rtx values should have been passed through protect_from_queue already.
4061 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4062 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4063 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4064 LCT_PURE_MAKE_BLOCK for `purep' calls which should be enclosed in
4065 REG_LIBCALL/REG_RETVAL notes with extra (use (memory (scratch)),
4066 or other LCT_ value for other types of library calls. */
4069 emit_library_call (rtx orgfun
, enum libcall_type fn_type
,
4070 enum machine_mode outmode
, int nargs
, ...)
4074 va_start (p
, nargs
);
4075 emit_library_call_value_1 (0, orgfun
, NULL_RTX
, fn_type
, outmode
, nargs
, p
);
4079 /* Like emit_library_call except that an extra argument, VALUE,
4080 comes second and says where to store the result.
4081 (If VALUE is zero, this function chooses a convenient way
4082 to return the value.
4084 This function returns an rtx for where the value is to be found.
4085 If VALUE is nonzero, VALUE is returned. */
4088 emit_library_call_value (rtx orgfun
, rtx value
,
4089 enum libcall_type fn_type
,
4090 enum machine_mode outmode
, int nargs
, ...)
4095 va_start (p
, nargs
);
4096 result
= emit_library_call_value_1 (1, orgfun
, value
, fn_type
, outmode
,
4103 /* Store a single argument for a function call
4104 into the register or memory area where it must be passed.
4105 *ARG describes the argument value and where to pass it.
4107 ARGBLOCK is the address of the stack-block for all the arguments,
4108 or 0 on a machine where arguments are pushed individually.
4110 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4111 so must be careful about how the stack is used.
4113 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4114 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4115 that we need not worry about saving and restoring the stack.
4117 FNDECL is the declaration of the function we are calling.
4119 Return nonzero if this arg should cause sibcall failure,
4123 store_one_arg (struct arg_data
*arg
, rtx argblock
, int flags
,
4124 int variable_size ATTRIBUTE_UNUSED
, int reg_parm_stack_space
)
4126 tree pval
= arg
->tree_value
;
4130 int i
, lower_bound
= 0, upper_bound
= 0;
4131 int sibcall_failure
= 0;
4133 if (TREE_CODE (pval
) == ERROR_MARK
)
4136 /* Push a new temporary level for any temporaries we make for
4140 if (ACCUMULATE_OUTGOING_ARGS
&& !(flags
& ECF_SIBCALL
))
4142 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4143 save any previous data at that location. */
4144 if (argblock
&& ! variable_size
&& arg
->stack
)
4146 #ifdef ARGS_GROW_DOWNWARD
4147 /* stack_slot is negative, but we want to index stack_usage_map
4148 with positive values. */
4149 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
4150 upper_bound
= -INTVAL (XEXP (XEXP (arg
->stack_slot
, 0), 1)) + 1;
4154 lower_bound
= upper_bound
- arg
->locate
.size
.constant
;
4156 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
4157 lower_bound
= INTVAL (XEXP (XEXP (arg
->stack_slot
, 0), 1));
4161 upper_bound
= lower_bound
+ arg
->locate
.size
.constant
;
4165 /* Don't worry about things in the fixed argument area;
4166 it has already been saved. */
4167 if (i
< reg_parm_stack_space
)
4168 i
= reg_parm_stack_space
;
4169 while (i
< upper_bound
&& stack_usage_map
[i
] == 0)
4172 if (i
< upper_bound
)
4174 /* We need to make a save area. */
4175 unsigned int size
= arg
->locate
.size
.constant
* BITS_PER_UNIT
;
4176 enum machine_mode save_mode
= mode_for_size (size
, MODE_INT
, 1);
4177 rtx adr
= memory_address (save_mode
, XEXP (arg
->stack_slot
, 0));
4178 rtx stack_area
= gen_rtx_MEM (save_mode
, adr
);
4180 if (save_mode
== BLKmode
)
4182 tree ot
= TREE_TYPE (arg
->tree_value
);
4183 tree nt
= build_qualified_type (ot
, (TYPE_QUALS (ot
)
4184 | TYPE_QUAL_CONST
));
4186 arg
->save_area
= assign_temp (nt
, 0, 1, 1);
4187 preserve_temp_slots (arg
->save_area
);
4188 emit_block_move (validize_mem (arg
->save_area
), stack_area
,
4189 expr_size (arg
->tree_value
),
4190 BLOCK_OP_CALL_PARM
);
4194 arg
->save_area
= gen_reg_rtx (save_mode
);
4195 emit_move_insn (arg
->save_area
, stack_area
);
4201 /* If this isn't going to be placed on both the stack and in registers,
4202 set up the register and number of words. */
4203 if (! arg
->pass_on_stack
)
4205 if (flags
& ECF_SIBCALL
)
4206 reg
= arg
->tail_call_reg
;
4209 partial
= arg
->partial
;
4212 if (reg
!= 0 && partial
== 0)
4213 /* Being passed entirely in a register. We shouldn't be called in
4217 /* If this arg needs special alignment, don't load the registers
4219 if (arg
->n_aligned_regs
!= 0)
4222 /* If this is being passed partially in a register, we can't evaluate
4223 it directly into its stack slot. Otherwise, we can. */
4224 if (arg
->value
== 0)
4226 /* stack_arg_under_construction is nonzero if a function argument is
4227 being evaluated directly into the outgoing argument list and
4228 expand_call must take special action to preserve the argument list
4229 if it is called recursively.
4231 For scalar function arguments stack_usage_map is sufficient to
4232 determine which stack slots must be saved and restored. Scalar
4233 arguments in general have pass_on_stack == 0.
4235 If this argument is initialized by a function which takes the
4236 address of the argument (a C++ constructor or a C function
4237 returning a BLKmode structure), then stack_usage_map is
4238 insufficient and expand_call must push the stack around the
4239 function call. Such arguments have pass_on_stack == 1.
4241 Note that it is always safe to set stack_arg_under_construction,
4242 but this generates suboptimal code if set when not needed. */
4244 if (arg
->pass_on_stack
)
4245 stack_arg_under_construction
++;
4247 arg
->value
= expand_expr (pval
,
4249 || TYPE_MODE (TREE_TYPE (pval
)) != arg
->mode
)
4250 ? NULL_RTX
: arg
->stack
,
4251 VOIDmode
, EXPAND_STACK_PARM
);
4253 /* If we are promoting object (or for any other reason) the mode
4254 doesn't agree, convert the mode. */
4256 if (arg
->mode
!= TYPE_MODE (TREE_TYPE (pval
)))
4257 arg
->value
= convert_modes (arg
->mode
, TYPE_MODE (TREE_TYPE (pval
)),
4258 arg
->value
, arg
->unsignedp
);
4260 if (arg
->pass_on_stack
)
4261 stack_arg_under_construction
--;
4264 /* Don't allow anything left on stack from computation
4265 of argument to alloca. */
4266 if (flags
& ECF_MAY_BE_ALLOCA
)
4267 do_pending_stack_adjust ();
4269 if (arg
->value
== arg
->stack
)
4270 /* If the value is already in the stack slot, we are done. */
4272 else if (arg
->mode
!= BLKmode
)
4276 /* Argument is a scalar, not entirely passed in registers.
4277 (If part is passed in registers, arg->partial says how much
4278 and emit_push_insn will take care of putting it there.)
4280 Push it, and if its size is less than the
4281 amount of space allocated to it,
4282 also bump stack pointer by the additional space.
4283 Note that in C the default argument promotions
4284 will prevent such mismatches. */
4286 size
= GET_MODE_SIZE (arg
->mode
);
4287 /* Compute how much space the push instruction will push.
4288 On many machines, pushing a byte will advance the stack
4289 pointer by a halfword. */
4290 #ifdef PUSH_ROUNDING
4291 size
= PUSH_ROUNDING (size
);
4295 /* Compute how much space the argument should get:
4296 round up to a multiple of the alignment for arguments. */
4297 if (none
!= FUNCTION_ARG_PADDING (arg
->mode
, TREE_TYPE (pval
)))
4298 used
= (((size
+ PARM_BOUNDARY
/ BITS_PER_UNIT
- 1)
4299 / (PARM_BOUNDARY
/ BITS_PER_UNIT
))
4300 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
4302 /* This isn't already where we want it on the stack, so put it there.
4303 This can either be done with push or copy insns. */
4304 emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
), NULL_RTX
,
4305 PARM_BOUNDARY
, partial
, reg
, used
- size
, argblock
,
4306 ARGS_SIZE_RTX (arg
->locate
.offset
), reg_parm_stack_space
,
4307 ARGS_SIZE_RTX (arg
->locate
.alignment_pad
));
4309 /* Unless this is a partially-in-register argument, the argument is now
4312 arg
->value
= arg
->stack
;
4316 /* BLKmode, at least partly to be pushed. */
4318 unsigned int parm_align
;
4322 /* Pushing a nonscalar.
4323 If part is passed in registers, PARTIAL says how much
4324 and emit_push_insn will take care of putting it there. */
4326 /* Round its size up to a multiple
4327 of the allocation unit for arguments. */
4329 if (arg
->locate
.size
.var
!= 0)
4332 size_rtx
= ARGS_SIZE_RTX (arg
->locate
.size
);
4336 /* PUSH_ROUNDING has no effect on us, because
4337 emit_push_insn for BLKmode is careful to avoid it. */
4338 if (reg
&& GET_CODE (reg
) == PARALLEL
)
4340 /* Use the size of the elt to compute excess. */
4341 rtx elt
= XEXP (XVECEXP (reg
, 0, 0), 0);
4342 excess
= (arg
->locate
.size
.constant
4343 - int_size_in_bytes (TREE_TYPE (pval
))
4344 + partial
* GET_MODE_SIZE (GET_MODE (elt
)));
4347 excess
= (arg
->locate
.size
.constant
4348 - int_size_in_bytes (TREE_TYPE (pval
))
4349 + partial
* UNITS_PER_WORD
);
4350 size_rtx
= expand_expr (size_in_bytes (TREE_TYPE (pval
)),
4351 NULL_RTX
, TYPE_MODE (sizetype
), 0);
4354 /* Some types will require stricter alignment, which will be
4355 provided for elsewhere in argument layout. */
4356 parm_align
= MAX (PARM_BOUNDARY
, TYPE_ALIGN (TREE_TYPE (pval
)));
4358 /* When an argument is padded down, the block is aligned to
4359 PARM_BOUNDARY, but the actual argument isn't. */
4360 if (FUNCTION_ARG_PADDING (arg
->mode
, TREE_TYPE (pval
)) == downward
)
4362 if (arg
->locate
.size
.var
)
4363 parm_align
= BITS_PER_UNIT
;
4366 unsigned int excess_align
= (excess
& -excess
) * BITS_PER_UNIT
;
4367 parm_align
= MIN (parm_align
, excess_align
);
4371 if ((flags
& ECF_SIBCALL
) && MEM_P (arg
->value
))
4373 /* emit_push_insn might not work properly if arg->value and
4374 argblock + arg->locate.offset areas overlap. */
4378 if (XEXP (x
, 0) == current_function_internal_arg_pointer
4379 || (GET_CODE (XEXP (x
, 0)) == PLUS
4380 && XEXP (XEXP (x
, 0), 0) ==
4381 current_function_internal_arg_pointer
4382 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
))
4384 if (XEXP (x
, 0) != current_function_internal_arg_pointer
)
4385 i
= INTVAL (XEXP (XEXP (x
, 0), 1));
4387 /* expand_call should ensure this. */
4388 if (arg
->locate
.offset
.var
|| GET_CODE (size_rtx
) != CONST_INT
)
4391 if (arg
->locate
.offset
.constant
> i
)
4393 if (arg
->locate
.offset
.constant
< i
+ INTVAL (size_rtx
))
4394 sibcall_failure
= 1;
4396 else if (arg
->locate
.offset
.constant
< i
)
4398 if (i
< arg
->locate
.offset
.constant
+ INTVAL (size_rtx
))
4399 sibcall_failure
= 1;
4404 emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
), size_rtx
,
4405 parm_align
, partial
, reg
, excess
, argblock
,
4406 ARGS_SIZE_RTX (arg
->locate
.offset
), reg_parm_stack_space
,
4407 ARGS_SIZE_RTX (arg
->locate
.alignment_pad
));
4409 /* Unless this is a partially-in-register argument, the argument is now
4412 ??? Unlike the case above, in which we want the actual
4413 address of the data, so that we can load it directly into a
4414 register, here we want the address of the stack slot, so that
4415 it's properly aligned for word-by-word copying or something
4416 like that. It's not clear that this is always correct. */
4418 arg
->value
= arg
->stack_slot
;
4421 /* Mark all slots this store used. */
4422 if (ACCUMULATE_OUTGOING_ARGS
&& !(flags
& ECF_SIBCALL
)
4423 && argblock
&& ! variable_size
&& arg
->stack
)
4424 for (i
= lower_bound
; i
< upper_bound
; i
++)
4425 stack_usage_map
[i
] = 1;
4427 /* Once we have pushed something, pops can't safely
4428 be deferred during the rest of the arguments. */
4431 /* ANSI doesn't require a sequence point here,
4432 but PCC has one, so this will avoid some problems. */
4435 /* Free any temporary slots made in processing this argument. Show
4436 that we might have taken the address of something and pushed that
4438 preserve_temp_slots (NULL_RTX
);
4442 return sibcall_failure
;
4445 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4448 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED
,
4454 /* If the type has variable size... */
4455 if (TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4458 /* If the type is marked as addressable (it is required
4459 to be constructed into the stack)... */
4460 if (TREE_ADDRESSABLE (type
))
4466 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4467 takes trailing padding of a structure into account. */
4468 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4471 must_pass_in_stack_var_size_or_pad (enum machine_mode mode
, tree type
)
4476 /* If the type has variable size... */
4477 if (TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4480 /* If the type is marked as addressable (it is required
4481 to be constructed into the stack)... */
4482 if (TREE_ADDRESSABLE (type
))
4485 /* If the padding and mode of the type is such that a copy into
4486 a register would put it into the wrong part of the register. */
4488 && int_size_in_bytes (type
) % (PARM_BOUNDARY
/ BITS_PER_UNIT
)
4489 && (FUNCTION_ARG_PADDING (mode
, type
)
4490 == (BYTES_BIG_ENDIAN
? upward
: downward
)))