1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
31 #include "stringpool.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
40 #include "internal-fn.h"
46 #include "langhooks.h"
51 #include "tree-ssanames.h"
52 #include "tree-ssa-strlen.h"
54 #include "stringpool.h"
57 #include "gimple-fold.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
62 /* Data structure and subroutines used within expand_call. */
66 /* Tree node for this argument. */
68 /* Mode for value; TYPE_MODE unless promoted. */
70 /* Current RTL value for argument, or 0 if it isn't precomputed. */
72 /* Initially-compute RTL value for argument; only for const functions. */
74 /* Register to pass this argument in, 0 if passed on stack, or an
75 PARALLEL if the arg is to be copied into multiple non-contiguous
78 /* Register to pass this argument in when generating tail call sequence.
79 This is not the same register as for normal calls on machines with
82 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
83 form for emit_group_move. */
85 /* If REG was promoted from the actual mode of the argument expression,
86 indicates whether the promotion is sign- or zero-extended. */
88 /* Number of bytes to put in registers. 0 means put the whole arg
89 in registers. Also 0 if not passed in registers. */
91 /* Nonzero if argument must be passed on stack.
92 Note that some arguments may be passed on the stack
93 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
94 pass_on_stack identifies arguments that *cannot* go in registers. */
96 /* Some fields packaged up for locate_and_pad_parm. */
97 struct locate_and_pad_arg_data locate
;
98 /* Location on the stack at which parameter should be stored. The store
99 has already been done if STACK == VALUE. */
101 /* Location on the stack of the start of this argument slot. This can
102 differ from STACK if this arg pads downward. This location is known
103 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
105 /* Place that this stack area has been saved, if needed. */
107 /* If an argument's alignment does not permit direct copying into registers,
108 copy in smaller-sized pieces into pseudos. These are stored in a
109 block pointed to by this field. The next field says how many
110 word-sized pseudos we made. */
115 /* A vector of one char per byte of stack space. A byte if nonzero if
116 the corresponding stack location has been used.
117 This vector is used to prevent a function call within an argument from
118 clobbering any stack already set up. */
119 static char *stack_usage_map
;
121 /* Size of STACK_USAGE_MAP. */
122 static unsigned int highest_outgoing_arg_in_use
;
124 /* Assume that any stack location at this byte index is used,
125 without checking the contents of stack_usage_map. */
126 static unsigned HOST_WIDE_INT stack_usage_watermark
= HOST_WIDE_INT_M1U
;
128 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
129 stack location's tail call argument has been already stored into the stack.
130 This bitmap is used to prevent sibling call optimization if function tries
131 to use parent's incoming argument slots when they have been already
132 overwritten with tail call arguments. */
133 static sbitmap stored_args_map
;
135 /* Assume that any virtual-incoming location at this byte index has been
136 stored, without checking the contents of stored_args_map. */
137 static unsigned HOST_WIDE_INT stored_args_watermark
;
139 /* stack_arg_under_construction is nonzero when an argument may be
140 initialized with a constructor call (including a C function that
141 returns a BLKmode struct) and expand_call must take special action
142 to make sure the object being constructed does not overlap the
143 argument list for the constructor call. */
144 static int stack_arg_under_construction
;
146 static void precompute_register_parameters (int, struct arg_data
*, int *);
147 static int store_one_arg (struct arg_data
*, rtx
, int, int, int);
148 static void store_unaligned_arguments_into_pseudos (struct arg_data
*, int);
149 static int finalize_must_preallocate (int, int, struct arg_data
*,
151 static void precompute_arguments (int, struct arg_data
*);
152 static void compute_argument_addresses (struct arg_data
*, rtx
, int);
153 static rtx
rtx_for_function_call (tree
, tree
);
154 static void load_register_parameters (struct arg_data
*, int, rtx
*, int,
156 static int special_function_p (const_tree
, int);
157 static int check_sibcall_argument_overlap_1 (rtx
);
158 static int check_sibcall_argument_overlap (rtx_insn
*, struct arg_data
*, int);
160 static tree
split_complex_types (tree
);
162 #ifdef REG_PARM_STACK_SPACE
163 static rtx
save_fixed_argument_area (int, rtx
, int *, int *);
164 static void restore_fixed_argument_area (rtx
, rtx
, int, int);
167 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
168 stack region might already be in use. */
171 stack_region_maybe_used_p (poly_uint64 lower_bound
, poly_uint64 upper_bound
,
172 unsigned int reg_parm_stack_space
)
174 unsigned HOST_WIDE_INT const_lower
, const_upper
;
175 const_lower
= constant_lower_bound (lower_bound
);
176 if (!upper_bound
.is_constant (&const_upper
))
177 const_upper
= HOST_WIDE_INT_M1U
;
179 if (const_upper
> stack_usage_watermark
)
182 /* Don't worry about things in the fixed argument area;
183 it has already been saved. */
184 const_lower
= MAX (const_lower
, reg_parm_stack_space
);
185 const_upper
= MIN (const_upper
, highest_outgoing_arg_in_use
);
186 for (unsigned HOST_WIDE_INT i
= const_lower
; i
< const_upper
; ++i
)
187 if (stack_usage_map
[i
])
192 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
193 stack region are now in use. */
196 mark_stack_region_used (poly_uint64 lower_bound
, poly_uint64 upper_bound
)
198 unsigned HOST_WIDE_INT const_lower
, const_upper
;
199 const_lower
= constant_lower_bound (lower_bound
);
200 if (upper_bound
.is_constant (&const_upper
))
201 for (unsigned HOST_WIDE_INT i
= const_lower
; i
< const_upper
; ++i
)
202 stack_usage_map
[i
] = 1;
204 stack_usage_watermark
= MIN (stack_usage_watermark
, const_lower
);
207 /* Force FUNEXP into a form suitable for the address of a CALL,
208 and return that as an rtx. Also load the static chain register
209 if FNDECL is a nested function.
211 CALL_FUSAGE points to a variable holding the prospective
212 CALL_INSN_FUNCTION_USAGE information. */
215 prepare_call_address (tree fndecl_or_type
, rtx funexp
, rtx static_chain_value
,
216 rtx
*call_fusage
, int reg_parm_seen
, int flags
)
218 /* Make a valid memory address and copy constants through pseudo-regs,
219 but not for a constant address if -fno-function-cse. */
220 if (GET_CODE (funexp
) != SYMBOL_REF
)
222 /* If it's an indirect call by descriptor, generate code to perform
223 runtime identification of the pointer and load the descriptor. */
224 if ((flags
& ECF_BY_DESCRIPTOR
) && !flag_trampolines
)
226 const int bit_val
= targetm
.calls
.custom_function_descriptors
;
227 rtx call_lab
= gen_label_rtx ();
229 gcc_assert (fndecl_or_type
&& TYPE_P (fndecl_or_type
));
231 = build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
, NULL_TREE
,
233 DECL_STATIC_CHAIN (fndecl_or_type
) = 1;
234 rtx chain
= targetm
.calls
.static_chain (fndecl_or_type
, false);
236 if (GET_MODE (funexp
) != Pmode
)
237 funexp
= convert_memory_address (Pmode
, funexp
);
239 /* Avoid long live ranges around function calls. */
240 funexp
= copy_to_mode_reg (Pmode
, funexp
);
243 emit_insn (gen_rtx_CLOBBER (VOIDmode
, chain
));
245 /* Emit the runtime identification pattern. */
246 rtx mask
= gen_rtx_AND (Pmode
, funexp
, GEN_INT (bit_val
));
247 emit_cmp_and_jump_insns (mask
, const0_rtx
, EQ
, NULL_RTX
, Pmode
, 1,
250 /* Statically predict the branch to very likely taken. */
251 rtx_insn
*insn
= get_last_insn ();
253 predict_insn_def (insn
, PRED_BUILTIN_EXPECT
, TAKEN
);
255 /* Load the descriptor. */
256 rtx mem
= gen_rtx_MEM (ptr_mode
,
257 plus_constant (Pmode
, funexp
, - bit_val
));
258 MEM_NOTRAP_P (mem
) = 1;
259 mem
= convert_memory_address (Pmode
, mem
);
260 emit_move_insn (chain
, mem
);
262 mem
= gen_rtx_MEM (ptr_mode
,
263 plus_constant (Pmode
, funexp
,
264 POINTER_SIZE
/ BITS_PER_UNIT
266 MEM_NOTRAP_P (mem
) = 1;
267 mem
= convert_memory_address (Pmode
, mem
);
268 emit_move_insn (funexp
, mem
);
270 emit_label (call_lab
);
274 use_reg (call_fusage
, chain
);
275 STATIC_CHAIN_REG_P (chain
) = 1;
278 /* Make sure we're not going to be overwritten below. */
279 gcc_assert (!static_chain_value
);
282 /* If we are using registers for parameters, force the
283 function address into a register now. */
284 funexp
= ((reg_parm_seen
285 && targetm
.small_register_classes_for_mode_p (FUNCTION_MODE
))
286 ? force_not_mem (memory_address (FUNCTION_MODE
, funexp
))
287 : memory_address (FUNCTION_MODE
, funexp
));
291 /* funexp could be a SYMBOL_REF represents a function pointer which is
292 of ptr_mode. In this case, it should be converted into address mode
293 to be a valid address for memory rtx pattern. See PR 64971. */
294 if (GET_MODE (funexp
) != Pmode
)
295 funexp
= convert_memory_address (Pmode
, funexp
);
297 if (!(flags
& ECF_SIBCALL
))
299 if (!NO_FUNCTION_CSE
&& optimize
&& ! flag_no_function_cse
)
300 funexp
= force_reg (Pmode
, funexp
);
304 if (static_chain_value
!= 0
305 && (TREE_CODE (fndecl_or_type
) != FUNCTION_DECL
306 || DECL_STATIC_CHAIN (fndecl_or_type
)))
310 chain
= targetm
.calls
.static_chain (fndecl_or_type
, false);
311 static_chain_value
= convert_memory_address (Pmode
, static_chain_value
);
313 emit_move_insn (chain
, static_chain_value
);
316 use_reg (call_fusage
, chain
);
317 STATIC_CHAIN_REG_P (chain
) = 1;
324 /* Generate instructions to call function FUNEXP,
325 and optionally pop the results.
326 The CALL_INSN is the first insn generated.
328 FNDECL is the declaration node of the function. This is given to the
329 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
332 FUNTYPE is the data type of the function. This is given to the hook
333 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
334 own args. We used to allow an identifier for library functions, but
335 that doesn't work when the return type is an aggregate type and the
336 calling convention says that the pointer to this aggregate is to be
337 popped by the callee.
339 STACK_SIZE is the number of bytes of arguments on the stack,
340 ROUNDED_STACK_SIZE is that number rounded up to
341 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
342 both to put into the call insn and to generate explicit popping
345 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
346 It is zero if this call doesn't want a structure value.
348 NEXT_ARG_REG is the rtx that results from executing
349 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
350 just after all the args have had their registers assigned.
351 This could be whatever you like, but normally it is the first
352 arg-register beyond those used for args in this call,
353 or 0 if all the arg-registers are used in this call.
354 It is passed on to `gen_call' so you can put this info in the call insn.
356 VALREG is a hard register in which a value is returned,
357 or 0 if the call does not return a value.
359 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
360 the args to this call were processed.
361 We restore `inhibit_defer_pop' to that value.
363 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
364 denote registers used by the called function. */
367 emit_call_1 (rtx funexp
, tree fntree ATTRIBUTE_UNUSED
, tree fndecl ATTRIBUTE_UNUSED
,
368 tree funtype ATTRIBUTE_UNUSED
,
369 poly_int64 stack_size ATTRIBUTE_UNUSED
,
370 poly_int64 rounded_stack_size
,
371 poly_int64 struct_value_size ATTRIBUTE_UNUSED
,
372 rtx next_arg_reg ATTRIBUTE_UNUSED
, rtx valreg
,
373 int old_inhibit_defer_pop
, rtx call_fusage
, int ecf_flags
,
374 cumulative_args_t args_so_far ATTRIBUTE_UNUSED
)
376 rtx rounded_stack_size_rtx
= gen_int_mode (rounded_stack_size
, Pmode
);
377 rtx call
, funmem
, pat
;
378 int already_popped
= 0;
379 poly_int64 n_popped
= 0;
381 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
382 patterns exist). Any popping that the callee does on return will
383 be from our caller's frame rather than ours. */
384 if (!(ecf_flags
& ECF_SIBCALL
))
386 n_popped
+= targetm
.calls
.return_pops_args (fndecl
, funtype
, stack_size
);
388 #ifdef CALL_POPS_ARGS
389 n_popped
+= CALL_POPS_ARGS (*get_cumulative_args (args_so_far
));
393 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
394 and we don't want to load it into a register as an optimization,
395 because prepare_call_address already did it if it should be done. */
396 if (GET_CODE (funexp
) != SYMBOL_REF
)
397 funexp
= memory_address (FUNCTION_MODE
, funexp
);
399 funmem
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
400 if (fndecl
&& TREE_CODE (fndecl
) == FUNCTION_DECL
)
404 /* Although a built-in FUNCTION_DECL and its non-__builtin
405 counterpart compare equal and get a shared mem_attrs, they
406 produce different dump output in compare-debug compilations,
407 if an entry gets garbage collected in one compilation, then
408 adds a different (but equivalent) entry, while the other
409 doesn't run the garbage collector at the same spot and then
410 shares the mem_attr with the equivalent entry. */
411 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
413 tree t2
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
418 set_mem_expr (funmem
, t
);
421 set_mem_expr (funmem
, build_simple_mem_ref (CALL_EXPR_FN (fntree
)));
423 if (ecf_flags
& ECF_SIBCALL
)
426 pat
= targetm
.gen_sibcall_value (valreg
, funmem
,
427 rounded_stack_size_rtx
,
428 next_arg_reg
, NULL_RTX
);
430 pat
= targetm
.gen_sibcall (funmem
, rounded_stack_size_rtx
,
432 gen_int_mode (struct_value_size
, Pmode
));
434 /* If the target has "call" or "call_value" insns, then prefer them
435 if no arguments are actually popped. If the target does not have
436 "call" or "call_value" insns, then we must use the popping versions
437 even if the call has no arguments to pop. */
438 else if (maybe_ne (n_popped
, 0)
440 ? targetm
.have_call_value ()
441 : targetm
.have_call ()))
443 rtx n_pop
= gen_int_mode (n_popped
, Pmode
);
445 /* If this subroutine pops its own args, record that in the call insn
446 if possible, for the sake of frame pointer elimination. */
449 pat
= targetm
.gen_call_value_pop (valreg
, funmem
,
450 rounded_stack_size_rtx
,
451 next_arg_reg
, n_pop
);
453 pat
= targetm
.gen_call_pop (funmem
, rounded_stack_size_rtx
,
454 next_arg_reg
, n_pop
);
461 pat
= targetm
.gen_call_value (valreg
, funmem
, rounded_stack_size_rtx
,
462 next_arg_reg
, NULL_RTX
);
464 pat
= targetm
.gen_call (funmem
, rounded_stack_size_rtx
, next_arg_reg
,
465 gen_int_mode (struct_value_size
, Pmode
));
469 /* Find the call we just emitted. */
470 rtx_call_insn
*call_insn
= last_call_insn ();
472 /* Some target create a fresh MEM instead of reusing the one provided
473 above. Set its MEM_EXPR. */
474 call
= get_call_rtx_from (call_insn
);
476 && MEM_EXPR (XEXP (call
, 0)) == NULL_TREE
477 && MEM_EXPR (funmem
) != NULL_TREE
)
478 set_mem_expr (XEXP (call
, 0), MEM_EXPR (funmem
));
480 /* Put the register usage information there. */
481 add_function_usage_to (call_insn
, call_fusage
);
483 /* If this is a const call, then set the insn's unchanging bit. */
484 if (ecf_flags
& ECF_CONST
)
485 RTL_CONST_CALL_P (call_insn
) = 1;
487 /* If this is a pure call, then set the insn's unchanging bit. */
488 if (ecf_flags
& ECF_PURE
)
489 RTL_PURE_CALL_P (call_insn
) = 1;
491 /* If this is a const call, then set the insn's unchanging bit. */
492 if (ecf_flags
& ECF_LOOPING_CONST_OR_PURE
)
493 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn
) = 1;
495 /* Create a nothrow REG_EH_REGION note, if needed. */
496 make_reg_eh_region_note (call_insn
, ecf_flags
, 0);
498 if (ecf_flags
& ECF_NORETURN
)
499 add_reg_note (call_insn
, REG_NORETURN
, const0_rtx
);
501 if (ecf_flags
& ECF_RETURNS_TWICE
)
503 add_reg_note (call_insn
, REG_SETJMP
, const0_rtx
);
504 cfun
->calls_setjmp
= 1;
507 SIBLING_CALL_P (call_insn
) = ((ecf_flags
& ECF_SIBCALL
) != 0);
509 /* Restore this now, so that we do defer pops for this call's args
510 if the context of the call as a whole permits. */
511 inhibit_defer_pop
= old_inhibit_defer_pop
;
513 if (maybe_ne (n_popped
, 0))
516 CALL_INSN_FUNCTION_USAGE (call_insn
)
517 = gen_rtx_EXPR_LIST (VOIDmode
,
518 gen_rtx_CLOBBER (VOIDmode
, stack_pointer_rtx
),
519 CALL_INSN_FUNCTION_USAGE (call_insn
));
520 rounded_stack_size
-= n_popped
;
521 rounded_stack_size_rtx
= gen_int_mode (rounded_stack_size
, Pmode
);
522 stack_pointer_delta
-= n_popped
;
524 add_args_size_note (call_insn
, stack_pointer_delta
);
526 /* If popup is needed, stack realign must use DRAP */
527 if (SUPPORTS_STACK_ALIGNMENT
)
528 crtl
->need_drap
= true;
530 /* For noreturn calls when not accumulating outgoing args force
531 REG_ARGS_SIZE note to prevent crossjumping of calls with different
533 else if (!ACCUMULATE_OUTGOING_ARGS
&& (ecf_flags
& ECF_NORETURN
) != 0)
534 add_args_size_note (call_insn
, stack_pointer_delta
);
536 if (!ACCUMULATE_OUTGOING_ARGS
)
538 /* If returning from the subroutine does not automatically pop the args,
539 we need an instruction to pop them sooner or later.
540 Perhaps do it now; perhaps just record how much space to pop later.
542 If returning from the subroutine does pop the args, indicate that the
543 stack pointer will be changed. */
545 if (maybe_ne (rounded_stack_size
, 0))
547 if (ecf_flags
& ECF_NORETURN
)
548 /* Just pretend we did the pop. */
549 stack_pointer_delta
-= rounded_stack_size
;
550 else if (flag_defer_pop
&& inhibit_defer_pop
== 0
551 && ! (ecf_flags
& (ECF_CONST
| ECF_PURE
)))
552 pending_stack_adjust
+= rounded_stack_size
;
554 adjust_stack (rounded_stack_size_rtx
);
557 /* When we accumulate outgoing args, we must avoid any stack manipulations.
558 Restore the stack pointer to its original value now. Usually
559 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
560 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
561 popping variants of functions exist as well.
563 ??? We may optimize similar to defer_pop above, but it is
564 probably not worthwhile.
566 ??? It will be worthwhile to enable combine_stack_adjustments even for
568 else if (maybe_ne (n_popped
, 0))
569 anti_adjust_stack (gen_int_mode (n_popped
, Pmode
));
572 /* Determine if the function identified by FNDECL is one with
573 special properties we wish to know about. Modify FLAGS accordingly.
575 For example, if the function might return more than one time (setjmp), then
576 set ECF_RETURNS_TWICE.
578 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
579 space from the stack such as alloca. */
582 special_function_p (const_tree fndecl
, int flags
)
584 tree name_decl
= DECL_NAME (fndecl
);
586 if (fndecl
&& name_decl
587 && IDENTIFIER_LENGTH (name_decl
) <= 11
588 /* Exclude functions not at the file scope, or not `extern',
589 since they are not the magic functions we would otherwise
591 FIXME: this should be handled with attributes, not with this
592 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
593 because you can declare fork() inside a function if you
595 && (DECL_CONTEXT (fndecl
) == NULL_TREE
596 || TREE_CODE (DECL_CONTEXT (fndecl
)) == TRANSLATION_UNIT_DECL
)
597 && TREE_PUBLIC (fndecl
))
599 const char *name
= IDENTIFIER_POINTER (name_decl
);
600 const char *tname
= name
;
602 /* We assume that alloca will always be called by name. It
603 makes no sense to pass it as a pointer-to-function to
604 anything that does not understand its behavior. */
605 if (IDENTIFIER_LENGTH (name_decl
) == 6
607 && ! strcmp (name
, "alloca"))
608 flags
|= ECF_MAY_BE_ALLOCA
;
610 /* Disregard prefix _ or __. */
619 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
620 if (! strcmp (tname
, "setjmp")
621 || ! strcmp (tname
, "sigsetjmp")
622 || ! strcmp (name
, "savectx")
623 || ! strcmp (name
, "vfork")
624 || ! strcmp (name
, "getcontext"))
625 flags
|= ECF_RETURNS_TWICE
;
628 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
629 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl
)))
630 flags
|= ECF_MAY_BE_ALLOCA
;
635 /* Similar to special_function_p; return a set of ERF_ flags for the
638 decl_return_flags (tree fndecl
)
641 tree type
= TREE_TYPE (fndecl
);
645 attr
= lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type
));
649 attr
= TREE_VALUE (TREE_VALUE (attr
));
650 if (!attr
|| TREE_STRING_LENGTH (attr
) < 1)
653 switch (TREE_STRING_POINTER (attr
)[0])
659 return ERF_RETURNS_ARG
| (TREE_STRING_POINTER (attr
)[0] - '1');
670 /* Return nonzero when FNDECL represents a call to setjmp. */
673 setjmp_call_p (const_tree fndecl
)
675 if (DECL_IS_RETURNS_TWICE (fndecl
))
676 return ECF_RETURNS_TWICE
;
677 return special_function_p (fndecl
, 0) & ECF_RETURNS_TWICE
;
681 /* Return true if STMT may be an alloca call. */
684 gimple_maybe_alloca_call_p (const gimple
*stmt
)
688 if (!is_gimple_call (stmt
))
691 fndecl
= gimple_call_fndecl (stmt
);
692 if (fndecl
&& (special_function_p (fndecl
, 0) & ECF_MAY_BE_ALLOCA
))
698 /* Return true if STMT is a builtin alloca call. */
701 gimple_alloca_call_p (const gimple
*stmt
)
705 if (!is_gimple_call (stmt
))
708 fndecl
= gimple_call_fndecl (stmt
);
709 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
710 switch (DECL_FUNCTION_CODE (fndecl
))
712 CASE_BUILT_IN_ALLOCA
:
713 return gimple_call_num_args (stmt
) > 0;
721 /* Return true when exp contains a builtin alloca call. */
724 alloca_call_p (const_tree exp
)
727 if (TREE_CODE (exp
) == CALL_EXPR
728 && (fndecl
= get_callee_fndecl (exp
))
729 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
730 switch (DECL_FUNCTION_CODE (fndecl
))
732 CASE_BUILT_IN_ALLOCA
:
741 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
742 function. Return FALSE otherwise. */
745 is_tm_builtin (const_tree fndecl
)
750 if (decl_is_tm_clone (fndecl
))
753 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
755 switch (DECL_FUNCTION_CODE (fndecl
))
757 case BUILT_IN_TM_COMMIT
:
758 case BUILT_IN_TM_COMMIT_EH
:
759 case BUILT_IN_TM_ABORT
:
760 case BUILT_IN_TM_IRREVOCABLE
:
761 case BUILT_IN_TM_GETTMCLONE_IRR
:
762 case BUILT_IN_TM_MEMCPY
:
763 case BUILT_IN_TM_MEMMOVE
:
764 case BUILT_IN_TM_MEMSET
:
765 CASE_BUILT_IN_TM_STORE (1):
766 CASE_BUILT_IN_TM_STORE (2):
767 CASE_BUILT_IN_TM_STORE (4):
768 CASE_BUILT_IN_TM_STORE (8):
769 CASE_BUILT_IN_TM_STORE (FLOAT
):
770 CASE_BUILT_IN_TM_STORE (DOUBLE
):
771 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
772 CASE_BUILT_IN_TM_STORE (M64
):
773 CASE_BUILT_IN_TM_STORE (M128
):
774 CASE_BUILT_IN_TM_STORE (M256
):
775 CASE_BUILT_IN_TM_LOAD (1):
776 CASE_BUILT_IN_TM_LOAD (2):
777 CASE_BUILT_IN_TM_LOAD (4):
778 CASE_BUILT_IN_TM_LOAD (8):
779 CASE_BUILT_IN_TM_LOAD (FLOAT
):
780 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
781 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
782 CASE_BUILT_IN_TM_LOAD (M64
):
783 CASE_BUILT_IN_TM_LOAD (M128
):
784 CASE_BUILT_IN_TM_LOAD (M256
):
785 case BUILT_IN_TM_LOG
:
786 case BUILT_IN_TM_LOG_1
:
787 case BUILT_IN_TM_LOG_2
:
788 case BUILT_IN_TM_LOG_4
:
789 case BUILT_IN_TM_LOG_8
:
790 case BUILT_IN_TM_LOG_FLOAT
:
791 case BUILT_IN_TM_LOG_DOUBLE
:
792 case BUILT_IN_TM_LOG_LDOUBLE
:
793 case BUILT_IN_TM_LOG_M64
:
794 case BUILT_IN_TM_LOG_M128
:
795 case BUILT_IN_TM_LOG_M256
:
804 /* Detect flags (function attributes) from the function decl or type node. */
807 flags_from_decl_or_type (const_tree exp
)
813 /* The function exp may have the `malloc' attribute. */
814 if (DECL_IS_MALLOC (exp
))
817 /* The function exp may have the `returns_twice' attribute. */
818 if (DECL_IS_RETURNS_TWICE (exp
))
819 flags
|= ECF_RETURNS_TWICE
;
821 /* Process the pure and const attributes. */
822 if (TREE_READONLY (exp
))
824 if (DECL_PURE_P (exp
))
826 if (DECL_LOOPING_CONST_OR_PURE_P (exp
))
827 flags
|= ECF_LOOPING_CONST_OR_PURE
;
829 if (DECL_IS_NOVOPS (exp
))
831 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp
)))
833 if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp
)))
836 if (TREE_NOTHROW (exp
))
837 flags
|= ECF_NOTHROW
;
841 if (is_tm_builtin (exp
))
842 flags
|= ECF_TM_BUILTIN
;
843 else if ((flags
& (ECF_CONST
|ECF_NOVOPS
)) != 0
844 || lookup_attribute ("transaction_pure",
845 TYPE_ATTRIBUTES (TREE_TYPE (exp
))))
846 flags
|= ECF_TM_PURE
;
849 flags
= special_function_p (exp
, flags
);
851 else if (TYPE_P (exp
))
853 if (TYPE_READONLY (exp
))
857 && ((flags
& ECF_CONST
) != 0
858 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp
))))
859 flags
|= ECF_TM_PURE
;
864 if (TREE_THIS_VOLATILE (exp
))
866 flags
|= ECF_NORETURN
;
867 if (flags
& (ECF_CONST
|ECF_PURE
))
868 flags
|= ECF_LOOPING_CONST_OR_PURE
;
874 /* Detect flags from a CALL_EXPR. */
877 call_expr_flags (const_tree t
)
880 tree decl
= get_callee_fndecl (t
);
883 flags
= flags_from_decl_or_type (decl
);
884 else if (CALL_EXPR_FN (t
) == NULL_TREE
)
885 flags
= internal_fn_flags (CALL_EXPR_IFN (t
));
888 tree type
= TREE_TYPE (CALL_EXPR_FN (t
));
889 if (type
&& TREE_CODE (type
) == POINTER_TYPE
)
890 flags
= flags_from_decl_or_type (TREE_TYPE (type
));
893 if (CALL_EXPR_BY_DESCRIPTOR (t
))
894 flags
|= ECF_BY_DESCRIPTOR
;
900 /* Return true if TYPE should be passed by invisible reference. */
903 pass_by_reference (CUMULATIVE_ARGS
*ca
, machine_mode mode
,
904 tree type
, bool named_arg
)
908 /* If this type contains non-trivial constructors, then it is
909 forbidden for the middle-end to create any new copies. */
910 if (TREE_ADDRESSABLE (type
))
913 /* GCC post 3.4 passes *all* variable sized types by reference. */
914 if (!TYPE_SIZE (type
) || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
917 /* If a record type should be passed the same as its first (and only)
918 member, use the type and mode of that member. */
919 if (TREE_CODE (type
) == RECORD_TYPE
&& TYPE_TRANSPARENT_AGGR (type
))
921 type
= TREE_TYPE (first_field (type
));
922 mode
= TYPE_MODE (type
);
926 return targetm
.calls
.pass_by_reference (pack_cumulative_args (ca
), mode
,
930 /* Return true if TYPE, which is passed by reference, should be callee
931 copied instead of caller copied. */
934 reference_callee_copied (CUMULATIVE_ARGS
*ca
, machine_mode mode
,
935 tree type
, bool named_arg
)
937 if (type
&& TREE_ADDRESSABLE (type
))
939 return targetm
.calls
.callee_copies (pack_cumulative_args (ca
), mode
, type
,
944 /* Precompute all register parameters as described by ARGS, storing values
945 into fields within the ARGS array.
947 NUM_ACTUALS indicates the total number elements in the ARGS array.
949 Set REG_PARM_SEEN if we encounter a register parameter. */
952 precompute_register_parameters (int num_actuals
, struct arg_data
*args
,
959 for (i
= 0; i
< num_actuals
; i
++)
960 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
)
964 if (args
[i
].value
== 0)
967 args
[i
].value
= expand_normal (args
[i
].tree_value
);
968 preserve_temp_slots (args
[i
].value
);
972 /* If we are to promote the function arg to a wider mode,
975 if (args
[i
].mode
!= TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)))
977 = convert_modes (args
[i
].mode
,
978 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
979 args
[i
].value
, args
[i
].unsignedp
);
981 /* If the value is a non-legitimate constant, force it into a
982 pseudo now. TLS symbols sometimes need a call to resolve. */
983 if (CONSTANT_P (args
[i
].value
)
984 && !targetm
.legitimate_constant_p (args
[i
].mode
, args
[i
].value
))
985 args
[i
].value
= force_reg (args
[i
].mode
, args
[i
].value
);
987 /* If we're going to have to load the value by parts, pull the
988 parts into pseudos. The part extraction process can involve
989 non-trivial computation. */
990 if (GET_CODE (args
[i
].reg
) == PARALLEL
)
992 tree type
= TREE_TYPE (args
[i
].tree_value
);
993 args
[i
].parallel_value
994 = emit_group_load_into_temps (args
[i
].reg
, args
[i
].value
,
995 type
, int_size_in_bytes (type
));
998 /* If the value is expensive, and we are inside an appropriately
999 short loop, put the value into a pseudo and then put the pseudo
1002 For small register classes, also do this if this call uses
1003 register parameters. This is to avoid reload conflicts while
1004 loading the parameters registers. */
1006 else if ((! (REG_P (args
[i
].value
)
1007 || (GET_CODE (args
[i
].value
) == SUBREG
1008 && REG_P (SUBREG_REG (args
[i
].value
)))))
1009 && args
[i
].mode
!= BLKmode
1010 && (set_src_cost (args
[i
].value
, args
[i
].mode
,
1011 optimize_insn_for_speed_p ())
1012 > COSTS_N_INSNS (1))
1014 && targetm
.small_register_classes_for_mode_p (args
[i
].mode
))
1016 args
[i
].value
= copy_to_mode_reg (args
[i
].mode
, args
[i
].value
);
1020 #ifdef REG_PARM_STACK_SPACE
1022 /* The argument list is the property of the called routine and it
1023 may clobber it. If the fixed area has been used for previous
1024 parameters, we must save and restore it. */
1027 save_fixed_argument_area (int reg_parm_stack_space
, rtx argblock
, int *low_to_save
, int *high_to_save
)
1032 /* Compute the boundary of the area that needs to be saved, if any. */
1033 high
= reg_parm_stack_space
;
1034 if (ARGS_GROW_DOWNWARD
)
1037 if (high
> highest_outgoing_arg_in_use
)
1038 high
= highest_outgoing_arg_in_use
;
1040 for (low
= 0; low
< high
; low
++)
1041 if (stack_usage_map
[low
] != 0 || low
>= stack_usage_watermark
)
1044 machine_mode save_mode
;
1050 while (stack_usage_map
[--high
] == 0)
1054 *high_to_save
= high
;
1056 num_to_save
= high
- low
+ 1;
1058 /* If we don't have the required alignment, must do this
1060 scalar_int_mode imode
;
1061 if (int_mode_for_size (num_to_save
* BITS_PER_UNIT
, 1).exists (&imode
)
1062 && (low
& (MIN (GET_MODE_SIZE (imode
),
1063 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)) == 0)
1066 save_mode
= BLKmode
;
1068 if (ARGS_GROW_DOWNWARD
)
1073 addr
= plus_constant (Pmode
, argblock
, delta
);
1074 stack_area
= gen_rtx_MEM (save_mode
, memory_address (save_mode
, addr
));
1076 set_mem_align (stack_area
, PARM_BOUNDARY
);
1077 if (save_mode
== BLKmode
)
1079 save_area
= assign_stack_temp (BLKmode
, num_to_save
);
1080 emit_block_move (validize_mem (save_area
), stack_area
,
1081 GEN_INT (num_to_save
), BLOCK_OP_CALL_PARM
);
1085 save_area
= gen_reg_rtx (save_mode
);
1086 emit_move_insn (save_area
, stack_area
);
1096 restore_fixed_argument_area (rtx save_area
, rtx argblock
, int high_to_save
, int low_to_save
)
1098 machine_mode save_mode
= GET_MODE (save_area
);
1100 rtx addr
, stack_area
;
1102 if (ARGS_GROW_DOWNWARD
)
1103 delta
= -high_to_save
;
1105 delta
= low_to_save
;
1107 addr
= plus_constant (Pmode
, argblock
, delta
);
1108 stack_area
= gen_rtx_MEM (save_mode
, memory_address (save_mode
, addr
));
1109 set_mem_align (stack_area
, PARM_BOUNDARY
);
1111 if (save_mode
!= BLKmode
)
1112 emit_move_insn (stack_area
, save_area
);
1114 emit_block_move (stack_area
, validize_mem (save_area
),
1115 GEN_INT (high_to_save
- low_to_save
+ 1),
1116 BLOCK_OP_CALL_PARM
);
1118 #endif /* REG_PARM_STACK_SPACE */
1120 /* If any elements in ARGS refer to parameters that are to be passed in
1121 registers, but not in memory, and whose alignment does not permit a
1122 direct copy into registers, copy the values into a group of pseudos
1123 which we will later copy into the appropriate hard registers.
1125 Pseudos for each unaligned argument will be stored into the array
1126 args[argnum].aligned_regs. The caller is responsible for deallocating
1127 the aligned_regs array if it is nonzero. */
1130 store_unaligned_arguments_into_pseudos (struct arg_data
*args
, int num_actuals
)
1134 for (i
= 0; i
< num_actuals
; i
++)
1135 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
1136 && GET_CODE (args
[i
].reg
) != PARALLEL
1137 && args
[i
].mode
== BLKmode
1138 && MEM_P (args
[i
].value
)
1139 && (MEM_ALIGN (args
[i
].value
)
1140 < (unsigned int) MIN (BIGGEST_ALIGNMENT
, BITS_PER_WORD
)))
1142 int bytes
= int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
1143 int endian_correction
= 0;
1145 if (args
[i
].partial
)
1147 gcc_assert (args
[i
].partial
% UNITS_PER_WORD
== 0);
1148 args
[i
].n_aligned_regs
= args
[i
].partial
/ UNITS_PER_WORD
;
1152 args
[i
].n_aligned_regs
1153 = (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1156 args
[i
].aligned_regs
= XNEWVEC (rtx
, args
[i
].n_aligned_regs
);
1158 /* Structures smaller than a word are normally aligned to the
1159 least significant byte. On a BYTES_BIG_ENDIAN machine,
1160 this means we must skip the empty high order bytes when
1161 calculating the bit offset. */
1162 if (bytes
< UNITS_PER_WORD
1163 #ifdef BLOCK_REG_PADDING
1164 && (BLOCK_REG_PADDING (args
[i
].mode
,
1165 TREE_TYPE (args
[i
].tree_value
), 1)
1171 endian_correction
= BITS_PER_WORD
- bytes
* BITS_PER_UNIT
;
1173 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
1175 rtx reg
= gen_reg_rtx (word_mode
);
1176 rtx word
= operand_subword_force (args
[i
].value
, j
, BLKmode
);
1177 int bitsize
= MIN (bytes
* BITS_PER_UNIT
, BITS_PER_WORD
);
1179 args
[i
].aligned_regs
[j
] = reg
;
1180 word
= extract_bit_field (word
, bitsize
, 0, 1, NULL_RTX
,
1181 word_mode
, word_mode
, false, NULL
);
1183 /* There is no need to restrict this code to loading items
1184 in TYPE_ALIGN sized hunks. The bitfield instructions can
1185 load up entire word sized registers efficiently.
1187 ??? This may not be needed anymore.
1188 We use to emit a clobber here but that doesn't let later
1189 passes optimize the instructions we emit. By storing 0 into
1190 the register later passes know the first AND to zero out the
1191 bitfield being set in the register is unnecessary. The store
1192 of 0 will be deleted as will at least the first AND. */
1194 emit_move_insn (reg
, const0_rtx
);
1196 bytes
-= bitsize
/ BITS_PER_UNIT
;
1197 store_bit_field (reg
, bitsize
, endian_correction
, 0, 0,
1198 word_mode
, word
, false);
1203 /* The limit set by -Walloc-larger-than=. */
1204 static GTY(()) tree alloc_object_size_limit
;
1206 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1207 setting if the option is specified, or to the maximum object size if it
1208 is not. Return the initialized value. */
1211 alloc_max_size (void)
1213 if (alloc_object_size_limit
)
1214 return alloc_object_size_limit
;
1216 HOST_WIDE_INT limit
= warn_alloc_size_limit
;
1217 if (limit
== HOST_WIDE_INT_MAX
)
1218 limit
= tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node
));
1220 alloc_object_size_limit
= build_int_cst (size_type_node
, limit
);
1222 return alloc_object_size_limit
;
1225 /* Return true when EXP's range can be determined and set RANGE[] to it
1226 after adjusting it if necessary to make EXP represent a valid size
1227 of object, or a valid size argument to an allocation function declared
1228 with attribute alloc_size (whose argument may be signed), or to a string
1229 manipulation function like memset. When ALLOW_ZERO is true, allow
1230 returning a range of [0, 0] for a size in an anti-range [1, N] where
1231 N > PTRDIFF_MAX. A zero range is a (nearly) invalid argument to
1232 allocation functions like malloc but it is a valid argument to
1233 functions like memset. */
1236 get_size_range (tree exp
, tree range
[2], bool allow_zero
/* = false */)
1238 if (tree_fits_uhwi_p (exp
))
1240 /* EXP is a constant. */
1241 range
[0] = range
[1] = exp
;
1245 tree exptype
= TREE_TYPE (exp
);
1246 bool integral
= INTEGRAL_TYPE_P (exptype
);
1249 enum value_range_kind range_type
;
1252 range_type
= determine_value_range (exp
, &min
, &max
);
1254 range_type
= VR_VARYING
;
1256 if (range_type
== VR_VARYING
)
1260 /* Use the full range of the type of the expression when
1261 no value range information is available. */
1262 range
[0] = TYPE_MIN_VALUE (exptype
);
1263 range
[1] = TYPE_MAX_VALUE (exptype
);
1267 range
[0] = NULL_TREE
;
1268 range
[1] = NULL_TREE
;
1272 unsigned expprec
= TYPE_PRECISION (exptype
);
1274 bool signed_p
= !TYPE_UNSIGNED (exptype
);
1276 if (range_type
== VR_ANTI_RANGE
)
1280 if (wi::les_p (max
, 0))
1282 /* EXP is not in a strictly negative range. That means
1283 it must be in some (not necessarily strictly) positive
1284 range which includes zero. Since in signed to unsigned
1285 conversions negative values end up converted to large
1286 positive values, and otherwise they are not valid sizes,
1287 the resulting range is in both cases [0, TYPE_MAX]. */
1288 min
= wi::zero (expprec
);
1289 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
1291 else if (wi::les_p (min
- 1, 0))
1293 /* EXP is not in a negative-positive range. That means EXP
1294 is either negative, or greater than max. Since negative
1295 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
1297 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
1302 min
= wi::zero (expprec
);
1305 else if (wi::eq_p (0, min
- 1))
1307 /* EXP is unsigned and not in the range [1, MAX]. That means
1308 it's either zero or greater than MAX. Even though 0 would
1309 normally be detected by -Walloc-zero, unless ALLOW_ZERO
1310 is true, set the range to [MAX, TYPE_MAX] so that when MAX
1311 is greater than the limit the whole range is diagnosed. */
1313 min
= max
= wi::zero (expprec
);
1317 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
1323 min
= wi::zero (expprec
);
1327 range
[0] = wide_int_to_tree (exptype
, min
);
1328 range
[1] = wide_int_to_tree (exptype
, max
);
1333 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1334 whose argument numbers given by IDX with values given by ARGS exceed
1335 the maximum object size or cause an unsigned overflow (wrapping) when
1336 multiplied. FN is null when EXP is a call via a function pointer.
1337 When ARGS[0] is null the function does nothing. ARGS[1] may be null
1338 for functions like malloc, and non-null for those like calloc that
1339 are decorated with a two-argument attribute alloc_size. */
1342 maybe_warn_alloc_args_overflow (tree fn
, tree exp
, tree args
[2], int idx
[2])
1344 /* The range each of the (up to) two arguments is known to be in. */
1345 tree argrange
[2][2] = { { NULL_TREE
, NULL_TREE
}, { NULL_TREE
, NULL_TREE
} };
1347 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1348 tree maxobjsize
= alloc_max_size ();
1350 location_t loc
= EXPR_LOCATION (exp
);
1352 tree fntype
= fn
? TREE_TYPE (fn
) : TREE_TYPE (TREE_TYPE (exp
));
1353 bool warned
= false;
1355 /* Validate each argument individually. */
1356 for (unsigned i
= 0; i
!= 2 && args
[i
]; ++i
)
1358 if (TREE_CODE (args
[i
]) == INTEGER_CST
)
1360 argrange
[i
][0] = args
[i
];
1361 argrange
[i
][1] = args
[i
];
1363 if (tree_int_cst_lt (args
[i
], integer_zero_node
))
1365 warned
= warning_at (loc
, OPT_Walloc_size_larger_than_
,
1366 "%Kargument %i value %qE is negative",
1367 exp
, idx
[i
] + 1, args
[i
]);
1369 else if (integer_zerop (args
[i
]))
1371 /* Avoid issuing -Walloc-zero for allocation functions other
1372 than __builtin_alloca that are declared with attribute
1373 returns_nonnull because there's no portability risk. This
1374 avoids warning for such calls to libiberty's xmalloc and
1376 Also avoid issuing the warning for calls to function named
1378 if (fn
&& fndecl_built_in_p (fn
, BUILT_IN_ALLOCA
)
1379 ? IDENTIFIER_LENGTH (DECL_NAME (fn
)) != 6
1380 : !lookup_attribute ("returns_nonnull",
1381 TYPE_ATTRIBUTES (fntype
)))
1382 warned
= warning_at (loc
, OPT_Walloc_zero
,
1383 "%Kargument %i value is zero",
1386 else if (tree_int_cst_lt (maxobjsize
, args
[i
]))
1388 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1389 mode and with -fno-exceptions as a way to indicate array
1390 size overflow. There's no good way to detect C++98 here
1391 so avoid diagnosing these calls for all C++ modes. */
1396 && DECL_IS_OPERATOR_NEW_P (fn
)
1397 && integer_all_onesp (args
[i
]))
1400 warned
= warning_at (loc
, OPT_Walloc_size_larger_than_
,
1401 "%Kargument %i value %qE exceeds "
1402 "maximum object size %E",
1403 exp
, idx
[i
] + 1, args
[i
], maxobjsize
);
1406 else if (TREE_CODE (args
[i
]) == SSA_NAME
1407 && get_size_range (args
[i
], argrange
[i
]))
1409 /* Verify that the argument's range is not negative (including
1410 upper bound of zero). */
1411 if (tree_int_cst_lt (argrange
[i
][0], integer_zero_node
)
1412 && tree_int_cst_le (argrange
[i
][1], integer_zero_node
))
1414 warned
= warning_at (loc
, OPT_Walloc_size_larger_than_
,
1415 "%Kargument %i range [%E, %E] is negative",
1417 argrange
[i
][0], argrange
[i
][1]);
1419 else if (tree_int_cst_lt (maxobjsize
, argrange
[i
][0]))
1421 warned
= warning_at (loc
, OPT_Walloc_size_larger_than_
,
1422 "%Kargument %i range [%E, %E] exceeds "
1423 "maximum object size %E",
1425 argrange
[i
][0], argrange
[i
][1],
1434 /* For a two-argument alloc_size, validate the product of the two
1435 arguments if both of their values or ranges are known. */
1436 if (!warned
&& tree_fits_uhwi_p (argrange
[0][0])
1437 && argrange
[1][0] && tree_fits_uhwi_p (argrange
[1][0])
1438 && !integer_onep (argrange
[0][0])
1439 && !integer_onep (argrange
[1][0]))
1441 /* Check for overflow in the product of a function decorated with
1442 attribute alloc_size (X, Y). */
1443 unsigned szprec
= TYPE_PRECISION (size_type_node
);
1444 wide_int x
= wi::to_wide (argrange
[0][0], szprec
);
1445 wide_int y
= wi::to_wide (argrange
[1][0], szprec
);
1447 wi::overflow_type vflow
;
1448 wide_int prod
= wi::umul (x
, y
, &vflow
);
1451 warned
= warning_at (loc
, OPT_Walloc_size_larger_than_
,
1452 "%Kproduct %<%E * %E%> of arguments %i and %i "
1453 "exceeds %<SIZE_MAX%>",
1454 exp
, argrange
[0][0], argrange
[1][0],
1455 idx
[0] + 1, idx
[1] + 1);
1456 else if (wi::ltu_p (wi::to_wide (maxobjsize
, szprec
), prod
))
1457 warned
= warning_at (loc
, OPT_Walloc_size_larger_than_
,
1458 "%Kproduct %<%E * %E%> of arguments %i and %i "
1459 "exceeds maximum object size %E",
1460 exp
, argrange
[0][0], argrange
[1][0],
1461 idx
[0] + 1, idx
[1] + 1,
1466 /* Print the full range of each of the two arguments to make
1467 it clear when it is, in fact, in a range and not constant. */
1468 if (argrange
[0][0] != argrange
[0][1])
1469 inform (loc
, "argument %i in the range [%E, %E]",
1470 idx
[0] + 1, argrange
[0][0], argrange
[0][1]);
1471 if (argrange
[1][0] != argrange
[1][1])
1472 inform (loc
, "argument %i in the range [%E, %E]",
1473 idx
[1] + 1, argrange
[1][0], argrange
[1][1]);
1479 location_t fnloc
= DECL_SOURCE_LOCATION (fn
);
1481 if (DECL_IS_BUILTIN (fn
))
1483 "in a call to built-in allocation function %qD", fn
);
1486 "in a call to allocation function %qD declared here", fn
);
1490 /* If EXPR refers to a character array or pointer declared attribute
1491 nonstring return a decl for that array or pointer and set *REF to
1492 the referenced enclosing object or pointer.  Otherwise returns null.  */
1496 get_attr_nonstring_decl (tree expr
, tree
*ref
)
1499 tree var
= NULL_TREE
;
1500 if (TREE_CODE (decl
) == SSA_NAME
)
1502 gimple
*def
= SSA_NAME_DEF_STMT (decl
);
1504 if (is_gimple_assign (def
))
1506 tree_code code
= gimple_assign_rhs_code (def
);
1507 if (code
== ADDR_EXPR
1508 || code
== COMPONENT_REF
1509 || code
== VAR_DECL
)
1510 decl
= gimple_assign_rhs1 (def
);
1513 var
= SSA_NAME_VAR (decl
);
1516 if (TREE_CODE (decl
) == ADDR_EXPR
)
1517 decl
= TREE_OPERAND (decl
, 0);
1519 /* To simplify calling code, store the referenced DECL regardless of
1520 the attribute determined below, but avoid storing the SSA_NAME_VAR
1521 obtained above (it's not useful for dataflow purposes). */
1525 /* Use the SSA_NAME_VAR that was determined above to see if it's
1526 declared nonstring. Otherwise drill down into the referenced
1530 else if (TREE_CODE (decl
) == ARRAY_REF
)
1531 decl
= TREE_OPERAND (decl
, 0);
1532 else if (TREE_CODE (decl
) == COMPONENT_REF
)
1533 decl
= TREE_OPERAND (decl
, 1);
1534 else if (TREE_CODE (decl
) == MEM_REF
)
1535 return get_attr_nonstring_decl (TREE_OPERAND (decl
, 0), ref
);
1538 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl
)))
1544 /* Warn about passing a non-string array/pointer to a function that
1545 expects a nul-terminated string argument. */
1548 maybe_warn_nonstring_arg (tree fndecl
, tree exp
)
1550 if (!fndecl
|| !fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
1553 if (TREE_NO_WARNING (exp
) || !warn_stringop_overflow
)
1556 /* Avoid clearly invalid calls (more checking done below). */
1557 unsigned nargs
= call_expr_nargs (exp
);
1561 /* The bound argument to a bounded string function like strncpy. */
1562 tree bound
= NULL_TREE
;
1564 /* The longest known or possible string argument to one of the comparison
1565 functions. If the length is less than the bound it is used instead.
1566 Since the length is only used for warning and not for code generation
1567 disable strict mode in the calls to get_range_strlen below. */
1568 tree maxlen
= NULL_TREE
;
1570 /* It's safe to call "bounded" string functions with a non-string
1571 argument since the functions provide an explicit bound for this
1572 purpose. The exception is strncat where the bound may refer to
1573 either the destination or the source. */
1574 int fncode
= DECL_FUNCTION_CODE (fndecl
);
1577 case BUILT_IN_STRCMP
:
1578 case BUILT_IN_STRNCMP
:
1579 case BUILT_IN_STRNCASECMP
:
1581 /* For these, if one argument refers to one or more of a set
1582 of string constants or arrays of known size, determine
1583 the range of their known or possible lengths and use it
1584 conservatively as the bound for the unbounded function,
1585 and to adjust the range of the bound of the bounded ones. */
1586 for (unsigned argno
= 0;
1587 argno
< MIN (nargs
, 2)
1588 && !(maxlen
&& TREE_CODE (maxlen
) == INTEGER_CST
); argno
++)
1590 tree arg
= CALL_EXPR_ARG (exp
, argno
);
1591 if (!get_attr_nonstring_decl (arg
))
1593 c_strlen_data lendata
= { };
1594 get_range_strlen (arg
, &lendata
, /* eltsize = */ 1);
1595 maxlen
= lendata
.maxbound
;
1601 case BUILT_IN_STRNCAT
:
1602 case BUILT_IN_STPNCPY
:
1603 case BUILT_IN_STRNCPY
:
1605 bound
= CALL_EXPR_ARG (exp
, 2);
1608 case BUILT_IN_STRNDUP
:
1610 bound
= CALL_EXPR_ARG (exp
, 1);
1613 case BUILT_IN_STRNLEN
:
1615 tree arg
= CALL_EXPR_ARG (exp
, 0);
1616 if (!get_attr_nonstring_decl (arg
))
1618 c_strlen_data lendata
= { };
1619 get_range_strlen (arg
, &lendata
, /* eltsize = */ 1);
1620 maxlen
= lendata
.maxbound
;
1623 bound
= CALL_EXPR_ARG (exp
, 1);
1631 /* Determine the range of the bound argument (if specified). */
1632 tree bndrng
[2] = { NULL_TREE
, NULL_TREE
};
1636 get_size_range (bound
, bndrng
);
1639 location_t loc
= EXPR_LOCATION (exp
);
1643 /* Diagnose excessive bound prior the adjustment below and
1644 regardless of attribute nonstring. */
1645 tree maxobjsize
= max_object_size ();
1646 if (tree_int_cst_lt (maxobjsize
, bndrng
[0]))
1648 if (tree_int_cst_equal (bndrng
[0], bndrng
[1]))
1649 warning_at (loc
, OPT_Wstringop_overflow_
,
1650 "%K%qD specified bound %E "
1651 "exceeds maximum object size %E",
1652 exp
, fndecl
, bndrng
[0], maxobjsize
);
1654 warning_at (loc
, OPT_Wstringop_overflow_
,
1655 "%K%qD specified bound [%E, %E] "
1656 "exceeds maximum object size %E",
1657 exp
, fndecl
, bndrng
[0], bndrng
[1], maxobjsize
);
1662 if (maxlen
&& !integer_all_onesp (maxlen
))
1664 /* Add one for the nul. */
1665 maxlen
= const_binop (PLUS_EXPR
, TREE_TYPE (maxlen
), maxlen
,
1670 /* Conservatively use the upper bound of the lengths for
1671 both the lower and the upper bound of the operation. */
1674 bound
= void_type_node
;
1678 /* Replace the bound on the operation with the upper bound
1679 of the length of the string if the latter is smaller. */
1680 if (tree_int_cst_lt (maxlen
, bndrng
[0]))
1682 else if (tree_int_cst_lt (maxlen
, bndrng
[1]))
1687 /* Iterate over the built-in function's formal arguments and check
1688 each const char* against the actual argument. If the actual
1689 argument is declared attribute non-string issue a warning unless
1690 the argument's maximum length is bounded. */
1691 function_args_iterator it
;
1692 function_args_iter_init (&it
, TREE_TYPE (fndecl
));
1694 for (unsigned argno
= 0; ; ++argno
, function_args_iter_next (&it
))
1696 /* Avoid iterating past the declared argument in a call
1697 to function declared without a prototype. */
1701 tree argtype
= function_args_iter_cond (&it
);
1705 if (TREE_CODE (argtype
) != POINTER_TYPE
)
1708 argtype
= TREE_TYPE (argtype
);
1710 if (TREE_CODE (argtype
) != INTEGER_TYPE
1711 || !TYPE_READONLY (argtype
))
1714 argtype
= TYPE_MAIN_VARIANT (argtype
);
1715 if (argtype
!= char_type_node
)
1718 tree callarg
= CALL_EXPR_ARG (exp
, argno
);
1719 if (TREE_CODE (callarg
) == ADDR_EXPR
)
1720 callarg
= TREE_OPERAND (callarg
, 0);
1722 /* See if the destination is declared with attribute "nonstring". */
1723 tree decl
= get_attr_nonstring_decl (callarg
);
1727 /* The maximum number of array elements accessed. */
1728 offset_int wibnd
= 0;
1730 if (argno
&& fncode
== BUILT_IN_STRNCAT
)
1732 /* See if the bound in strncat is derived from the length
1733 of the strlen of the destination (as it's expected to be).
1734 If so, reset BOUND and FNCODE to trigger a warning. */
1735 tree dstarg
= CALL_EXPR_ARG (exp
, 0);
1736 if (is_strlen_related_p (dstarg
, bound
))
1738 /* The bound applies to the destination, not to the source,
1739 so reset these to trigger a warning without mentioning
1745 /* Use the upper bound of the range for strncat. */
1746 wibnd
= wi::to_offset (bndrng
[1]);
1749 /* Use the lower bound of the range for functions other than
1751 wibnd
= wi::to_offset (bndrng
[0]);
1753 /* Determine the size of the argument array if it is one. */
1754 offset_int asize
= wibnd
;
1755 bool known_size
= false;
1756 tree type
= TREE_TYPE (decl
);
1758 /* Determine the array size. For arrays of unknown bound and
1759 pointers reset BOUND to trigger the appropriate warning. */
1760 if (TREE_CODE (type
) == ARRAY_TYPE
)
1762 if (tree arrbnd
= TYPE_DOMAIN (type
))
1764 if ((arrbnd
= TYPE_MAX_VALUE (arrbnd
)))
1766 asize
= wi::to_offset (arrbnd
) + 1;
1770 else if (bound
== void_type_node
)
1773 else if (bound
== void_type_node
)
1776 /* In a call to strncat with a bound in a range whose lower but
1777 not upper bound is less than the array size, reset ASIZE to
1778 be the same as the bound and the other variable to trigger
1779 the apprpriate warning below. */
1780 if (fncode
== BUILT_IN_STRNCAT
1781 && bndrng
[0] != bndrng
[1]
1782 && wi::ltu_p (wi::to_offset (bndrng
[0]), asize
)
1784 || wi::ltu_p (asize
, wibnd
)))
1791 bool warned
= false;
1793 auto_diagnostic_group d
;
1794 if (wi::ltu_p (asize
, wibnd
))
1796 if (bndrng
[0] == bndrng
[1])
1797 warned
= warning_at (loc
, OPT_Wstringop_overflow_
,
1798 "%qD argument %i declared attribute "
1799 "%<nonstring%> is smaller than the specified "
1801 fndecl
, argno
+ 1, wibnd
.to_uhwi ());
1802 else if (wi::ltu_p (asize
, wi::to_offset (bndrng
[0])))
1803 warned
= warning_at (loc
, OPT_Wstringop_overflow_
,
1804 "%qD argument %i declared attribute "
1805 "%<nonstring%> is smaller than "
1806 "the specified bound [%E, %E]",
1807 fndecl
, argno
+ 1, bndrng
[0], bndrng
[1]);
1809 warned
= warning_at (loc
, OPT_Wstringop_overflow_
,
1810 "%qD argument %i declared attribute "
1811 "%<nonstring%> may be smaller than "
1812 "the specified bound [%E, %E]",
1813 fndecl
, argno
+ 1, bndrng
[0], bndrng
[1]);
1815 else if (fncode
== BUILT_IN_STRNCAT
)
1816 ; /* Avoid warning for calls to strncat() when the bound
1817 is equal to the size of the non-string argument. */
1819 warned
= warning_at (loc
, OPT_Wstringop_overflow_
,
1820 "%qD argument %i declared attribute %<nonstring%>",
1824 inform (DECL_SOURCE_LOCATION (decl
),
1825 "argument %qD declared here", decl
);
1829 /* Issue an error if CALL_EXPR was flagged as requiring
1830 tail-call optimization. */
1833 maybe_complain_about_tail_call (tree call_expr
, const char *reason
)
1835 gcc_assert (TREE_CODE (call_expr
) == CALL_EXPR
);
1836 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr
))
1839 error_at (EXPR_LOCATION (call_expr
), "cannot tail-call: %s", reason
);
1842 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1845 NUM_ACTUALS is the total number of parameters.
1847 N_NAMED_ARGS is the total number of named arguments.
1849 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1852 FNDECL is the tree code for the target of this call (if known)
1854 ARGS_SO_FAR holds state needed by the target to know where to place
1857 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1858 for arguments which are passed in registers.
1860 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1861 and may be modified by this routine.
1863 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1864 flags which may be modified by this routine.
1866 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1867 that requires allocation of stack space.
1869 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1870 the thunked-to function. */
1873 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED
,
1874 struct arg_data
*args
,
1875 struct args_size
*args_size
,
1876 int n_named_args ATTRIBUTE_UNUSED
,
1877 tree exp
, tree struct_value_addr_value
,
1878 tree fndecl
, tree fntype
,
1879 cumulative_args_t args_so_far
,
1880 int reg_parm_stack_space
,
1881 rtx
*old_stack_level
,
1882 poly_int64_pod
*old_pending_adj
,
1883 int *must_preallocate
, int *ecf_flags
,
1884 bool *may_tailcall
, bool call_from_thunk_p
)
1886 CUMULATIVE_ARGS
*args_so_far_pnt
= get_cumulative_args (args_so_far
);
1887 location_t loc
= EXPR_LOCATION (exp
);
1889 /* Count arg position in order args appear. */
1894 args_size
->constant
= 0;
1897 bitmap_obstack_initialize (NULL
);
1899 /* In this loop, we consider args in the order they are written.
1900 We fill up ARGS from the back. */
1902 i
= num_actuals
- 1;
1905 call_expr_arg_iterator iter
;
1907 bitmap slots
= NULL
;
1909 if (struct_value_addr_value
)
1911 args
[j
].tree_value
= struct_value_addr_value
;
1915 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
1917 tree argtype
= TREE_TYPE (arg
);
1919 if (targetm
.calls
.split_complex_arg
1921 && TREE_CODE (argtype
) == COMPLEX_TYPE
1922 && targetm
.calls
.split_complex_arg (argtype
))
1924 tree subtype
= TREE_TYPE (argtype
);
1925 args
[j
].tree_value
= build1 (REALPART_EXPR
, subtype
, arg
);
1927 args
[j
].tree_value
= build1 (IMAGPART_EXPR
, subtype
, arg
);
1930 args
[j
].tree_value
= arg
;
1936 BITMAP_FREE (slots
);
1939 bitmap_obstack_release (NULL
);
1941 /* Extract attribute alloc_size from the type of the called expression
1942 (which could be a function or a function pointer) and if set, store
1943 the indices of the corresponding arguments in ALLOC_IDX, and then
1944 the actual argument(s) at those indices in ALLOC_ARGS. */
1945 int alloc_idx
[2] = { -1, -1 };
1946 if (tree alloc_size
= lookup_attribute ("alloc_size",
1947 TYPE_ATTRIBUTES (fntype
)))
1949 tree args
= TREE_VALUE (alloc_size
);
1950 alloc_idx
[0] = TREE_INT_CST_LOW (TREE_VALUE (args
)) - 1;
1951 if (TREE_CHAIN (args
))
1952 alloc_idx
[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args
))) - 1;
1955 /* Array for up to the two attribute alloc_size arguments. */
1956 tree alloc_args
[] = { NULL_TREE
, NULL_TREE
};
1958 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1959 for (argpos
= 0; argpos
< num_actuals
; i
--, argpos
++)
1961 tree type
= TREE_TYPE (args
[i
].tree_value
);
1965 /* Replace erroneous argument with constant zero. */
1966 if (type
== error_mark_node
|| !COMPLETE_TYPE_P (type
))
1967 args
[i
].tree_value
= integer_zero_node
, type
= integer_type_node
;
1969 /* If TYPE is a transparent union or record, pass things the way
1970 we would pass the first field of the union or record. We have
1971 already verified that the modes are the same. */
1972 if ((TREE_CODE (type
) == UNION_TYPE
|| TREE_CODE (type
) == RECORD_TYPE
)
1973 && TYPE_TRANSPARENT_AGGR (type
))
1974 type
= TREE_TYPE (first_field (type
));
1976 /* Decide where to pass this arg.
1978 args[i].reg is nonzero if all or part is passed in registers.
1980 args[i].partial is nonzero if part but not all is passed in registers,
1981 and the exact value says how many bytes are passed in registers.
1983 args[i].pass_on_stack is nonzero if the argument must at least be
1984 computed on the stack. It may then be loaded back into registers
1985 if args[i].reg is nonzero.
1987 These decisions are driven by the FUNCTION_... macros and must agree
1988 with those made by function.c. */
1990 /* See if this argument should be passed by invisible reference. */
1991 if (pass_by_reference (args_so_far_pnt
, TYPE_MODE (type
),
1992 type
, argpos
< n_named_args
))
1995 tree base
= NULL_TREE
;
1998 = reference_callee_copied (args_so_far_pnt
, TYPE_MODE (type
),
1999 type
, argpos
< n_named_args
);
2001 /* If we're compiling a thunk, pass through invisible references
2002 instead of making a copy. */
2003 if (call_from_thunk_p
2005 && !TREE_ADDRESSABLE (type
)
2006 && (base
= get_base_address (args
[i
].tree_value
))
2007 && TREE_CODE (base
) != SSA_NAME
2008 && (!DECL_P (base
) || MEM_P (DECL_RTL (base
)))))
2010 /* We may have turned the parameter value into an SSA name.
2011 Go back to the original parameter so we can take the
2013 if (TREE_CODE (args
[i
].tree_value
) == SSA_NAME
)
2015 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args
[i
].tree_value
));
2016 args
[i
].tree_value
= SSA_NAME_VAR (args
[i
].tree_value
);
2017 gcc_assert (TREE_CODE (args
[i
].tree_value
) == PARM_DECL
);
2019 /* Argument setup code may have copied the value to register. We
2020 revert that optimization now because the tail call code must
2021 use the original location. */
2022 if (TREE_CODE (args
[i
].tree_value
) == PARM_DECL
2023 && !MEM_P (DECL_RTL (args
[i
].tree_value
))
2024 && DECL_INCOMING_RTL (args
[i
].tree_value
)
2025 && MEM_P (DECL_INCOMING_RTL (args
[i
].tree_value
)))
2026 set_decl_rtl (args
[i
].tree_value
,
2027 DECL_INCOMING_RTL (args
[i
].tree_value
));
2029 mark_addressable (args
[i
].tree_value
);
2031 /* We can't use sibcalls if a callee-copied argument is
2032 stored in the current function's frame. */
2033 if (!call_from_thunk_p
&& DECL_P (base
) && !TREE_STATIC (base
))
2035 *may_tailcall
= false;
2036 maybe_complain_about_tail_call (exp
,
2037 "a callee-copied argument is"
2038 " stored in the current"
2039 " function's frame");
2042 args
[i
].tree_value
= build_fold_addr_expr_loc (loc
,
2043 args
[i
].tree_value
);
2044 type
= TREE_TYPE (args
[i
].tree_value
);
2046 if (*ecf_flags
& ECF_CONST
)
2047 *ecf_flags
&= ~(ECF_CONST
| ECF_LOOPING_CONST_OR_PURE
);
2051 /* We make a copy of the object and pass the address to the
2052 function being called. */
2055 if (!COMPLETE_TYPE_P (type
)
2056 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2057 || (flag_stack_check
== GENERIC_STACK_CHECK
2058 && compare_tree_int (TYPE_SIZE_UNIT (type
),
2059 STACK_CHECK_MAX_VAR_SIZE
) > 0))
2061 /* This is a variable-sized object. Make space on the stack
2063 rtx size_rtx
= expr_size (args
[i
].tree_value
);
2065 if (*old_stack_level
== 0)
2067 emit_stack_save (SAVE_BLOCK
, old_stack_level
);
2068 *old_pending_adj
= pending_stack_adjust
;
2069 pending_stack_adjust
= 0;
2072 /* We can pass TRUE as the 4th argument because we just
2073 saved the stack pointer and will restore it right after
2075 copy
= allocate_dynamic_stack_space (size_rtx
,
2078 max_int_size_in_bytes
2081 copy
= gen_rtx_MEM (BLKmode
, copy
);
2082 set_mem_attributes (copy
, type
, 1);
2085 copy
= assign_temp (type
, 1, 0);
2087 store_expr (args
[i
].tree_value
, copy
, 0, false, false);
2089 /* Just change the const function to pure and then let
2090 the next test clear the pure based on
2092 if (*ecf_flags
& ECF_CONST
)
2094 *ecf_flags
&= ~ECF_CONST
;
2095 *ecf_flags
|= ECF_PURE
;
2098 if (!callee_copies
&& *ecf_flags
& ECF_PURE
)
2099 *ecf_flags
&= ~(ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
);
2102 = build_fold_addr_expr_loc (loc
, make_tree (type
, copy
));
2103 type
= TREE_TYPE (args
[i
].tree_value
);
2104 *may_tailcall
= false;
2105 maybe_complain_about_tail_call (exp
,
2106 "argument must be passed"
2111 unsignedp
= TYPE_UNSIGNED (type
);
2112 mode
= promote_function_mode (type
, TYPE_MODE (type
), &unsignedp
,
2113 fndecl
? TREE_TYPE (fndecl
) : fntype
, 0);
2115 args
[i
].unsignedp
= unsignedp
;
2116 args
[i
].mode
= mode
;
2118 targetm
.calls
.warn_parameter_passing_abi (args_so_far
, type
);
2120 args
[i
].reg
= targetm
.calls
.function_arg (args_so_far
, mode
, type
,
2121 argpos
< n_named_args
);
2123 if (args
[i
].reg
&& CONST_INT_P (args
[i
].reg
))
2126 /* If this is a sibling call and the machine has register windows, the
2127 register window has to be unwinded before calling the routine, so
2128 arguments have to go into the incoming registers. */
2129 if (targetm
.calls
.function_incoming_arg
!= targetm
.calls
.function_arg
)
2130 args
[i
].tail_call_reg
2131 = targetm
.calls
.function_incoming_arg (args_so_far
, mode
, type
,
2132 argpos
< n_named_args
);
2134 args
[i
].tail_call_reg
= args
[i
].reg
;
2138 = targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, type
,
2139 argpos
< n_named_args
);
2141 args
[i
].pass_on_stack
= targetm
.calls
.must_pass_in_stack (mode
, type
);
2143 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2144 it means that we are to pass this arg in the register(s) designated
2145 by the PARALLEL, but also to pass it in the stack. */
2146 if (args
[i
].reg
&& GET_CODE (args
[i
].reg
) == PARALLEL
2147 && XEXP (XVECEXP (args
[i
].reg
, 0, 0), 0) == 0)
2148 args
[i
].pass_on_stack
= 1;
2150 /* If this is an addressable type, we must preallocate the stack
2151 since we must evaluate the object into its final location.
2153 If this is to be passed in both registers and the stack, it is simpler
2155 if (TREE_ADDRESSABLE (type
)
2156 || (args
[i
].pass_on_stack
&& args
[i
].reg
!= 0))
2157 *must_preallocate
= 1;
2159 /* Compute the stack-size of this argument. */
2160 if (args
[i
].reg
== 0 || args
[i
].partial
!= 0
2161 || reg_parm_stack_space
> 0
2162 || args
[i
].pass_on_stack
)
2163 locate_and_pad_parm (mode
, type
,
2164 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2169 reg_parm_stack_space
,
2170 args
[i
].pass_on_stack
? 0 : args
[i
].partial
,
2171 fndecl
, args_size
, &args
[i
].locate
);
2172 #ifdef BLOCK_REG_PADDING
2174 /* The argument is passed entirely in registers. See at which
2175 end it should be padded. */
2176 args
[i
].locate
.where_pad
=
2177 BLOCK_REG_PADDING (mode
, type
,
2178 int_size_in_bytes (type
) <= UNITS_PER_WORD
);
2181 /* Update ARGS_SIZE, the total stack space for args so far. */
2183 args_size
->constant
+= args
[i
].locate
.size
.constant
;
2184 if (args
[i
].locate
.size
.var
)
2185 ADD_PARM_SIZE (*args_size
, args
[i
].locate
.size
.var
);
2187 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2188 have been used, etc. */
2190 targetm
.calls
.function_arg_advance (args_so_far
, TYPE_MODE (type
),
2191 type
, argpos
< n_named_args
);
2193 /* Store argument values for functions decorated with attribute
2195 if (argpos
== alloc_idx
[0])
2196 alloc_args
[0] = args
[i
].tree_value
;
2197 else if (argpos
== alloc_idx
[1])
2198 alloc_args
[1] = args
[i
].tree_value
;
2203 /* Check the arguments of functions decorated with attribute
2205 maybe_warn_alloc_args_overflow (fndecl
, exp
, alloc_args
, alloc_idx
);
2208 /* Detect passing non-string arguments to functions expecting
2209 nul-terminated strings. */
2210 maybe_warn_nonstring_arg (fndecl
, exp
);
2213 /* Update ARGS_SIZE to contain the total size for the argument block.
2214 Return the original constant component of the argument block's size.
2216 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2217 for arguments passed in registers. */
2220 compute_argument_block_size (int reg_parm_stack_space
,
2221 struct args_size
*args_size
,
2222 tree fndecl ATTRIBUTE_UNUSED
,
2223 tree fntype ATTRIBUTE_UNUSED
,
2224 int preferred_stack_boundary ATTRIBUTE_UNUSED
)
2226 poly_int64 unadjusted_args_size
= args_size
->constant
;
2228 /* For accumulate outgoing args mode we don't need to align, since the frame
2229 will be already aligned. Align to STACK_BOUNDARY in order to prevent
2230 backends from generating misaligned frame sizes. */
2231 if (ACCUMULATE_OUTGOING_ARGS
&& preferred_stack_boundary
> STACK_BOUNDARY
)
2232 preferred_stack_boundary
= STACK_BOUNDARY
;
2234 /* Compute the actual size of the argument block required. The variable
2235 and constant sizes must be combined, the size may have to be rounded,
2236 and there may be a minimum required size. */
2240 args_size
->var
= ARGS_SIZE_TREE (*args_size
);
2241 args_size
->constant
= 0;
2243 preferred_stack_boundary
/= BITS_PER_UNIT
;
2244 if (preferred_stack_boundary
> 1)
2246 /* We don't handle this case yet. To handle it correctly we have
2247 to add the delta, round and subtract the delta.
2248 Currently no machine description requires this support. */
2249 gcc_assert (multiple_p (stack_pointer_delta
,
2250 preferred_stack_boundary
));
2251 args_size
->var
= round_up (args_size
->var
, preferred_stack_boundary
);
2254 if (reg_parm_stack_space
> 0)
2257 = size_binop (MAX_EXPR
, args_size
->var
,
2258 ssize_int (reg_parm_stack_space
));
2260 /* The area corresponding to register parameters is not to count in
2261 the size of the block we need. So make the adjustment. */
2262 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl
? fntype
: TREE_TYPE (fndecl
))))
2264 = size_binop (MINUS_EXPR
, args_size
->var
,
2265 ssize_int (reg_parm_stack_space
));
2270 preferred_stack_boundary
/= BITS_PER_UNIT
;
2271 if (preferred_stack_boundary
< 1)
2272 preferred_stack_boundary
= 1;
2273 args_size
->constant
= (aligned_upper_bound (args_size
->constant
2274 + stack_pointer_delta
,
2275 preferred_stack_boundary
)
2276 - stack_pointer_delta
);
2278 args_size
->constant
= upper_bound (args_size
->constant
,
2279 reg_parm_stack_space
);
2281 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl
? fntype
: TREE_TYPE (fndecl
))))
2282 args_size
->constant
-= reg_parm_stack_space
;
2284 return unadjusted_args_size
;
2287 /* Precompute parameters as needed for a function call.
2289 FLAGS is mask of ECF_* constants.
2291 NUM_ACTUALS is the number of arguments.
2293 ARGS is an array containing information for each argument; this
2294 routine fills in the INITIAL_VALUE and VALUE fields for each
2295 precomputed argument. */
2298 precompute_arguments (int num_actuals
, struct arg_data
*args
)
2302 /* If this is a libcall, then precompute all arguments so that we do not
2303 get extraneous instructions emitted as part of the libcall sequence. */
2305 /* If we preallocated the stack space, and some arguments must be passed
2306 on the stack, then we must precompute any parameter which contains a
2307 function call which will store arguments on the stack.
2308 Otherwise, evaluating the parameter may clobber previous parameters
2309 which have already been stored into the stack. (we have code to avoid
2310 such case by saving the outgoing stack arguments, but it results in
2312 if (!ACCUMULATE_OUTGOING_ARGS
)
2315 for (i
= 0; i
< num_actuals
; i
++)
2320 if (TREE_CODE (args
[i
].tree_value
) != CALL_EXPR
)
2323 /* If this is an addressable type, we cannot pre-evaluate it. */
2324 type
= TREE_TYPE (args
[i
].tree_value
);
2325 gcc_assert (!TREE_ADDRESSABLE (type
));
2327 args
[i
].initial_value
= args
[i
].value
2328 = expand_normal (args
[i
].tree_value
);
2330 mode
= TYPE_MODE (type
);
2331 if (mode
!= args
[i
].mode
)
2333 int unsignedp
= args
[i
].unsignedp
;
2335 = convert_modes (args
[i
].mode
, mode
,
2336 args
[i
].value
, args
[i
].unsignedp
);
2338 /* CSE will replace this only if it contains args[i].value
2339 pseudo, so convert it down to the declared mode using
2341 if (REG_P (args
[i
].value
)
2342 && GET_MODE_CLASS (args
[i
].mode
) == MODE_INT
2343 && promote_mode (type
, mode
, &unsignedp
) != args
[i
].mode
)
2345 args
[i
].initial_value
2346 = gen_lowpart_SUBREG (mode
, args
[i
].value
);
2347 SUBREG_PROMOTED_VAR_P (args
[i
].initial_value
) = 1;
2348 SUBREG_PROMOTED_SET (args
[i
].initial_value
, args
[i
].unsignedp
);
2354 /* Given the current state of MUST_PREALLOCATE and information about
2355 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2356 compute and return the final value for MUST_PREALLOCATE. */
2359 finalize_must_preallocate (int must_preallocate
, int num_actuals
,
2360 struct arg_data
*args
, struct args_size
*args_size
)
2362 /* See if we have or want to preallocate stack space.
2364 If we would have to push a partially-in-regs parm
2365 before other stack parms, preallocate stack space instead.
2367 If the size of some parm is not a multiple of the required stack
2368 alignment, we must preallocate.
2370 If the total size of arguments that would otherwise create a copy in
2371 a temporary (such as a CALL) is more than half the total argument list
2372 size, preallocation is faster.
2374 Another reason to preallocate is if we have a machine (like the m88k)
2375 where stack alignment is required to be maintained between every
2376 pair of insns, not just when the call is made. However, we assume here
2377 that such machines either do not have push insns (and hence preallocation
2378 would occur anyway) or the problem is taken care of with
2381 if (! must_preallocate
)
2383 int partial_seen
= 0;
2384 poly_int64 copy_to_evaluate_size
= 0;
2387 for (i
= 0; i
< num_actuals
&& ! must_preallocate
; i
++)
2389 if (args
[i
].partial
> 0 && ! args
[i
].pass_on_stack
)
2391 else if (partial_seen
&& args
[i
].reg
== 0)
2392 must_preallocate
= 1;
2394 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
2395 && (TREE_CODE (args
[i
].tree_value
) == CALL_EXPR
2396 || TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
2397 || TREE_CODE (args
[i
].tree_value
) == COND_EXPR
2398 || TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
))))
2399 copy_to_evaluate_size
2400 += int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
2403 if (maybe_ne (args_size
->constant
, 0)
2404 && maybe_ge (copy_to_evaluate_size
* 2, args_size
->constant
))
2405 must_preallocate
= 1;
2407 return must_preallocate
;
2410 /* If we preallocated stack space, compute the address of each argument
2411 and store it into the ARGS array.
2413 We need not ensure it is a valid memory address here; it will be
2414 validized when it is used.
2416 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2419 compute_argument_addresses (struct arg_data
*args
, rtx argblock
, int num_actuals
)
2423 rtx arg_reg
= argblock
;
2425 poly_int64 arg_offset
= 0;
2427 if (GET_CODE (argblock
) == PLUS
)
2429 arg_reg
= XEXP (argblock
, 0);
2430 arg_offset
= rtx_to_poly_int64 (XEXP (argblock
, 1));
2433 for (i
= 0; i
< num_actuals
; i
++)
2435 rtx offset
= ARGS_SIZE_RTX (args
[i
].locate
.offset
);
2436 rtx slot_offset
= ARGS_SIZE_RTX (args
[i
].locate
.slot_offset
);
2438 unsigned int align
, boundary
;
2439 poly_uint64 units_on_stack
= 0;
2440 machine_mode partial_mode
= VOIDmode
;
2442 /* Skip this parm if it will not be passed on the stack. */
2443 if (! args
[i
].pass_on_stack
2445 && args
[i
].partial
== 0)
2448 if (TYPE_EMPTY_P (TREE_TYPE (args
[i
].tree_value
)))
2451 addr
= simplify_gen_binary (PLUS
, Pmode
, arg_reg
, offset
);
2452 addr
= plus_constant (Pmode
, addr
, arg_offset
);
2454 if (args
[i
].partial
!= 0)
2456 /* Only part of the parameter is being passed on the stack.
2457 Generate a simple memory reference of the correct size. */
2458 units_on_stack
= args
[i
].locate
.size
.constant
;
2459 poly_uint64 bits_on_stack
= units_on_stack
* BITS_PER_UNIT
;
2460 partial_mode
= int_mode_for_size (bits_on_stack
, 1).else_blk ();
2461 args
[i
].stack
= gen_rtx_MEM (partial_mode
, addr
);
2462 set_mem_size (args
[i
].stack
, units_on_stack
);
2466 args
[i
].stack
= gen_rtx_MEM (args
[i
].mode
, addr
);
2467 set_mem_attributes (args
[i
].stack
,
2468 TREE_TYPE (args
[i
].tree_value
), 1);
2470 align
= BITS_PER_UNIT
;
2471 boundary
= args
[i
].locate
.boundary
;
2472 poly_int64 offset_val
;
2473 if (args
[i
].locate
.where_pad
!= PAD_DOWNWARD
)
2475 else if (poly_int_rtx_p (offset
, &offset_val
))
2477 align
= least_bit_hwi (boundary
);
2478 unsigned int offset_align
2479 = known_alignment (offset_val
) * BITS_PER_UNIT
;
2480 if (offset_align
!= 0)
2481 align
= MIN (align
, offset_align
);
2483 set_mem_align (args
[i
].stack
, align
);
2485 addr
= simplify_gen_binary (PLUS
, Pmode
, arg_reg
, slot_offset
);
2486 addr
= plus_constant (Pmode
, addr
, arg_offset
);
2488 if (args
[i
].partial
!= 0)
2490 /* Only part of the parameter is being passed on the stack.
2491 Generate a simple memory reference of the correct size.
2493 args
[i
].stack_slot
= gen_rtx_MEM (partial_mode
, addr
);
2494 set_mem_size (args
[i
].stack_slot
, units_on_stack
);
2498 args
[i
].stack_slot
= gen_rtx_MEM (args
[i
].mode
, addr
);
2499 set_mem_attributes (args
[i
].stack_slot
,
2500 TREE_TYPE (args
[i
].tree_value
), 1);
2502 set_mem_align (args
[i
].stack_slot
, args
[i
].locate
.boundary
);
2504 /* Function incoming arguments may overlap with sibling call
2505 outgoing arguments and we cannot allow reordering of reads
2506 from function arguments with stores to outgoing arguments
2507 of sibling calls. */
2508 set_mem_alias_set (args
[i
].stack
, 0);
2509 set_mem_alias_set (args
[i
].stack_slot
, 0);
2514 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2515 in a call instruction.
2517 FNDECL is the tree node for the target function. For an indirect call
2518 FNDECL will be NULL_TREE.
2520 ADDR is the operand 0 of CALL_EXPR for this call. */
2523 rtx_for_function_call (tree fndecl
, tree addr
)
2527 /* Get the function to call, in the form of RTL. */
2530 if (!TREE_USED (fndecl
) && fndecl
!= current_function_decl
)
2531 TREE_USED (fndecl
) = 1;
2533 /* Get a SYMBOL_REF rtx for the function address. */
2534 funexp
= XEXP (DECL_RTL (fndecl
), 0);
2537 /* Generate an rtx (probably a pseudo-register) for the address. */
2540 funexp
= expand_normal (addr
);
2541 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2546 /* Return the static chain for this function, if any. */
2549 rtx_for_static_chain (const_tree fndecl_or_type
, bool incoming_p
)
2551 if (DECL_P (fndecl_or_type
) && !DECL_STATIC_CHAIN (fndecl_or_type
))
2554 return targetm
.calls
.static_chain (fndecl_or_type
, incoming_p
);
2557 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2560 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2561 or NULL_RTX if none has been scanned yet. */
2562 rtx_insn
*scan_start
;
2563 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2564 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2565 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2566 with fixed offset, or PC if this is with variable or unknown offset. */
2568 } internal_arg_pointer_exp_state
;
2570 static rtx
internal_arg_pointer_based_exp (const_rtx
, bool);
2572 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2573 the tail call sequence, starting with first insn that hasn't been
2574 scanned yet, and note for each pseudo on the LHS whether it is based
2575 on crtl->args.internal_arg_pointer or not, and what offset from that
2576 that pointer it has. */
2579 internal_arg_pointer_based_exp_scan (void)
2581 rtx_insn
*insn
, *scan_start
= internal_arg_pointer_exp_state
.scan_start
;
2583 if (scan_start
== NULL_RTX
)
2584 insn
= get_insns ();
2586 insn
= NEXT_INSN (scan_start
);
2590 rtx set
= single_set (insn
);
2591 if (set
&& REG_P (SET_DEST (set
)) && !HARD_REGISTER_P (SET_DEST (set
)))
2594 unsigned int idx
= REGNO (SET_DEST (set
)) - FIRST_PSEUDO_REGISTER
;
2595 /* Punt on pseudos set multiple times. */
2596 if (idx
< internal_arg_pointer_exp_state
.cache
.length ()
2597 && (internal_arg_pointer_exp_state
.cache
[idx
]
2601 val
= internal_arg_pointer_based_exp (SET_SRC (set
), false);
2602 if (val
!= NULL_RTX
)
2604 if (idx
>= internal_arg_pointer_exp_state
.cache
.length ())
2605 internal_arg_pointer_exp_state
.cache
2606 .safe_grow_cleared (idx
+ 1);
2607 internal_arg_pointer_exp_state
.cache
[idx
] = val
;
2610 if (NEXT_INSN (insn
) == NULL_RTX
)
2612 insn
= NEXT_INSN (insn
);
2615 internal_arg_pointer_exp_state
.scan_start
= scan_start
;
2618 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2619 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2620 it with fixed offset, or PC if this is with variable or unknown offset.
2621 TOPLEVEL is true if the function is invoked at the topmost level. */
2624 internal_arg_pointer_based_exp (const_rtx rtl
, bool toplevel
)
2626 if (CONSTANT_P (rtl
))
2629 if (rtl
== crtl
->args
.internal_arg_pointer
)
2632 if (REG_P (rtl
) && HARD_REGISTER_P (rtl
))
2636 if (GET_CODE (rtl
) == PLUS
&& poly_int_rtx_p (XEXP (rtl
, 1), &offset
))
2638 rtx val
= internal_arg_pointer_based_exp (XEXP (rtl
, 0), toplevel
);
2639 if (val
== NULL_RTX
|| val
== pc_rtx
)
2641 return plus_constant (Pmode
, val
, offset
);
2644 /* When called at the topmost level, scan pseudo assignments in between the
2645 last scanned instruction in the tail call sequence and the latest insn
2646 in that sequence. */
2648 internal_arg_pointer_based_exp_scan ();
2652 unsigned int idx
= REGNO (rtl
) - FIRST_PSEUDO_REGISTER
;
2653 if (idx
< internal_arg_pointer_exp_state
.cache
.length ())
2654 return internal_arg_pointer_exp_state
.cache
[idx
];
2659 subrtx_iterator::array_type array
;
2660 FOR_EACH_SUBRTX (iter
, array
, rtl
, NONCONST
)
2662 const_rtx x
= *iter
;
2663 if (REG_P (x
) && internal_arg_pointer_based_exp (x
, false) != NULL_RTX
)
2666 iter
.skip_subrtxes ();
2672 /* Return true if SIZE bytes starting from address ADDR might overlap an
2673 already-clobbered argument area. This function is used to determine
2674 if we should give up a sibcall. */
2677 mem_might_overlap_already_clobbered_arg_p (rtx addr
, poly_uint64 size
)
2680 unsigned HOST_WIDE_INT start
, end
;
2683 if (bitmap_empty_p (stored_args_map
)
2684 && stored_args_watermark
== HOST_WIDE_INT_M1U
)
2686 val
= internal_arg_pointer_based_exp (addr
, true);
2687 if (val
== NULL_RTX
)
2689 else if (!poly_int_rtx_p (val
, &i
))
2692 if (known_eq (size
, 0U))
2695 if (STACK_GROWS_DOWNWARD
)
2696 i
-= crtl
->args
.pretend_args_size
;
2698 i
+= crtl
->args
.pretend_args_size
;
2700 if (ARGS_GROW_DOWNWARD
)
2703 /* We can ignore any references to the function's pretend args,
2704 which at this point would manifest as negative values of I. */
2705 if (known_le (i
, 0) && known_le (size
, poly_uint64 (-i
)))
2708 start
= maybe_lt (i
, 0) ? 0 : constant_lower_bound (i
);
2709 if (!(i
+ size
).is_constant (&end
))
2710 end
= HOST_WIDE_INT_M1U
;
2712 if (end
> stored_args_watermark
)
2715 end
= MIN (end
, SBITMAP_SIZE (stored_args_map
));
2716 for (unsigned HOST_WIDE_INT k
= start
; k
< end
; ++k
)
2717 if (bitmap_bit_p (stored_args_map
, k
))
2723 /* Do the register loads required for any wholly-register parms or any
2724 parms which are passed both on the stack and in a register. Their
2725 expressions were already evaluated.
2727 Mark all register-parms as living through the call, putting these USE
2728 insns in the CALL_INSN_FUNCTION_USAGE field.
2730 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2731 checking, setting *SIBCALL_FAILURE if appropriate. */
2734 load_register_parameters (struct arg_data
*args
, int num_actuals
,
2735 rtx
*call_fusage
, int flags
, int is_sibcall
,
2736 int *sibcall_failure
)
2740 for (i
= 0; i
< num_actuals
; i
++)
2742 rtx reg
= ((flags
& ECF_SIBCALL
)
2743 ? args
[i
].tail_call_reg
: args
[i
].reg
);
2746 int partial
= args
[i
].partial
;
2748 poly_int64 size
= 0;
2749 HOST_WIDE_INT const_size
= 0;
2750 rtx_insn
*before_arg
= get_last_insn ();
2751 /* Set non-negative if we must move a word at a time, even if
2752 just one word (e.g, partial == 4 && mode == DFmode). Set
2753 to -1 if we just use a normal move insn. This value can be
2754 zero if the argument is a zero size structure. */
2756 if (GET_CODE (reg
) == PARALLEL
)
2760 gcc_assert (partial
% UNITS_PER_WORD
== 0);
2761 nregs
= partial
/ UNITS_PER_WORD
;
2763 else if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
)
2765 /* Variable-sized parameters should be described by a
2766 PARALLEL instead. */
2767 const_size
= int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
2768 gcc_assert (const_size
>= 0);
2769 nregs
= (const_size
+ (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2773 size
= GET_MODE_SIZE (args
[i
].mode
);
2775 /* Handle calls that pass values in multiple non-contiguous
2776 locations. The Irix 6 ABI has examples of this. */
2778 if (GET_CODE (reg
) == PARALLEL
)
2779 emit_group_move (reg
, args
[i
].parallel_value
);
2781 /* If simple case, just do move. If normal partial, store_one_arg
2782 has already loaded the register for us. In all other cases,
2783 load the register(s) from memory. */
2785 else if (nregs
== -1)
2787 emit_move_insn (reg
, args
[i
].value
);
2788 #ifdef BLOCK_REG_PADDING
2789 /* Handle case where we have a value that needs shifting
2790 up to the msb. eg. a QImode value and we're padding
2791 upward on a BYTES_BIG_ENDIAN machine. */
2792 if (args
[i
].locate
.where_pad
2793 == (BYTES_BIG_ENDIAN
? PAD_UPWARD
: PAD_DOWNWARD
))
2795 gcc_checking_assert (ordered_p (size
, UNITS_PER_WORD
));
2796 if (maybe_lt (size
, UNITS_PER_WORD
))
2800 = (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
2802 /* Assigning REG here rather than a temp makes
2803 CALL_FUSAGE report the whole reg as used.
2804 Strictly speaking, the call only uses SIZE
2805 bytes at the msb end, but it doesn't seem worth
2806 generating rtl to say that. */
2807 reg
= gen_rtx_REG (word_mode
, REGNO (reg
));
2808 x
= expand_shift (LSHIFT_EXPR
, word_mode
,
2809 reg
, shift
, reg
, 1);
2811 emit_move_insn (reg
, x
);
2817 /* If we have pre-computed the values to put in the registers in
2818 the case of non-aligned structures, copy them in now. */
2820 else if (args
[i
].n_aligned_regs
!= 0)
2821 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
2822 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
) + j
),
2823 args
[i
].aligned_regs
[j
]);
2825 else if (partial
== 0 || args
[i
].pass_on_stack
)
2827 /* SIZE and CONST_SIZE are 0 for partial arguments and
2828 the size of a BLKmode type otherwise. */
2829 gcc_checking_assert (known_eq (size
, const_size
));
2830 rtx mem
= validize_mem (copy_rtx (args
[i
].value
));
2832 /* Check for overlap with already clobbered argument area,
2833 providing that this has non-zero size. */
2836 && (mem_might_overlap_already_clobbered_arg_p
2837 (XEXP (args
[i
].value
, 0), const_size
)))
2838 *sibcall_failure
= 1;
2840 if (const_size
% UNITS_PER_WORD
== 0
2841 || MEM_ALIGN (mem
) % BITS_PER_WORD
== 0)
2842 move_block_to_reg (REGNO (reg
), mem
, nregs
, args
[i
].mode
);
2846 move_block_to_reg (REGNO (reg
), mem
, nregs
- 1,
2848 rtx dest
= gen_rtx_REG (word_mode
, REGNO (reg
) + nregs
- 1);
2849 unsigned int bitoff
= (nregs
- 1) * BITS_PER_WORD
;
2850 unsigned int bitsize
= const_size
* BITS_PER_UNIT
- bitoff
;
2851 rtx x
= extract_bit_field (mem
, bitsize
, bitoff
, 1, dest
,
2852 word_mode
, word_mode
, false,
2854 if (BYTES_BIG_ENDIAN
)
2855 x
= expand_shift (LSHIFT_EXPR
, word_mode
, x
,
2856 BITS_PER_WORD
- bitsize
, dest
, 1);
2858 emit_move_insn (dest
, x
);
2861 /* Handle a BLKmode that needs shifting. */
2862 if (nregs
== 1 && const_size
< UNITS_PER_WORD
2863 #ifdef BLOCK_REG_PADDING
2864 && args
[i
].locate
.where_pad
== PAD_DOWNWARD
2870 rtx dest
= gen_rtx_REG (word_mode
, REGNO (reg
));
2871 int shift
= (UNITS_PER_WORD
- const_size
) * BITS_PER_UNIT
;
2872 enum tree_code dir
= (BYTES_BIG_ENDIAN
2873 ? RSHIFT_EXPR
: LSHIFT_EXPR
);
2876 x
= expand_shift (dir
, word_mode
, dest
, shift
, dest
, 1);
2878 emit_move_insn (dest
, x
);
2882 /* When a parameter is a block, and perhaps in other cases, it is
2883 possible that it did a load from an argument slot that was
2884 already clobbered. */
2886 && check_sibcall_argument_overlap (before_arg
, &args
[i
], 0))
2887 *sibcall_failure
= 1;
2889 /* Handle calls that pass values in multiple non-contiguous
2890 locations. The Irix 6 ABI has examples of this. */
2891 if (GET_CODE (reg
) == PARALLEL
)
2892 use_group_regs (call_fusage
, reg
);
2893 else if (nregs
== -1)
2894 use_reg_mode (call_fusage
, reg
,
2895 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)));
2897 use_regs (call_fusage
, REGNO (reg
), nregs
);
2902 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2903 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2904 bytes, then we would need to push some additional bytes to pad the
2905 arguments. So, we try to compute an adjust to the stack pointer for an
2906 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2907 bytes. Then, when the arguments are pushed the stack will be perfectly
2910 Return true if this optimization is possible, storing the adjustment
2911 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2912 bytes that should be popped after the call. */
2915 combine_pending_stack_adjustment_and_call (poly_int64_pod
*adjustment_out
,
2916 poly_int64 unadjusted_args_size
,
2917 struct args_size
*args_size
,
2918 unsigned int preferred_unit_stack_boundary
)
2920 /* The number of bytes to pop so that the stack will be
2921 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2922 poly_int64 adjustment
;
2923 /* The alignment of the stack after the arguments are pushed, if we
2924 just pushed the arguments without adjust the stack here. */
2925 unsigned HOST_WIDE_INT unadjusted_alignment
;
2927 if (!known_misalignment (stack_pointer_delta
+ unadjusted_args_size
,
2928 preferred_unit_stack_boundary
,
2929 &unadjusted_alignment
))
2932 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2933 as possible -- leaving just enough left to cancel out the
2934 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2935 PENDING_STACK_ADJUST is non-negative, and congruent to
2936 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2938 /* Begin by trying to pop all the bytes. */
2939 unsigned HOST_WIDE_INT tmp_misalignment
;
2940 if (!known_misalignment (pending_stack_adjust
,
2941 preferred_unit_stack_boundary
,
2944 unadjusted_alignment
-= tmp_misalignment
;
2945 adjustment
= pending_stack_adjust
;
2946 /* Push enough additional bytes that the stack will be aligned
2947 after the arguments are pushed. */
2948 if (preferred_unit_stack_boundary
> 1 && unadjusted_alignment
)
2949 adjustment
-= preferred_unit_stack_boundary
- unadjusted_alignment
;
2951 /* We need to know whether the adjusted argument size
2952 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2953 or a deallocation. */
2954 if (!ordered_p (adjustment
, unadjusted_args_size
))
2957 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
2958 bytes after the call. The right number is the entire
2959 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2960 by the arguments in the first place. */
2962 = pending_stack_adjust
- adjustment
+ unadjusted_args_size
;
2964 *adjustment_out
= adjustment
;
2968 /* Scan X expression if it does not dereference any argument slots
2969 we already clobbered by tail call arguments (as noted in stored_args_map
2971 Return nonzero if X expression dereferences such argument slots,
2975 check_sibcall_argument_overlap_1 (rtx x
)
2984 code
= GET_CODE (x
);
2986 /* We need not check the operands of the CALL expression itself. */
2991 return (mem_might_overlap_already_clobbered_arg_p
2992 (XEXP (x
, 0), GET_MODE_SIZE (GET_MODE (x
))));
2994 /* Scan all subexpressions. */
2995 fmt
= GET_RTX_FORMAT (code
);
2996 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3000 if (check_sibcall_argument_overlap_1 (XEXP (x
, i
)))
3003 else if (*fmt
== 'E')
3005 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3006 if (check_sibcall_argument_overlap_1 (XVECEXP (x
, i
, j
)))
3013 /* Scan sequence after INSN if it does not dereference any argument slots
3014 we already clobbered by tail call arguments (as noted in stored_args_map
3015 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
3016 stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
3017 should be 0). Return nonzero if sequence after INSN dereferences such argument
3018 slots, zero otherwise. */
3021 check_sibcall_argument_overlap (rtx_insn
*insn
, struct arg_data
*arg
,
3022 int mark_stored_args_map
)
3024 poly_uint64 low
, high
;
3025 unsigned HOST_WIDE_INT const_low
, const_high
;
3027 if (insn
== NULL_RTX
)
3028 insn
= get_insns ();
3030 insn
= NEXT_INSN (insn
);
3032 for (; insn
; insn
= NEXT_INSN (insn
))
3034 && check_sibcall_argument_overlap_1 (PATTERN (insn
)))
3037 if (mark_stored_args_map
)
3039 if (ARGS_GROW_DOWNWARD
)
3040 low
= -arg
->locate
.slot_offset
.constant
- arg
->locate
.size
.constant
;
3042 low
= arg
->locate
.slot_offset
.constant
;
3043 high
= low
+ arg
->locate
.size
.constant
;
3045 const_low
= constant_lower_bound (low
);
3046 if (high
.is_constant (&const_high
))
3047 for (unsigned HOST_WIDE_INT i
= const_low
; i
< const_high
; ++i
)
3048 bitmap_set_bit (stored_args_map
, i
);
3050 stored_args_watermark
= MIN (stored_args_watermark
, const_low
);
3052 return insn
!= NULL_RTX
;
3055 /* Given that a function returns a value of mode MODE at the most
3056 significant end of hard register VALUE, shift VALUE left or right
3057 as specified by LEFT_P. Return true if some action was needed. */
3060 shift_return_value (machine_mode mode
, bool left_p
, rtx value
)
3062 gcc_assert (REG_P (value
) && HARD_REGISTER_P (value
));
3063 machine_mode value_mode
= GET_MODE (value
);
3064 poly_int64 shift
= GET_MODE_BITSIZE (value_mode
) - GET_MODE_BITSIZE (mode
);
3066 if (known_eq (shift
, 0))
3069 /* Use ashr rather than lshr for right shifts. This is for the benefit
3070 of the MIPS port, which requires SImode values to be sign-extended
3071 when stored in 64-bit registers. */
3072 if (!force_expand_binop (value_mode
, left_p
? ashl_optab
: ashr_optab
,
3073 value
, gen_int_shift_amount (value_mode
, shift
),
3074 value
, 1, OPTAB_WIDEN
))
3079 /* If X is a likely-spilled register value, copy it to a pseudo
3080 register and return that register. Return X otherwise. */
3083 avoid_likely_spilled_reg (rtx x
)
3088 && HARD_REGISTER_P (x
)
3089 && targetm
.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x
))))
3091 /* Make sure that we generate a REG rather than a CONCAT.
3092 Moves into CONCATs can need nontrivial instructions,
3093 and the whole point of this function is to avoid
3094 using the hard register directly in such a situation. */
3095 generating_concat_p
= 0;
3096 new_rtx
= gen_reg_rtx (GET_MODE (x
));
3097 generating_concat_p
= 1;
3098 emit_move_insn (new_rtx
, x
);
3104 /* Helper function for expand_call.
3105 Return false is EXP is not implementable as a sibling call. */
3108 can_implement_as_sibling_call_p (tree exp
,
3109 rtx structure_value_addr
,
3111 int reg_parm_stack_space ATTRIBUTE_UNUSED
,
3115 const args_size
&args_size
)
3117 if (!targetm
.have_sibcall_epilogue ())
3119 maybe_complain_about_tail_call
3121 "machine description does not have"
3122 " a sibcall_epilogue instruction pattern");
3126 /* Doing sibling call optimization needs some work, since
3127 structure_value_addr can be allocated on the stack.
3128 It does not seem worth the effort since few optimizable
3129 sibling calls will return a structure. */
3130 if (structure_value_addr
!= NULL_RTX
)
3132 maybe_complain_about_tail_call (exp
, "callee returns a structure");
3136 #ifdef REG_PARM_STACK_SPACE
3137 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
3138 if (OUTGOING_REG_PARM_STACK_SPACE (funtype
)
3139 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl
))
3140 || (reg_parm_stack_space
!= REG_PARM_STACK_SPACE (current_function_decl
)))
3142 maybe_complain_about_tail_call (exp
,
3143 "inconsistent size of stack space"
3144 " allocated for arguments which are"
3145 " passed in registers");
3150 /* Check whether the target is able to optimize the call
3152 if (!targetm
.function_ok_for_sibcall (fndecl
, exp
))
3154 maybe_complain_about_tail_call (exp
,
3155 "target is not able to optimize the"
3156 " call into a sibling call");
3160 /* Functions that do not return exactly once may not be sibcall
3162 if (flags
& ECF_RETURNS_TWICE
)
3164 maybe_complain_about_tail_call (exp
, "callee returns twice");
3167 if (flags
& ECF_NORETURN
)
3169 maybe_complain_about_tail_call (exp
, "callee does not return");
3173 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr
))))
3175 maybe_complain_about_tail_call (exp
, "volatile function type");
3179 /* If the called function is nested in the current one, it might access
3180 some of the caller's arguments, but could clobber them beforehand if
3181 the argument areas are shared. */
3182 if (fndecl
&& decl_function_context (fndecl
) == current_function_decl
)
3184 maybe_complain_about_tail_call (exp
, "nested function");
3188 /* If this function requires more stack slots than the current
3189 function, we cannot change it into a sibling call.
3190 crtl->args.pretend_args_size is not part of the
3191 stack allocated by our caller. */
3192 if (maybe_gt (args_size
.constant
,
3193 crtl
->args
.size
- crtl
->args
.pretend_args_size
))
3195 maybe_complain_about_tail_call (exp
,
3196 "callee required more stack slots"
3197 " than the caller");
3201 /* If the callee pops its own arguments, then it must pop exactly
3202 the same number of arguments as the current function. */
3203 if (maybe_ne (targetm
.calls
.return_pops_args (fndecl
, funtype
,
3204 args_size
.constant
),
3205 targetm
.calls
.return_pops_args (current_function_decl
,
3207 (current_function_decl
),
3210 maybe_complain_about_tail_call (exp
,
3211 "inconsistent number of"
3212 " popped arguments");
3216 if (!lang_hooks
.decls
.ok_for_sibcall (fndecl
))
3218 maybe_complain_about_tail_call (exp
, "frontend does not support"
3223 /* All checks passed. */
3227 /* Update stack alignment when the parameter is passed in the stack
3228 since the outgoing parameter requires extra alignment on the calling
3232 update_stack_alignment_for_call (struct locate_and_pad_arg_data
*locate
)
3234 if (crtl
->stack_alignment_needed
< locate
->boundary
)
3235 crtl
->stack_alignment_needed
= locate
->boundary
;
3236 if (crtl
->preferred_stack_boundary
< locate
->boundary
)
3237 crtl
->preferred_stack_boundary
= locate
->boundary
;
3240 /* Generate all the code for a CALL_EXPR exp
3241 and return an rtx for its value.
3242 Store the value in TARGET (specified as an rtx) if convenient.
3243 If the value is stored in TARGET then TARGET is returned.
3244 If IGNORE is nonzero, then we ignore the value of the function call. */
3247 expand_call (tree exp
, rtx target
, int ignore
)
3249 /* Nonzero if we are currently expanding a call. */
3250 static int currently_expanding_call
= 0;
3252 /* RTX for the function to be called. */
3254 /* Sequence of insns to perform a normal "call". */
3255 rtx_insn
*normal_call_insns
= NULL
;
3256 /* Sequence of insns to perform a tail "call". */
3257 rtx_insn
*tail_call_insns
= NULL
;
3258 /* Data type of the function. */
3260 tree type_arg_types
;
3262 /* Declaration of the function being called,
3263 or 0 if the function is computed (not known by name). */
3265 /* The type of the function being called. */
3267 bool try_tail_call
= CALL_EXPR_TAILCALL (exp
);
3268 bool must_tail_call
= CALL_EXPR_MUST_TAIL_CALL (exp
);
3271 /* Register in which non-BLKmode value will be returned,
3272 or 0 if no value or if value is BLKmode. */
3274 /* Address where we should return a BLKmode value;
3275 0 if value not BLKmode. */
3276 rtx structure_value_addr
= 0;
3277 /* Nonzero if that address is being passed by treating it as
3278 an extra, implicit first parameter. Otherwise,
3279 it is passed by being copied directly into struct_value_rtx. */
3280 int structure_value_addr_parm
= 0;
3281 /* Holds the value of implicit argument for the struct value. */
3282 tree structure_value_addr_value
= NULL_TREE
;
3283 /* Size of aggregate value wanted, or zero if none wanted
3284 or if we are using the non-reentrant PCC calling convention
3285 or expecting the value in registers. */
3286 poly_int64 struct_value_size
= 0;
3287 /* Nonzero if called function returns an aggregate in memory PCC style,
3288 by returning the address of where to find it. */
3289 int pcc_struct_value
= 0;
3290 rtx struct_value
= 0;
3292 /* Number of actual parameters in this call, including struct value addr. */
3294 /* Number of named args. Args after this are anonymous ones
3295 and they must all go on the stack. */
3297 /* Number of complex actual arguments that need to be split. */
3298 int num_complex_actuals
= 0;
3300 /* Vector of information about each argument.
3301 Arguments are numbered in the order they will be pushed,
3302 not the order they are written. */
3303 struct arg_data
*args
;
3305 /* Total size in bytes of all the stack-parms scanned so far. */
3306 struct args_size args_size
;
3307 struct args_size adjusted_args_size
;
3308 /* Size of arguments before any adjustments (such as rounding). */
3309 poly_int64 unadjusted_args_size
;
3310 /* Data on reg parms scanned so far. */
3311 CUMULATIVE_ARGS args_so_far_v
;
3312 cumulative_args_t args_so_far
;
3313 /* Nonzero if a reg parm has been scanned. */
3315 /* Nonzero if this is an indirect function call. */
3317 /* Nonzero if we must avoid push-insns in the args for this call.
3318 If stack space is allocated for register parameters, but not by the
3319 caller, then it is preallocated in the fixed part of the stack frame.
3320 So the entire argument block must then be preallocated (i.e., we
3321 ignore PUSH_ROUNDING in that case). */
3323 int must_preallocate
= !PUSH_ARGS
;
3325 /* Size of the stack reserved for parameter registers. */
3326 int reg_parm_stack_space
= 0;
3328 /* Address of space preallocated for stack parms
3329 (on machines that lack push insns), or 0 if space not preallocated. */
3332 /* Mask of ECF_ and ERF_ flags. */
3334 int return_flags
= 0;
3335 #ifdef REG_PARM_STACK_SPACE
3336 /* Define the boundary of the register parm stack space that needs to be
3338 int low_to_save
, high_to_save
;
3339 rtx save_area
= 0; /* Place that it is saved */
3342 unsigned int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
3343 char *initial_stack_usage_map
= stack_usage_map
;
3344 unsigned HOST_WIDE_INT initial_stack_usage_watermark
= stack_usage_watermark
;
3345 char *stack_usage_map_buf
= NULL
;
3347 poly_int64 old_stack_allocated
;
3349 /* State variables to track stack modifications. */
3350 rtx old_stack_level
= 0;
3351 int old_stack_arg_under_construction
= 0;
3352 poly_int64 old_pending_adj
= 0;
3353 int old_inhibit_defer_pop
= inhibit_defer_pop
;
3355 /* Some stack pointer alterations we make are performed via
3356 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3357 which we then also need to save/restore along the way. */
3358 poly_int64 old_stack_pointer_delta
= 0;
3361 tree addr
= CALL_EXPR_FN (exp
);
3363 /* The alignment of the stack, in bits. */
3364 unsigned HOST_WIDE_INT preferred_stack_boundary
;
3365 /* The alignment of the stack, in bytes. */
3366 unsigned HOST_WIDE_INT preferred_unit_stack_boundary
;
3367 /* The static chain value to use for this call. */
3368 rtx static_chain_value
;
3369 /* See if this is "nothrow" function call. */
3370 if (TREE_NOTHROW (exp
))
3371 flags
|= ECF_NOTHROW
;
3373 /* See if we can find a DECL-node for the actual function, and get the
3374 function attributes (flags) from the function decl or type node. */
3375 fndecl
= get_callee_fndecl (exp
);
3378 fntype
= TREE_TYPE (fndecl
);
3379 flags
|= flags_from_decl_or_type (fndecl
);
3380 return_flags
|= decl_return_flags (fndecl
);
3384 fntype
= TREE_TYPE (TREE_TYPE (addr
));
3385 flags
|= flags_from_decl_or_type (fntype
);
3386 if (CALL_EXPR_BY_DESCRIPTOR (exp
))
3387 flags
|= ECF_BY_DESCRIPTOR
;
3389 rettype
= TREE_TYPE (exp
);
3391 struct_value
= targetm
.calls
.struct_value_rtx (fntype
, 0);
3393 /* Warn if this value is an aggregate type,
3394 regardless of which calling convention we are using for it. */
3395 if (AGGREGATE_TYPE_P (rettype
))
3396 warning (OPT_Waggregate_return
, "function call has aggregate value");
3398 /* If the result of a non looping pure or const function call is
3399 ignored (or void), and none of its arguments are volatile, we can
3400 avoid expanding the call and just evaluate the arguments for
3402 if ((flags
& (ECF_CONST
| ECF_PURE
))
3403 && (!(flags
& ECF_LOOPING_CONST_OR_PURE
))
3404 && (ignore
|| target
== const0_rtx
3405 || TYPE_MODE (rettype
) == VOIDmode
))
3407 bool volatilep
= false;
3409 call_expr_arg_iterator iter
;
3411 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
3412 if (TREE_THIS_VOLATILE (arg
))
3420 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
3421 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3426 #ifdef REG_PARM_STACK_SPACE
3427 reg_parm_stack_space
= REG_PARM_STACK_SPACE (!fndecl
? fntype
: fndecl
);
3430 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl
? fntype
: TREE_TYPE (fndecl
)))
3431 && reg_parm_stack_space
> 0 && PUSH_ARGS
)
3432 must_preallocate
= 1;
3434 /* Set up a place to return a structure. */
3436 /* Cater to broken compilers. */
3437 if (aggregate_value_p (exp
, fntype
))
3439 /* This call returns a big structure. */
3440 flags
&= ~(ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
);
3442 #ifdef PCC_STATIC_STRUCT_RETURN
3444 pcc_struct_value
= 1;
3446 #else /* not PCC_STATIC_STRUCT_RETURN */
3448 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype
), &struct_value_size
))
3449 struct_value_size
= -1;
3451 /* Even if it is semantically safe to use the target as the return
3452 slot, it may be not sufficiently aligned for the return type. */
3453 if (CALL_EXPR_RETURN_SLOT_OPT (exp
)
3456 /* If rettype is addressable, we may not create a temporary.
3457 If target is properly aligned at runtime and the compiler
3458 just doesn't know about it, it will work fine, otherwise it
3460 && (TREE_ADDRESSABLE (rettype
)
3461 || !(MEM_ALIGN (target
) < TYPE_ALIGN (rettype
)
3462 && targetm
.slow_unaligned_access (TYPE_MODE (rettype
),
3463 MEM_ALIGN (target
)))))
3464 structure_value_addr
= XEXP (target
, 0);
3467 /* For variable-sized objects, we must be called with a target
3468 specified. If we were to allocate space on the stack here,
3469 we would have no way of knowing when to free it. */
3470 rtx d
= assign_temp (rettype
, 1, 1);
3471 structure_value_addr
= XEXP (d
, 0);
3475 #endif /* not PCC_STATIC_STRUCT_RETURN */
3478 /* Figure out the amount to which the stack should be aligned. */
3479 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
3482 struct cgraph_rtl_info
*i
= cgraph_node::rtl_info (fndecl
);
3483 /* Without automatic stack alignment, we can't increase preferred
3484 stack boundary. With automatic stack alignment, it is
3485 unnecessary since unless we can guarantee that all callers will
3486 align the outgoing stack properly, callee has to align its
3489 && i
->preferred_incoming_stack_boundary
3490 && i
->preferred_incoming_stack_boundary
< preferred_stack_boundary
)
3491 preferred_stack_boundary
= i
->preferred_incoming_stack_boundary
;
3494 /* Operand 0 is a pointer-to-function; get the type of the function. */
3495 funtype
= TREE_TYPE (addr
);
3496 gcc_assert (POINTER_TYPE_P (funtype
));
3497 funtype
= TREE_TYPE (funtype
);
3499 /* Count whether there are actual complex arguments that need to be split
3500 into their real and imaginary parts. Munge the type_arg_types
3501 appropriately here as well. */
3502 if (targetm
.calls
.split_complex_arg
)
3504 call_expr_arg_iterator iter
;
3506 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
3508 tree type
= TREE_TYPE (arg
);
3509 if (type
&& TREE_CODE (type
) == COMPLEX_TYPE
3510 && targetm
.calls
.split_complex_arg (type
))
3511 num_complex_actuals
++;
3513 type_arg_types
= split_complex_types (TYPE_ARG_TYPES (funtype
));
3516 type_arg_types
= TYPE_ARG_TYPES (funtype
);
3518 if (flags
& ECF_MAY_BE_ALLOCA
)
3519 cfun
->calls_alloca
= 1;
3521 /* If struct_value_rtx is 0, it means pass the address
3522 as if it were an extra parameter. Put the argument expression
3523 in structure_value_addr_value. */
3524 if (structure_value_addr
&& struct_value
== 0)
3526 /* If structure_value_addr is a REG other than
3527 virtual_outgoing_args_rtx, we can use always use it. If it
3528 is not a REG, we must always copy it into a register.
3529 If it is virtual_outgoing_args_rtx, we must copy it to another
3530 register in some cases. */
3531 rtx temp
= (!REG_P (structure_value_addr
)
3532 || (ACCUMULATE_OUTGOING_ARGS
3533 && stack_arg_under_construction
3534 && structure_value_addr
== virtual_outgoing_args_rtx
)
3535 ? copy_addr_to_reg (convert_memory_address
3536 (Pmode
, structure_value_addr
))
3537 : structure_value_addr
);
3539 structure_value_addr_value
=
3540 make_tree (build_pointer_type (TREE_TYPE (funtype
)), temp
);
3541 structure_value_addr_parm
= 1;
3544 /* Count the arguments and set NUM_ACTUALS. */
3546 call_expr_nargs (exp
) + num_complex_actuals
+ structure_value_addr_parm
;
3548 /* Compute number of named args.
3549 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3551 if (type_arg_types
!= 0)
3553 = (list_length (type_arg_types
)
3554 /* Count the struct value address, if it is passed as a parm. */
3555 + structure_value_addr_parm
);
3557 /* If we know nothing, treat all args as named. */
3558 n_named_args
= num_actuals
;
3560 /* Start updating where the next arg would go.
3562 On some machines (such as the PA) indirect calls have a different
3563 calling convention than normal calls. The fourth argument in
3564 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3566 INIT_CUMULATIVE_ARGS (args_so_far_v
, funtype
, NULL_RTX
, fndecl
, n_named_args
);
3567 args_so_far
= pack_cumulative_args (&args_so_far_v
);
3569 /* Now possibly adjust the number of named args.
3570 Normally, don't include the last named arg if anonymous args follow.
3571 We do include the last named arg if
3572 targetm.calls.strict_argument_naming() returns nonzero.
3573 (If no anonymous args follow, the result of list_length is actually
3574 one too large. This is harmless.)
3576 If targetm.calls.pretend_outgoing_varargs_named() returns
3577 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3578 this machine will be able to place unnamed args that were passed
3579 in registers into the stack. So treat all args as named. This
3580 allows the insns emitting for a specific argument list to be
3581 independent of the function declaration.
3583 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3584 we do not have any reliable way to pass unnamed args in
3585 registers, so we must force them into memory. */
3587 if (type_arg_types
!= 0
3588 && targetm
.calls
.strict_argument_naming (args_so_far
))
3590 else if (type_arg_types
!= 0
3591 && ! targetm
.calls
.pretend_outgoing_varargs_named (args_so_far
))
3592 /* Don't include the last named arg. */
3595 /* Treat all args as named. */
3596 n_named_args
= num_actuals
;
3598 /* Make a vector to hold all the information about each arg. */
3599 args
= XCNEWVEC (struct arg_data
, num_actuals
);
3601 /* Build up entries in the ARGS array, compute the size of the
3602 arguments into ARGS_SIZE, etc. */
3603 initialize_argument_information (num_actuals
, args
, &args_size
,
3605 structure_value_addr_value
, fndecl
, fntype
,
3606 args_so_far
, reg_parm_stack_space
,
3607 &old_stack_level
, &old_pending_adj
,
3608 &must_preallocate
, &flags
,
3609 &try_tail_call
, CALL_FROM_THUNK_P (exp
));
3612 must_preallocate
= 1;
3614 /* Now make final decision about preallocating stack space. */
3615 must_preallocate
= finalize_must_preallocate (must_preallocate
,
3619 /* If the structure value address will reference the stack pointer, we
3620 must stabilize it. We don't need to do this if we know that we are
3621 not going to adjust the stack pointer in processing this call. */
3623 if (structure_value_addr
3624 && (reg_mentioned_p (virtual_stack_dynamic_rtx
, structure_value_addr
)
3625 || reg_mentioned_p (virtual_outgoing_args_rtx
,
3626 structure_value_addr
))
3628 || (!ACCUMULATE_OUTGOING_ARGS
3629 && maybe_ne (args_size
.constant
, 0))))
3630 structure_value_addr
= copy_to_reg (structure_value_addr
);
3632 /* Tail calls can make things harder to debug, and we've traditionally
3633 pushed these optimizations into -O2. Don't try if we're already
3634 expanding a call, as that means we're an argument. Don't try if
3635 there's cleanups, as we know there's code to follow the call. */
3636 if (currently_expanding_call
++ != 0
3637 || (!flag_optimize_sibling_calls
&& !CALL_FROM_THUNK_P (exp
))
3639 || dbg_cnt (tail_call
) == false)
3642 /* Workaround buggy C/C++ wrappers around Fortran routines with
3643 character(len=constant) arguments if the hidden string length arguments
3644 are passed on the stack; if the callers forget to pass those arguments,
3645 attempting to tail call in such routines leads to stack corruption.
3646 Avoid tail calls in functions where at least one such hidden string
3647 length argument is passed (partially or fully) on the stack in the
3648 caller and the callee needs to pass any arguments on the stack.
3650 if (try_tail_call
&& maybe_ne (args_size
.constant
, 0))
3651 for (tree arg
= DECL_ARGUMENTS (current_function_decl
);
3652 arg
; arg
= DECL_CHAIN (arg
))
3653 if (DECL_HIDDEN_STRING_LENGTH (arg
) && DECL_INCOMING_RTL (arg
))
3655 subrtx_iterator::array_type array
;
3656 FOR_EACH_SUBRTX (iter
, array
, DECL_INCOMING_RTL (arg
), NONCONST
)
3664 /* If the user has marked the function as requiring tail-call
3665 optimization, attempt it. */
3669 /* Rest of purposes for tail call optimizations to fail. */
3671 try_tail_call
= can_implement_as_sibling_call_p (exp
,
3672 structure_value_addr
,
3674 reg_parm_stack_space
,
3676 flags
, addr
, args_size
);
3678 /* Check if caller and callee disagree in promotion of function
3682 machine_mode caller_mode
, caller_promoted_mode
;
3683 machine_mode callee_mode
, callee_promoted_mode
;
3684 int caller_unsignedp
, callee_unsignedp
;
3685 tree caller_res
= DECL_RESULT (current_function_decl
);
3687 caller_unsignedp
= TYPE_UNSIGNED (TREE_TYPE (caller_res
));
3688 caller_mode
= DECL_MODE (caller_res
);
3689 callee_unsignedp
= TYPE_UNSIGNED (TREE_TYPE (funtype
));
3690 callee_mode
= TYPE_MODE (TREE_TYPE (funtype
));
3691 caller_promoted_mode
3692 = promote_function_mode (TREE_TYPE (caller_res
), caller_mode
,
3694 TREE_TYPE (current_function_decl
), 1);
3695 callee_promoted_mode
3696 = promote_function_mode (TREE_TYPE (funtype
), callee_mode
,
3699 if (caller_mode
!= VOIDmode
3700 && (caller_promoted_mode
!= callee_promoted_mode
3701 || ((caller_mode
!= caller_promoted_mode
3702 || callee_mode
!= callee_promoted_mode
)
3703 && (caller_unsignedp
!= callee_unsignedp
3704 || partial_subreg_p (caller_mode
, callee_mode
)))))
3707 maybe_complain_about_tail_call (exp
,
3708 "caller and callee disagree in"
3709 " promotion of function"
3714 /* Ensure current function's preferred stack boundary is at least
3715 what we need. Stack alignment may also increase preferred stack
3717 for (i
= 0; i
< num_actuals
; i
++)
3718 if (reg_parm_stack_space
> 0
3720 || args
[i
].partial
!= 0
3721 || args
[i
].pass_on_stack
)
3722 update_stack_alignment_for_call (&args
[i
].locate
);
3723 if (crtl
->preferred_stack_boundary
< preferred_stack_boundary
)
3724 crtl
->preferred_stack_boundary
= preferred_stack_boundary
;
3726 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
3728 preferred_unit_stack_boundary
= preferred_stack_boundary
/ BITS_PER_UNIT
;
3730 /* We want to make two insn chains; one for a sibling call, the other
3731 for a normal call. We will select one of the two chains after
3732 initial RTL generation is complete. */
3733 for (pass
= try_tail_call
? 0 : 1; pass
< 2; pass
++)
3735 int sibcall_failure
= 0;
3736 /* We want to emit any pending stack adjustments before the tail
3737 recursion "call". That way we know any adjustment after the tail
3738 recursion call can be ignored if we indeed use the tail
3740 saved_pending_stack_adjust save
;
3741 rtx_insn
*insns
, *before_call
, *after_args
;
3746 /* State variables we need to save and restore between
3748 save_pending_stack_adjust (&save
);
3751 flags
&= ~ECF_SIBCALL
;
3753 flags
|= ECF_SIBCALL
;
3755 /* Other state variables that we must reinitialize each time
3756 through the loop (that are not initialized by the loop itself). */
3760 /* Start a new sequence for the normal call case.
3762 From this point on, if the sibling call fails, we want to set
3763 sibcall_failure instead of continuing the loop. */
3766 /* Don't let pending stack adjusts add up to too much.
3767 Also, do all pending adjustments now if there is any chance
3768 this might be a call to alloca or if we are expanding a sibling
3770 Also do the adjustments before a throwing call, otherwise
3771 exception handling can fail; PR 19225. */
3772 if (maybe_ge (pending_stack_adjust
, 32)
3773 || (maybe_ne (pending_stack_adjust
, 0)
3774 && (flags
& ECF_MAY_BE_ALLOCA
))
3775 || (maybe_ne (pending_stack_adjust
, 0)
3776 && flag_exceptions
&& !(flags
& ECF_NOTHROW
))
3778 do_pending_stack_adjust ();
3780 /* Precompute any arguments as needed. */
3782 precompute_arguments (num_actuals
, args
);
3784 /* Now we are about to start emitting insns that can be deleted
3785 if a libcall is deleted. */
3786 if (pass
&& (flags
& ECF_MALLOC
))
3790 && crtl
->stack_protect_guard
3791 && targetm
.stack_protect_runtime_enabled_p ())
3792 stack_protect_epilogue ();
3794 adjusted_args_size
= args_size
;
3795 /* Compute the actual size of the argument block required. The variable
3796 and constant sizes must be combined, the size may have to be rounded,
3797 and there may be a minimum required size. When generating a sibcall
3798 pattern, do not round up, since we'll be re-using whatever space our
3800 unadjusted_args_size
3801 = compute_argument_block_size (reg_parm_stack_space
,
3802 &adjusted_args_size
,
3805 : preferred_stack_boundary
));
3807 old_stack_allocated
= stack_pointer_delta
- pending_stack_adjust
;
3809 /* The argument block when performing a sibling call is the
3810 incoming argument block. */
3813 argblock
= crtl
->args
.internal_arg_pointer
;
3814 if (STACK_GROWS_DOWNWARD
)
3816 = plus_constant (Pmode
, argblock
, crtl
->args
.pretend_args_size
);
3819 = plus_constant (Pmode
, argblock
, -crtl
->args
.pretend_args_size
);
3821 HOST_WIDE_INT map_size
= constant_lower_bound (args_size
.constant
);
3822 stored_args_map
= sbitmap_alloc (map_size
);
3823 bitmap_clear (stored_args_map
);
3824 stored_args_watermark
= HOST_WIDE_INT_M1U
;
3827 /* If we have no actual push instructions, or shouldn't use them,
3828 make space for all args right now. */
3829 else if (adjusted_args_size
.var
!= 0)
3831 if (old_stack_level
== 0)
3833 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
3834 old_stack_pointer_delta
= stack_pointer_delta
;
3835 old_pending_adj
= pending_stack_adjust
;
3836 pending_stack_adjust
= 0;
3837 /* stack_arg_under_construction says whether a stack arg is
3838 being constructed at the old stack level. Pushing the stack
3839 gets a clean outgoing argument block. */
3840 old_stack_arg_under_construction
= stack_arg_under_construction
;
3841 stack_arg_under_construction
= 0;
3843 argblock
= push_block (ARGS_SIZE_RTX (adjusted_args_size
), 0, 0);
3844 if (flag_stack_usage_info
)
3845 current_function_has_unbounded_dynamic_stack_size
= 1;
3849 /* Note that we must go through the motions of allocating an argument
3850 block even if the size is zero because we may be storing args
3851 in the area reserved for register arguments, which may be part of
3854 poly_int64 needed
= adjusted_args_size
.constant
;
3856 /* Store the maximum argument space used. It will be pushed by
3857 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3860 crtl
->outgoing_args_size
= upper_bound (crtl
->outgoing_args_size
,
3863 if (must_preallocate
)
3865 if (ACCUMULATE_OUTGOING_ARGS
)
3867 /* Since the stack pointer will never be pushed, it is
3868 possible for the evaluation of a parm to clobber
3869 something we have already written to the stack.
3870 Since most function calls on RISC machines do not use
3871 the stack, this is uncommon, but must work correctly.
3873 Therefore, we save any area of the stack that was already
3874 written and that we are using. Here we set up to do this
3875 by making a new stack usage map from the old one. The
3876 actual save will be done by store_one_arg.
3878 Another approach might be to try to reorder the argument
3879 evaluations to avoid this conflicting stack usage. */
3881 /* Since we will be writing into the entire argument area,
3882 the map must be allocated for its entire size, not just
3883 the part that is the responsibility of the caller. */
3884 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl
? fntype
: TREE_TYPE (fndecl
))))
3885 needed
+= reg_parm_stack_space
;
3887 poly_int64 limit
= needed
;
3888 if (ARGS_GROW_DOWNWARD
)
3891 /* For polynomial sizes, this is the maximum possible
3892 size needed for arguments with a constant size
3894 HOST_WIDE_INT const_limit
= constant_lower_bound (limit
);
3895 highest_outgoing_arg_in_use
3896 = MAX (initial_highest_arg_in_use
, const_limit
);
3898 free (stack_usage_map_buf
);
3899 stack_usage_map_buf
= XNEWVEC (char, highest_outgoing_arg_in_use
);
3900 stack_usage_map
= stack_usage_map_buf
;
3902 if (initial_highest_arg_in_use
)
3903 memcpy (stack_usage_map
, initial_stack_usage_map
,
3904 initial_highest_arg_in_use
);
3906 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
3907 memset (&stack_usage_map
[initial_highest_arg_in_use
], 0,
3908 (highest_outgoing_arg_in_use
3909 - initial_highest_arg_in_use
));
3912 /* The address of the outgoing argument list must not be
3913 copied to a register here, because argblock would be left
3914 pointing to the wrong place after the call to
3915 allocate_dynamic_stack_space below. */
3917 argblock
= virtual_outgoing_args_rtx
;
3921 /* Try to reuse some or all of the pending_stack_adjust
3922 to get this space. */
3923 if (inhibit_defer_pop
== 0
3924 && (combine_pending_stack_adjustment_and_call
3926 unadjusted_args_size
,
3927 &adjusted_args_size
,
3928 preferred_unit_stack_boundary
)))
3930 /* combine_pending_stack_adjustment_and_call computes
3931 an adjustment before the arguments are allocated.
3932 Account for them and see whether or not the stack
3933 needs to go up or down. */
3934 needed
= unadjusted_args_size
- needed
;
3937 combine_pending_stack_adjustment_and_call. */
3938 gcc_checking_assert (ordered_p (needed
, 0));
3939 if (maybe_lt (needed
, 0))
3941 /* We're releasing stack space. */
3942 /* ??? We can avoid any adjustment at all if we're
3943 already aligned. FIXME. */
3944 pending_stack_adjust
= -needed
;
3945 do_pending_stack_adjust ();
3949 /* We need to allocate space. We'll do that in
3950 push_block below. */
3951 pending_stack_adjust
= 0;
3954 /* Special case this because overhead of `push_block' in
3955 this case is non-trivial. */
3956 if (known_eq (needed
, 0))
3957 argblock
= virtual_outgoing_args_rtx
;
3960 rtx needed_rtx
= gen_int_mode (needed
, Pmode
);
3961 argblock
= push_block (needed_rtx
, 0, 0);
3962 if (ARGS_GROW_DOWNWARD
)
3963 argblock
= plus_constant (Pmode
, argblock
, needed
);
3966 /* We only really need to call `copy_to_reg' in the case
3967 where push insns are going to be used to pass ARGBLOCK
3968 to a function call in ARGS. In that case, the stack
3969 pointer changes value from the allocation point to the
3970 call point, and hence the value of
3971 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3972 as well always do it. */
3973 argblock
= copy_to_reg (argblock
);
3978 if (ACCUMULATE_OUTGOING_ARGS
)
3980 /* The save/restore code in store_one_arg handles all
3981 cases except one: a constructor call (including a C
3982 function returning a BLKmode struct) to initialize
3984 if (stack_arg_under_construction
)
3988 (adjusted_args_size
.constant
3989 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl
? fntype
3990 : TREE_TYPE (fndecl
))
3991 ? 0 : reg_parm_stack_space
), Pmode
));
3992 if (old_stack_level
== 0)
3994 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
3995 old_stack_pointer_delta
= stack_pointer_delta
;
3996 old_pending_adj
= pending_stack_adjust
;
3997 pending_stack_adjust
= 0;
3998 /* stack_arg_under_construction says whether a stack
3999 arg is being constructed at the old stack level.
4000 Pushing the stack gets a clean outgoing argument
4002 old_stack_arg_under_construction
4003 = stack_arg_under_construction
;
4004 stack_arg_under_construction
= 0;
4005 /* Make a new map for the new argument list. */
4006 free (stack_usage_map_buf
);
4007 stack_usage_map_buf
= XCNEWVEC (char, highest_outgoing_arg_in_use
);
4008 stack_usage_map
= stack_usage_map_buf
;
4009 highest_outgoing_arg_in_use
= 0;
4010 stack_usage_watermark
= HOST_WIDE_INT_M1U
;
4012 /* We can pass TRUE as the 4th argument because we just
4013 saved the stack pointer and will restore it right after
4015 allocate_dynamic_stack_space (push_size
, 0, BIGGEST_ALIGNMENT
,
4019 /* If argument evaluation might modify the stack pointer,
4020 copy the address of the argument list to a register. */
4021 for (i
= 0; i
< num_actuals
; i
++)
4022 if (args
[i
].pass_on_stack
)
4024 argblock
= copy_addr_to_reg (argblock
);
4029 compute_argument_addresses (args
, argblock
, num_actuals
);
4031 /* Stack is properly aligned, pops can't safely be deferred during
4032 the evaluation of the arguments. */
4035 /* Precompute all register parameters. It isn't safe to compute
4036 anything once we have started filling any specific hard regs.
4037 TLS symbols sometimes need a call to resolve. Precompute
4038 register parameters before any stack pointer manipulation
4039 to avoid unaligned stack in the called function. */
4040 precompute_register_parameters (num_actuals
, args
, ®_parm_seen
);
4044 /* Perform stack alignment before the first push (the last arg). */
4046 && maybe_gt (adjusted_args_size
.constant
, reg_parm_stack_space
)
4047 && maybe_ne (adjusted_args_size
.constant
, unadjusted_args_size
))
4049 /* When the stack adjustment is pending, we get better code
4050 by combining the adjustments. */
4051 if (maybe_ne (pending_stack_adjust
, 0)
4052 && ! inhibit_defer_pop
4053 && (combine_pending_stack_adjustment_and_call
4054 (&pending_stack_adjust
,
4055 unadjusted_args_size
,
4056 &adjusted_args_size
,
4057 preferred_unit_stack_boundary
)))
4058 do_pending_stack_adjust ();
4059 else if (argblock
== 0)
4060 anti_adjust_stack (gen_int_mode (adjusted_args_size
.constant
4061 - unadjusted_args_size
,
4064 /* Now that the stack is properly aligned, pops can't safely
4065 be deferred during the evaluation of the arguments. */
4068 /* Record the maximum pushed stack space size. We need to delay
4069 doing it this far to take into account the optimization done
4070 by combine_pending_stack_adjustment_and_call. */
4071 if (flag_stack_usage_info
4072 && !ACCUMULATE_OUTGOING_ARGS
4074 && adjusted_args_size
.var
== 0)
4076 poly_int64 pushed
= (adjusted_args_size
.constant
4077 + pending_stack_adjust
);
4078 current_function_pushed_stack_size
4079 = upper_bound (current_function_pushed_stack_size
, pushed
);
4082 funexp
= rtx_for_function_call (fndecl
, addr
);
4084 if (CALL_EXPR_STATIC_CHAIN (exp
))
4085 static_chain_value
= expand_normal (CALL_EXPR_STATIC_CHAIN (exp
));
4087 static_chain_value
= 0;
4089 #ifdef REG_PARM_STACK_SPACE
4090 /* Save the fixed argument area if it's part of the caller's frame and
4091 is clobbered by argument setup for this call. */
4092 if (ACCUMULATE_OUTGOING_ARGS
&& pass
)
4093 save_area
= save_fixed_argument_area (reg_parm_stack_space
, argblock
,
4094 &low_to_save
, &high_to_save
);
4097 /* Now store (and compute if necessary) all non-register parms.
4098 These come before register parms, since they can require block-moves,
4099 which could clobber the registers used for register parms.
4100 Parms which have partial registers are not stored here,
4101 but we do preallocate space here if they want that. */
4103 for (i
= 0; i
< num_actuals
; i
++)
4105 if (args
[i
].reg
== 0 || args
[i
].pass_on_stack
)
4107 rtx_insn
*before_arg
= get_last_insn ();
4109 /* We don't allow passing huge (> 2^30 B) arguments
4110 by value. It would cause an overflow later on. */
4111 if (constant_lower_bound (adjusted_args_size
.constant
)
4112 >= (1 << (HOST_BITS_PER_INT
- 2)))
4114 sorry ("passing too large argument on stack");
4118 if (store_one_arg (&args
[i
], argblock
, flags
,
4119 adjusted_args_size
.var
!= 0,
4120 reg_parm_stack_space
)
4122 && check_sibcall_argument_overlap (before_arg
,
4124 sibcall_failure
= 1;
4129 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
4130 gen_rtx_USE (VOIDmode
, args
[i
].stack
),
4134 /* If we have a parm that is passed in registers but not in memory
4135 and whose alignment does not permit a direct copy into registers,
4136 make a group of pseudos that correspond to each register that we
4138 if (STRICT_ALIGNMENT
)
4139 store_unaligned_arguments_into_pseudos (args
, num_actuals
);
4141 /* Now store any partially-in-registers parm.
4142 This is the last place a block-move can happen. */
4144 for (i
= 0; i
< num_actuals
; i
++)
4145 if (args
[i
].partial
!= 0 && ! args
[i
].pass_on_stack
)
4147 rtx_insn
*before_arg
= get_last_insn ();
4149 /* On targets with weird calling conventions (e.g. PA) it's
4150 hard to ensure that all cases of argument overlap between
4151 stack and registers work. Play it safe and bail out. */
4152 if (ARGS_GROW_DOWNWARD
&& !STACK_GROWS_DOWNWARD
)
4154 sibcall_failure
= 1;
4158 if (store_one_arg (&args
[i
], argblock
, flags
,
4159 adjusted_args_size
.var
!= 0,
4160 reg_parm_stack_space
)
4162 && check_sibcall_argument_overlap (before_arg
,
4164 sibcall_failure
= 1;
4167 bool any_regs
= false;
4168 for (i
= 0; i
< num_actuals
; i
++)
4169 if (args
[i
].reg
!= NULL_RTX
)
4172 targetm
.calls
.call_args (args
[i
].reg
, funtype
);
4175 targetm
.calls
.call_args (pc_rtx
, funtype
);
4177 /* Figure out the register where the value, if any, will come back. */
4179 if (TYPE_MODE (rettype
) != VOIDmode
4180 && ! structure_value_addr
)
4182 if (pcc_struct_value
)
4183 valreg
= hard_function_value (build_pointer_type (rettype
),
4184 fndecl
, NULL
, (pass
== 0));
4186 valreg
= hard_function_value (rettype
, fndecl
, fntype
,
4189 /* If VALREG is a PARALLEL whose first member has a zero
4190 offset, use that. This is for targets such as m68k that
4191 return the same value in multiple places. */
4192 if (GET_CODE (valreg
) == PARALLEL
)
4194 rtx elem
= XVECEXP (valreg
, 0, 0);
4195 rtx where
= XEXP (elem
, 0);
4196 rtx offset
= XEXP (elem
, 1);
4197 if (offset
== const0_rtx
4198 && GET_MODE (where
) == GET_MODE (valreg
))
4203 /* If register arguments require space on the stack and stack space
4204 was not preallocated, allocate stack space here for arguments
4205 passed in registers. */
4206 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl
? fntype
: TREE_TYPE (fndecl
)))
4207 && !ACCUMULATE_OUTGOING_ARGS
4208 && must_preallocate
== 0 && reg_parm_stack_space
> 0)
4209 anti_adjust_stack (GEN_INT (reg_parm_stack_space
));
4211 /* Pass the function the address in which to return a
4213 if (pass
!= 0 && structure_value_addr
&& ! structure_value_addr_parm
)
4215 structure_value_addr
4216 = convert_memory_address (Pmode
, structure_value_addr
);
4217 emit_move_insn (struct_value
,
4219 force_operand (structure_value_addr
,
4222 if (REG_P (struct_value
))
4223 use_reg (&call_fusage
, struct_value
);
4226 after_args
= get_last_insn ();
4227 funexp
= prepare_call_address (fndecl
? fndecl
: fntype
, funexp
,
4228 static_chain_value
, &call_fusage
,
4229 reg_parm_seen
, flags
);
4231 load_register_parameters (args
, num_actuals
, &call_fusage
, flags
,
4232 pass
== 0, &sibcall_failure
);
4234 /* Save a pointer to the last insn before the call, so that we can
4235 later safely search backwards to find the CALL_INSN. */
4236 before_call
= get_last_insn ();
4238 /* Set up next argument register. For sibling calls on machines
4239 with register windows this should be the incoming register. */
4241 next_arg_reg
= targetm
.calls
.function_incoming_arg (args_so_far
,
4246 next_arg_reg
= targetm
.calls
.function_arg (args_so_far
,
4247 VOIDmode
, void_type_node
,
4250 if (pass
== 1 && (return_flags
& ERF_RETURNS_ARG
))
4252 int arg_nr
= return_flags
& ERF_RETURN_ARG_MASK
;
4253 arg_nr
= num_actuals
- arg_nr
- 1;
4255 && arg_nr
< num_actuals
4259 && GET_MODE (args
[arg_nr
].reg
) == GET_MODE (valreg
))
4261 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args
[arg_nr
].tree_value
)),
4262 gen_rtx_SET (valreg
, args
[arg_nr
].reg
),
4265 /* All arguments and registers used for the call must be set up by
4268 /* Stack must be properly aligned now. */
4270 || multiple_p (stack_pointer_delta
,
4271 preferred_unit_stack_boundary
));
4273 /* Generate the actual call instruction. */
4274 emit_call_1 (funexp
, exp
, fndecl
, funtype
, unadjusted_args_size
,
4275 adjusted_args_size
.constant
, struct_value_size
,
4276 next_arg_reg
, valreg
, old_inhibit_defer_pop
, call_fusage
,
4277 flags
, args_so_far
);
4281 rtx_call_insn
*last
;
4282 rtx datum
= NULL_RTX
;
4283 if (fndecl
!= NULL_TREE
)
4285 datum
= XEXP (DECL_RTL (fndecl
), 0);
4286 gcc_assert (datum
!= NULL_RTX
4287 && GET_CODE (datum
) == SYMBOL_REF
);
4289 last
= last_call_insn ();
4290 add_reg_note (last
, REG_CALL_DECL
, datum
);
4293 /* If the call setup or the call itself overlaps with anything
4294 of the argument setup we probably clobbered our call address.
4295 In that case we can't do sibcalls. */
4297 && check_sibcall_argument_overlap (after_args
, 0, 0))
4298 sibcall_failure
= 1;
4300 /* If a non-BLKmode value is returned at the most significant end
4301 of a register, shift the register right by the appropriate amount
4302 and update VALREG accordingly. BLKmode values are handled by the
4303 group load/store machinery below. */
4304 if (!structure_value_addr
4305 && !pcc_struct_value
4306 && TYPE_MODE (rettype
) != VOIDmode
4307 && TYPE_MODE (rettype
) != BLKmode
4309 && targetm
.calls
.return_in_msb (rettype
))
4311 if (shift_return_value (TYPE_MODE (rettype
), false, valreg
))
4312 sibcall_failure
= 1;
4313 valreg
= gen_rtx_REG (TYPE_MODE (rettype
), REGNO (valreg
));
4316 if (pass
&& (flags
& ECF_MALLOC
))
4318 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
4319 rtx_insn
*last
, *insns
;
4321 /* The return value from a malloc-like function is a pointer. */
4322 if (TREE_CODE (rettype
) == POINTER_TYPE
)
4323 mark_reg_pointer (temp
, MALLOC_ABI_ALIGNMENT
);
4325 emit_move_insn (temp
, valreg
);
4327 /* The return value from a malloc-like function cannot alias
4329 last
= get_last_insn ();
4330 add_reg_note (last
, REG_NOALIAS
, temp
);
4332 /* Write out the sequence. */
4333 insns
= get_insns ();
4339 /* For calls to `setjmp', etc., inform
4340 function.c:setjmp_warnings that it should complain if
4341 nonvolatile values are live. For functions that cannot
4342 return, inform flow that control does not fall through. */
4344 if ((flags
& ECF_NORETURN
) || pass
== 0)
4346 /* The barrier must be emitted
4347 immediately after the CALL_INSN. Some ports emit more
4348 than just a CALL_INSN above, so we must search for it here. */
4350 rtx_insn
*last
= get_last_insn ();
4351 while (!CALL_P (last
))
4353 last
= PREV_INSN (last
);
4354 /* There was no CALL_INSN? */
4355 gcc_assert (last
!= before_call
);
4358 emit_barrier_after (last
);
4360 /* Stack adjustments after a noreturn call are dead code.
4361 However when NO_DEFER_POP is in effect, we must preserve
4362 stack_pointer_delta. */
4363 if (inhibit_defer_pop
== 0)
4365 stack_pointer_delta
= old_stack_allocated
;
4366 pending_stack_adjust
= 0;
4370 /* If value type not void, return an rtx for the value. */
4372 if (TYPE_MODE (rettype
) == VOIDmode
4374 target
= const0_rtx
;
4375 else if (structure_value_addr
)
4377 if (target
== 0 || !MEM_P (target
))
4380 = gen_rtx_MEM (TYPE_MODE (rettype
),
4381 memory_address (TYPE_MODE (rettype
),
4382 structure_value_addr
));
4383 set_mem_attributes (target
, rettype
, 1);
4386 else if (pcc_struct_value
)
4388 /* This is the special C++ case where we need to
4389 know what the true target was. We take care to
4390 never use this value more than once in one expression. */
4391 target
= gen_rtx_MEM (TYPE_MODE (rettype
),
4392 copy_to_reg (valreg
));
4393 set_mem_attributes (target
, rettype
, 1);
4395 /* Handle calls that return values in multiple non-contiguous locations.
4396 The Irix 6 ABI has examples of this. */
4397 else if (GET_CODE (valreg
) == PARALLEL
)
4400 target
= emit_group_move_into_temps (valreg
);
4401 else if (rtx_equal_p (target
, valreg
))
4403 else if (GET_CODE (target
) == PARALLEL
)
4404 /* Handle the result of a emit_group_move_into_temps
4405 call in the previous pass. */
4406 emit_group_move (target
, valreg
);
4408 emit_group_store (target
, valreg
, rettype
,
4409 int_size_in_bytes (rettype
));
4412 && GET_MODE (target
) == TYPE_MODE (rettype
)
4413 && GET_MODE (target
) == GET_MODE (valreg
))
4415 bool may_overlap
= false;
4417 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4418 reg to a plain register. */
4419 if (!REG_P (target
) || HARD_REGISTER_P (target
))
4420 valreg
= avoid_likely_spilled_reg (valreg
);
4422 /* If TARGET is a MEM in the argument area, and we have
4423 saved part of the argument area, then we can't store
4424 directly into TARGET as it may get overwritten when we
4425 restore the argument save area below. Don't work too
4426 hard though and simply force TARGET to a register if it
4427 is a MEM; the optimizer is quite likely to sort it out. */
4428 if (ACCUMULATE_OUTGOING_ARGS
&& pass
&& MEM_P (target
))
4429 for (i
= 0; i
< num_actuals
; i
++)
4430 if (args
[i
].save_area
)
4437 target
= copy_to_reg (valreg
);
4440 /* TARGET and VALREG cannot be equal at this point
4441 because the latter would not have
4442 REG_FUNCTION_VALUE_P true, while the former would if
4443 it were referring to the same register.
4445 If they refer to the same register, this move will be
4446 a no-op, except when function inlining is being
4448 emit_move_insn (target
, valreg
);
4450 /* If we are setting a MEM, this code must be executed.
4451 Since it is emitted after the call insn, sibcall
4452 optimization cannot be performed in that case. */
4454 sibcall_failure
= 1;
4458 target
= copy_to_reg (avoid_likely_spilled_reg (valreg
));
4460 /* If we promoted this return value, make the proper SUBREG.
4461 TARGET might be const0_rtx here, so be careful. */
4463 && TYPE_MODE (rettype
) != BLKmode
4464 && GET_MODE (target
) != TYPE_MODE (rettype
))
4466 tree type
= rettype
;
4467 int unsignedp
= TYPE_UNSIGNED (type
);
4470 /* Ensure we promote as expected, and get the new unsignedness. */
4471 pmode
= promote_function_mode (type
, TYPE_MODE (type
), &unsignedp
,
4473 gcc_assert (GET_MODE (target
) == pmode
);
4475 poly_uint64 offset
= subreg_lowpart_offset (TYPE_MODE (type
),
4477 target
= gen_rtx_SUBREG (TYPE_MODE (type
), target
, offset
);
4478 SUBREG_PROMOTED_VAR_P (target
) = 1;
4479 SUBREG_PROMOTED_SET (target
, unsignedp
);
4482 /* If size of args is variable or this was a constructor call for a stack
4483 argument, restore saved stack-pointer value. */
4485 if (old_stack_level
)
4487 rtx_insn
*prev
= get_last_insn ();
4489 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
4490 stack_pointer_delta
= old_stack_pointer_delta
;
4492 fixup_args_size_notes (prev
, get_last_insn (), stack_pointer_delta
);
4494 pending_stack_adjust
= old_pending_adj
;
4495 old_stack_allocated
= stack_pointer_delta
- pending_stack_adjust
;
4496 stack_arg_under_construction
= old_stack_arg_under_construction
;
4497 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
4498 stack_usage_map
= initial_stack_usage_map
;
4499 stack_usage_watermark
= initial_stack_usage_watermark
;
4500 sibcall_failure
= 1;
4502 else if (ACCUMULATE_OUTGOING_ARGS
&& pass
)
4504 #ifdef REG_PARM_STACK_SPACE
4506 restore_fixed_argument_area (save_area
, argblock
,
4507 high_to_save
, low_to_save
);
4510 /* If we saved any argument areas, restore them. */
4511 for (i
= 0; i
< num_actuals
; i
++)
4512 if (args
[i
].save_area
)
4514 machine_mode save_mode
= GET_MODE (args
[i
].save_area
);
4516 = gen_rtx_MEM (save_mode
,
4517 memory_address (save_mode
,
4518 XEXP (args
[i
].stack_slot
, 0)));
4520 if (save_mode
!= BLKmode
)
4521 emit_move_insn (stack_area
, args
[i
].save_area
);
4523 emit_block_move (stack_area
, args
[i
].save_area
,
4525 (args
[i
].locate
.size
.constant
, Pmode
)),
4526 BLOCK_OP_CALL_PARM
);
4529 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
4530 stack_usage_map
= initial_stack_usage_map
;
4531 stack_usage_watermark
= initial_stack_usage_watermark
;
4534 /* If this was alloca, record the new stack level. */
4535 if (flags
& ECF_MAY_BE_ALLOCA
)
4536 record_new_stack_level ();
4538 /* Free up storage we no longer need. */
4539 for (i
= 0; i
< num_actuals
; ++i
)
4540 free (args
[i
].aligned_regs
);
4542 targetm
.calls
.end_call_args ();
4544 insns
= get_insns ();
4549 tail_call_insns
= insns
;
4551 /* Restore the pending stack adjustment now that we have
4552 finished generating the sibling call sequence. */
4554 restore_pending_stack_adjust (&save
);
4556 /* Prepare arg structure for next iteration. */
4557 for (i
= 0; i
< num_actuals
; i
++)
4560 args
[i
].aligned_regs
= 0;
4564 sbitmap_free (stored_args_map
);
4565 internal_arg_pointer_exp_state
.scan_start
= NULL
;
4566 internal_arg_pointer_exp_state
.cache
.release ();
4570 normal_call_insns
= insns
;
4572 /* Verify that we've deallocated all the stack we used. */
4573 gcc_assert ((flags
& ECF_NORETURN
)
4574 || known_eq (old_stack_allocated
,
4576 - pending_stack_adjust
));
4579 /* If something prevents making this a sibling call,
4580 zero out the sequence. */
4581 if (sibcall_failure
)
4582 tail_call_insns
= NULL
;
4587 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4588 arguments too, as argument area is now clobbered by the call. */
4589 if (tail_call_insns
)
4591 emit_insn (tail_call_insns
);
4592 crtl
->tail_call_emit
= true;
4596 emit_insn (normal_call_insns
);
4598 /* Ideally we'd emit a message for all of the ways that it could
4600 maybe_complain_about_tail_call (exp
, "tail call production failed");
4603 currently_expanding_call
--;
4605 free (stack_usage_map_buf
);
4610 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4611 this function's incoming arguments.
4613 At the start of RTL generation we know the only REG_EQUIV notes
4614 in the rtl chain are those for incoming arguments, so we can look
4615 for REG_EQUIV notes between the start of the function and the
4616 NOTE_INSN_FUNCTION_BEG.
4618 This is (slight) overkill. We could keep track of the highest
4619 argument we clobber and be more selective in removing notes, but it
4620 does not seem to be worth the effort. */
4623 fixup_tail_calls (void)
4627 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4631 /* There are never REG_EQUIV notes for the incoming arguments
4632 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4634 && NOTE_KIND (insn
) == NOTE_INSN_FUNCTION_BEG
)
4637 note
= find_reg_note (insn
, REG_EQUIV
, 0);
4639 remove_note (insn
, note
);
4640 note
= find_reg_note (insn
, REG_EQUIV
, 0);
4645 /* Traverse a list of TYPES and expand all complex types into their
4648 split_complex_types (tree types
)
4652 /* Before allocating memory, check for the common case of no complex. */
4653 for (p
= types
; p
; p
= TREE_CHAIN (p
))
4655 tree type
= TREE_VALUE (p
);
4656 if (TREE_CODE (type
) == COMPLEX_TYPE
4657 && targetm
.calls
.split_complex_arg (type
))
4663 types
= copy_list (types
);
4665 for (p
= types
; p
; p
= TREE_CHAIN (p
))
4667 tree complex_type
= TREE_VALUE (p
);
4669 if (TREE_CODE (complex_type
) == COMPLEX_TYPE
4670 && targetm
.calls
.split_complex_arg (complex_type
))
4674 /* Rewrite complex type with component type. */
4675 TREE_VALUE (p
) = TREE_TYPE (complex_type
);
4676 next
= TREE_CHAIN (p
);
4678 /* Add another component type for the imaginary part. */
4679 imag
= build_tree_list (NULL_TREE
, TREE_VALUE (p
));
4680 TREE_CHAIN (p
) = imag
;
4681 TREE_CHAIN (imag
) = next
;
4683 /* Skip the newly created node. */
4691 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4692 for a value of mode OUTMODE,
4693 with NARGS different arguments, passed as ARGS.
4694 Store the return value if RETVAL is nonzero: store it in VALUE if
4695 VALUE is nonnull, otherwise pick a convenient location. In either
4696 case return the location of the stored value.
4698 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4699 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4700 other types of library calls. */
4703 emit_library_call_value_1 (int retval
, rtx orgfun
, rtx value
,
4704 enum libcall_type fn_type
,
4705 machine_mode outmode
, int nargs
, rtx_mode_t
*args
)
4707 /* Total size in bytes of all the stack-parms scanned so far. */
4708 struct args_size args_size
;
4709 /* Size of arguments before any adjustments (such as rounding). */
4710 struct args_size original_args_size
;
4713 /* Todo, choose the correct decl type of orgfun. Sadly this information
4714 isn't present here, so we default to native calling abi here. */
4715 tree fndecl ATTRIBUTE_UNUSED
= NULL_TREE
; /* library calls default to host calling abi ? */
4716 tree fntype ATTRIBUTE_UNUSED
= NULL_TREE
; /* library calls default to host calling abi ? */
4719 CUMULATIVE_ARGS args_so_far_v
;
4720 cumulative_args_t args_so_far
;
4727 struct locate_and_pad_arg_data locate
;
4731 int old_inhibit_defer_pop
= inhibit_defer_pop
;
4732 rtx call_fusage
= 0;
4735 int pcc_struct_value
= 0;
4736 poly_int64 struct_value_size
= 0;
4738 int reg_parm_stack_space
= 0;
4740 rtx_insn
*before_call
;
4741 bool have_push_fusage
;
4742 tree tfom
; /* type_for_mode (outmode, 0) */
4744 #ifdef REG_PARM_STACK_SPACE
4745 /* Define the boundary of the register parm stack space that needs to be
4747 int low_to_save
= 0, high_to_save
= 0;
4748 rtx save_area
= 0; /* Place that it is saved. */
4751 /* Size of the stack reserved for parameter registers. */
4752 unsigned int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
4753 char *initial_stack_usage_map
= stack_usage_map
;
4754 unsigned HOST_WIDE_INT initial_stack_usage_watermark
= stack_usage_watermark
;
4755 char *stack_usage_map_buf
= NULL
;
4757 rtx struct_value
= targetm
.calls
.struct_value_rtx (0, 0);
4759 #ifdef REG_PARM_STACK_SPACE
4760 reg_parm_stack_space
= REG_PARM_STACK_SPACE ((tree
) 0);
4763 /* By default, library functions cannot throw. */
4764 flags
= ECF_NOTHROW
;
4777 flags
|= ECF_NORETURN
;
4780 flags
&= ~ECF_NOTHROW
;
4782 case LCT_RETURNS_TWICE
:
4783 flags
= ECF_RETURNS_TWICE
;
4788 /* Ensure current function's preferred stack boundary is at least
4790 if (crtl
->preferred_stack_boundary
< PREFERRED_STACK_BOUNDARY
)
4791 crtl
->preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
4793 /* If this kind of value comes back in memory,
4794 decide where in memory it should come back. */
4795 if (outmode
!= VOIDmode
)
4797 tfom
= lang_hooks
.types
.type_for_mode (outmode
, 0);
4798 if (aggregate_value_p (tfom
, 0))
4800 #ifdef PCC_STATIC_STRUCT_RETURN
4802 = hard_function_value (build_pointer_type (tfom
), 0, 0, 0);
4803 mem_value
= gen_rtx_MEM (outmode
, pointer_reg
);
4804 pcc_struct_value
= 1;
4806 value
= gen_reg_rtx (outmode
);
4807 #else /* not PCC_STATIC_STRUCT_RETURN */
4808 struct_value_size
= GET_MODE_SIZE (outmode
);
4809 if (value
!= 0 && MEM_P (value
))
4812 mem_value
= assign_temp (tfom
, 1, 1);
4814 /* This call returns a big structure. */
4815 flags
&= ~(ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
);
4819 tfom
= void_type_node
;
4821 /* ??? Unfinished: must pass the memory address as an argument. */
4823 /* Copy all the libcall-arguments out of the varargs data
4824 and into a vector ARGVEC.
4826 Compute how to pass each argument. We only support a very small subset
4827 of the full argument passing conventions to limit complexity here since
4828 library functions shouldn't have many args. */
4830 argvec
= XALLOCAVEC (struct arg
, nargs
+ 1);
4831 memset (argvec
, 0, (nargs
+ 1) * sizeof (struct arg
));
4833 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4834 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v
, outmode
, fun
);
4836 INIT_CUMULATIVE_ARGS (args_so_far_v
, NULL_TREE
, fun
, 0, nargs
);
4838 args_so_far
= pack_cumulative_args (&args_so_far_v
);
4840 args_size
.constant
= 0;
4847 /* If there's a structure value address to be passed,
4848 either pass it in the special place, or pass it as an extra argument. */
4849 if (mem_value
&& struct_value
== 0 && ! pcc_struct_value
)
4851 rtx addr
= XEXP (mem_value
, 0);
4855 /* Make sure it is a reasonable operand for a move or push insn. */
4856 if (!REG_P (addr
) && !MEM_P (addr
)
4857 && !(CONSTANT_P (addr
)
4858 && targetm
.legitimate_constant_p (Pmode
, addr
)))
4859 addr
= force_operand (addr
, NULL_RTX
);
4861 argvec
[count
].value
= addr
;
4862 argvec
[count
].mode
= Pmode
;
4863 argvec
[count
].partial
= 0;
4865 argvec
[count
].reg
= targetm
.calls
.function_arg (args_so_far
,
4866 Pmode
, NULL_TREE
, true);
4867 gcc_assert (targetm
.calls
.arg_partial_bytes (args_so_far
, Pmode
,
4868 NULL_TREE
, 1) == 0);
4870 locate_and_pad_parm (Pmode
, NULL_TREE
,
4871 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4874 argvec
[count
].reg
!= 0,
4876 reg_parm_stack_space
, 0,
4877 NULL_TREE
, &args_size
, &argvec
[count
].locate
);
4879 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
4880 || reg_parm_stack_space
> 0)
4881 args_size
.constant
+= argvec
[count
].locate
.size
.constant
;
4883 targetm
.calls
.function_arg_advance (args_so_far
, Pmode
, (tree
) 0, true);
4888 for (unsigned int i
= 0; count
< nargs
; i
++, count
++)
4890 rtx val
= args
[i
].first
;
4891 machine_mode mode
= args
[i
].second
;
4894 /* We cannot convert the arg value to the mode the library wants here;
4895 must do it earlier where we know the signedness of the arg. */
4896 gcc_assert (mode
!= BLKmode
4897 && (GET_MODE (val
) == mode
|| GET_MODE (val
) == VOIDmode
));
4899 /* Make sure it is a reasonable operand for a move or push insn. */
4900 if (!REG_P (val
) && !MEM_P (val
)
4901 && !(CONSTANT_P (val
) && targetm
.legitimate_constant_p (mode
, val
)))
4902 val
= force_operand (val
, NULL_RTX
);
4904 if (pass_by_reference (&args_so_far_v
, mode
, NULL_TREE
, 1))
4908 = !reference_callee_copied (&args_so_far_v
, mode
, NULL_TREE
, 1);
4910 /* If this was a CONST function, it is now PURE since it now
4912 if (flags
& ECF_CONST
)
4914 flags
&= ~ECF_CONST
;
4918 if (MEM_P (val
) && !must_copy
)
4920 tree val_expr
= MEM_EXPR (val
);
4922 mark_addressable (val_expr
);
4927 slot
= assign_temp (lang_hooks
.types
.type_for_mode (mode
, 0),
4929 emit_move_insn (slot
, val
);
4932 call_fusage
= gen_rtx_EXPR_LIST (VOIDmode
,
4933 gen_rtx_USE (VOIDmode
, slot
),
4936 call_fusage
= gen_rtx_EXPR_LIST (VOIDmode
,
4937 gen_rtx_CLOBBER (VOIDmode
,
4942 val
= force_operand (XEXP (slot
, 0), NULL_RTX
);
4945 mode
= promote_function_mode (NULL_TREE
, mode
, &unsigned_p
, NULL_TREE
, 0);
4946 argvec
[count
].mode
= mode
;
4947 argvec
[count
].value
= convert_modes (mode
, GET_MODE (val
), val
, unsigned_p
);
4948 argvec
[count
].reg
= targetm
.calls
.function_arg (args_so_far
, mode
,
4951 argvec
[count
].partial
4952 = targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, NULL_TREE
, 1);
4954 if (argvec
[count
].reg
== 0
4955 || argvec
[count
].partial
!= 0
4956 || reg_parm_stack_space
> 0)
4958 locate_and_pad_parm (mode
, NULL_TREE
,
4959 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4962 argvec
[count
].reg
!= 0,
4964 reg_parm_stack_space
, argvec
[count
].partial
,
4965 NULL_TREE
, &args_size
, &argvec
[count
].locate
);
4966 args_size
.constant
+= argvec
[count
].locate
.size
.constant
;
4967 gcc_assert (!argvec
[count
].locate
.size
.var
);
4969 #ifdef BLOCK_REG_PADDING
4971 /* The argument is passed entirely in registers. See at which
4972 end it should be padded. */
4973 argvec
[count
].locate
.where_pad
=
4974 BLOCK_REG_PADDING (mode
, NULL_TREE
,
4975 known_le (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
4978 targetm
.calls
.function_arg_advance (args_so_far
, mode
, (tree
) 0, true);
4981 for (int i
= 0; i
< nargs
; i
++)
4982 if (reg_parm_stack_space
> 0
4983 || argvec
[i
].reg
== 0
4984 || argvec
[i
].partial
!= 0)
4985 update_stack_alignment_for_call (&argvec
[i
].locate
);
4987 /* If this machine requires an external definition for library
4988 functions, write one out. */
4989 assemble_external_libcall (fun
);
4991 original_args_size
= args_size
;
4992 args_size
.constant
= (aligned_upper_bound (args_size
.constant
4993 + stack_pointer_delta
,
4995 - stack_pointer_delta
);
4997 args_size
.constant
= upper_bound (args_size
.constant
,
4998 reg_parm_stack_space
);
5000 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl
? fntype
: TREE_TYPE (fndecl
))))
5001 args_size
.constant
-= reg_parm_stack_space
;
5003 crtl
->outgoing_args_size
= upper_bound (crtl
->outgoing_args_size
,
5004 args_size
.constant
);
5006 if (flag_stack_usage_info
&& !ACCUMULATE_OUTGOING_ARGS
)
5008 poly_int64 pushed
= args_size
.constant
+ pending_stack_adjust
;
5009 current_function_pushed_stack_size
5010 = upper_bound (current_function_pushed_stack_size
, pushed
);
5013 if (ACCUMULATE_OUTGOING_ARGS
)
5015 /* Since the stack pointer will never be pushed, it is possible for
5016 the evaluation of a parm to clobber something we have already
5017 written to the stack. Since most function calls on RISC machines
5018 do not use the stack, this is uncommon, but must work correctly.
5020 Therefore, we save any area of the stack that was already written
5021 and that we are using. Here we set up to do this by making a new
5022 stack usage map from the old one.
5024 Another approach might be to try to reorder the argument
5025 evaluations to avoid this conflicting stack usage. */
5027 needed
= args_size
.constant
;
5029 /* Since we will be writing into the entire argument area, the
5030 map must be allocated for its entire size, not just the part that
5031 is the responsibility of the caller. */
5032 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl
? fntype
: TREE_TYPE (fndecl
))))
5033 needed
+= reg_parm_stack_space
;
5035 poly_int64 limit
= needed
;
5036 if (ARGS_GROW_DOWNWARD
)
5039 /* For polynomial sizes, this is the maximum possible size needed
5040 for arguments with a constant size and offset. */
5041 HOST_WIDE_INT const_limit
= constant_lower_bound (limit
);
5042 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
5045 stack_usage_map_buf
= XNEWVEC (char, highest_outgoing_arg_in_use
);
5046 stack_usage_map
= stack_usage_map_buf
;
5048 if (initial_highest_arg_in_use
)
5049 memcpy (stack_usage_map
, initial_stack_usage_map
,
5050 initial_highest_arg_in_use
);
5052 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
5053 memset (&stack_usage_map
[initial_highest_arg_in_use
], 0,
5054 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
5057 /* We must be careful to use virtual regs before they're instantiated,
5058 and real regs afterwards. Loop optimization, for example, can create
5059 new libcalls after we've instantiated the virtual regs, and if we
5060 use virtuals anyway, they won't match the rtl patterns. */
5062 if (virtuals_instantiated
)
5063 argblock
= plus_constant (Pmode
, stack_pointer_rtx
,
5064 STACK_POINTER_OFFSET
);
5066 argblock
= virtual_outgoing_args_rtx
;
5071 argblock
= push_block (gen_int_mode (args_size
.constant
, Pmode
), 0, 0);
5074 /* We push args individually in reverse order, perform stack alignment
5075 before the first push (the last arg). */
5077 anti_adjust_stack (gen_int_mode (args_size
.constant
5078 - original_args_size
.constant
,
5083 #ifdef REG_PARM_STACK_SPACE
5084 if (ACCUMULATE_OUTGOING_ARGS
)
5086 /* The argument list is the property of the called routine and it
5087 may clobber it. If the fixed area has been used for previous
5088 parameters, we must save and restore it. */
5089 save_area
= save_fixed_argument_area (reg_parm_stack_space
, argblock
,
5090 &low_to_save
, &high_to_save
);
5094 /* When expanding a normal call, args are stored in push order,
5095 which is the reverse of what we have here. */
5096 bool any_regs
= false;
5097 for (int i
= nargs
; i
-- > 0; )
5098 if (argvec
[i
].reg
!= NULL_RTX
)
5100 targetm
.calls
.call_args (argvec
[i
].reg
, NULL_TREE
);
5104 targetm
.calls
.call_args (pc_rtx
, NULL_TREE
);
5106 /* Push the args that need to be pushed. */
5108 have_push_fusage
= false;
5110 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5111 are to be pushed. */
5112 for (count
= 0; count
< nargs
; count
++, argnum
--)
5114 machine_mode mode
= argvec
[argnum
].mode
;
5115 rtx val
= argvec
[argnum
].value
;
5116 rtx reg
= argvec
[argnum
].reg
;
5117 int partial
= argvec
[argnum
].partial
;
5118 unsigned int parm_align
= argvec
[argnum
].locate
.boundary
;
5119 poly_int64 lower_bound
= 0, upper_bound
= 0;
5121 if (! (reg
!= 0 && partial
== 0))
5125 if (ACCUMULATE_OUTGOING_ARGS
)
5127 /* If this is being stored into a pre-allocated, fixed-size,
5128 stack area, save any previous data at that location. */
5130 if (ARGS_GROW_DOWNWARD
)
5132 /* stack_slot is negative, but we want to index stack_usage_map
5133 with positive values. */
5134 upper_bound
= -argvec
[argnum
].locate
.slot_offset
.constant
+ 1;
5135 lower_bound
= upper_bound
- argvec
[argnum
].locate
.size
.constant
;
5139 lower_bound
= argvec
[argnum
].locate
.slot_offset
.constant
;
5140 upper_bound
= lower_bound
+ argvec
[argnum
].locate
.size
.constant
;
5143 if (stack_region_maybe_used_p (lower_bound
, upper_bound
,
5144 reg_parm_stack_space
))
5146 /* We need to make a save area. */
5148 = argvec
[argnum
].locate
.size
.constant
* BITS_PER_UNIT
;
5149 machine_mode save_mode
5150 = int_mode_for_size (size
, 1).else_blk ();
5152 = plus_constant (Pmode
, argblock
,
5153 argvec
[argnum
].locate
.offset
.constant
);
5155 = gen_rtx_MEM (save_mode
, memory_address (save_mode
, adr
));
5157 if (save_mode
== BLKmode
)
5159 argvec
[argnum
].save_area
5160 = assign_stack_temp (BLKmode
,
5161 argvec
[argnum
].locate
.size
.constant
5164 emit_block_move (validize_mem
5165 (copy_rtx (argvec
[argnum
].save_area
)),
5168 (argvec
[argnum
].locate
.size
.constant
,
5170 BLOCK_OP_CALL_PARM
);
5174 argvec
[argnum
].save_area
= gen_reg_rtx (save_mode
);
5176 emit_move_insn (argvec
[argnum
].save_area
, stack_area
);
5181 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, parm_align
,
5182 partial
, reg
, 0, argblock
,
5184 (argvec
[argnum
].locate
.offset
.constant
, Pmode
)),
5185 reg_parm_stack_space
,
5186 ARGS_SIZE_RTX (argvec
[argnum
].locate
.alignment_pad
), false);
5188 /* Now mark the segment we just used. */
5189 if (ACCUMULATE_OUTGOING_ARGS
)
5190 mark_stack_region_used (lower_bound
, upper_bound
);
5194 /* Indicate argument access so that alias.c knows that these
5197 use
= plus_constant (Pmode
, argblock
,
5198 argvec
[argnum
].locate
.offset
.constant
);
5199 else if (have_push_fusage
)
5203 /* When arguments are pushed, trying to tell alias.c where
5204 exactly this argument is won't work, because the
5205 auto-increment causes confusion. So we merely indicate
5206 that we access something with a known mode somewhere on
5208 use
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
5209 gen_rtx_SCRATCH (Pmode
));
5210 have_push_fusage
= true;
5212 use
= gen_rtx_MEM (argvec
[argnum
].mode
, use
);
5213 use
= gen_rtx_USE (VOIDmode
, use
);
5214 call_fusage
= gen_rtx_EXPR_LIST (VOIDmode
, use
, call_fusage
);
5220 fun
= prepare_call_address (NULL
, fun
, NULL
, &call_fusage
, 0, 0);
5222 /* Now load any reg parms into their regs. */
5224 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5225 are to be pushed. */
5226 for (count
= 0; count
< nargs
; count
++, argnum
--)
5228 machine_mode mode
= argvec
[argnum
].mode
;
5229 rtx val
= argvec
[argnum
].value
;
5230 rtx reg
= argvec
[argnum
].reg
;
5231 int partial
= argvec
[argnum
].partial
;
5233 /* Handle calls that pass values in multiple non-contiguous
5234 locations. The PA64 has examples of this for library calls. */
5235 if (reg
!= 0 && GET_CODE (reg
) == PARALLEL
)
5236 emit_group_load (reg
, val
, NULL_TREE
, GET_MODE_SIZE (mode
));
5237 else if (reg
!= 0 && partial
== 0)
5239 emit_move_insn (reg
, val
);
5240 #ifdef BLOCK_REG_PADDING
5241 poly_int64 size
= GET_MODE_SIZE (argvec
[argnum
].mode
);
5243 /* Copied from load_register_parameters. */
5245 /* Handle case where we have a value that needs shifting
5246 up to the msb. eg. a QImode value and we're padding
5247 upward on a BYTES_BIG_ENDIAN machine. */
5248 if (known_lt (size
, UNITS_PER_WORD
)
5249 && (argvec
[argnum
].locate
.where_pad
5250 == (BYTES_BIG_ENDIAN
? PAD_UPWARD
: PAD_DOWNWARD
)))
5253 poly_int64 shift
= (UNITS_PER_WORD
- size
) * BITS_PER_UNIT
;
5255 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5256 report the whole reg as used. Strictly speaking, the
5257 call only uses SIZE bytes at the msb end, but it doesn't
5258 seem worth generating rtl to say that. */
5259 reg
= gen_rtx_REG (word_mode
, REGNO (reg
));
5260 x
= expand_shift (LSHIFT_EXPR
, word_mode
, reg
, shift
, reg
, 1);
5262 emit_move_insn (reg
, x
);
5270 /* Any regs containing parms remain in use through the call. */
5271 for (count
= 0; count
< nargs
; count
++)
5273 rtx reg
= argvec
[count
].reg
;
5274 if (reg
!= 0 && GET_CODE (reg
) == PARALLEL
)
5275 use_group_regs (&call_fusage
, reg
);
5278 int partial
= argvec
[count
].partial
;
5282 gcc_assert (partial
% UNITS_PER_WORD
== 0);
5283 nregs
= partial
/ UNITS_PER_WORD
;
5284 use_regs (&call_fusage
, REGNO (reg
), nregs
);
5287 use_reg (&call_fusage
, reg
);
5291 /* Pass the function the address in which to return a structure value. */
5292 if (mem_value
!= 0 && struct_value
!= 0 && ! pcc_struct_value
)
5294 emit_move_insn (struct_value
,
5296 force_operand (XEXP (mem_value
, 0),
5298 if (REG_P (struct_value
))
5299 use_reg (&call_fusage
, struct_value
);
5302 /* Don't allow popping to be deferred, since then
5303 cse'ing of library calls could delete a call and leave the pop. */
5305 valreg
= (mem_value
== 0 && outmode
!= VOIDmode
5306 ? hard_libcall_value (outmode
, orgfun
) : NULL_RTX
);
5308 /* Stack must be properly aligned now. */
5309 gcc_assert (multiple_p (stack_pointer_delta
,
5310 PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
));
5312 before_call
= get_last_insn ();
5314 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5315 will set inhibit_defer_pop to that value. */
5316 /* The return type is needed to decide how many bytes the function pops.
5317 Signedness plays no role in that, so for simplicity, we pretend it's
5318 always signed. We also assume that the list of arguments passed has
5319 no impact, so we pretend it is unknown. */
5321 emit_call_1 (fun
, NULL
,
5322 get_identifier (XSTR (orgfun
, 0)),
5323 build_function_type (tfom
, NULL_TREE
),
5324 original_args_size
.constant
, args_size
.constant
,
5326 targetm
.calls
.function_arg (args_so_far
,
5327 VOIDmode
, void_type_node
, true),
5329 old_inhibit_defer_pop
+ 1, call_fusage
, flags
, args_so_far
);
5334 gcc_assert (GET_CODE (datum
) == SYMBOL_REF
);
5335 rtx_call_insn
*last
= last_call_insn ();
5336 add_reg_note (last
, REG_CALL_DECL
, datum
);
5339 /* Right-shift returned value if necessary. */
5340 if (!pcc_struct_value
5341 && TYPE_MODE (tfom
) != BLKmode
5342 && targetm
.calls
.return_in_msb (tfom
))
5344 shift_return_value (TYPE_MODE (tfom
), false, valreg
);
5345 valreg
= gen_rtx_REG (TYPE_MODE (tfom
), REGNO (valreg
));
5348 targetm
.calls
.end_call_args ();
5350 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5351 that it should complain if nonvolatile values are live. For
5352 functions that cannot return, inform flow that control does not
5354 if (flags
& ECF_NORETURN
)
5356 /* The barrier note must be emitted
5357 immediately after the CALL_INSN. Some ports emit more than
5358 just a CALL_INSN above, so we must search for it here. */
5359 rtx_insn
*last
= get_last_insn ();
5360 while (!CALL_P (last
))
5362 last
= PREV_INSN (last
);
5363 /* There was no CALL_INSN? */
5364 gcc_assert (last
!= before_call
);
5367 emit_barrier_after (last
);
5370 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5371 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5372 if (flags
& ECF_NOTHROW
)
5374 rtx_insn
*last
= get_last_insn ();
5375 while (!CALL_P (last
))
5377 last
= PREV_INSN (last
);
5378 /* There was no CALL_INSN? */
5379 gcc_assert (last
!= before_call
);
5382 make_reg_eh_region_note_nothrow_nononlocal (last
);
5385 /* Now restore inhibit_defer_pop to its actual original value. */
5390 /* Copy the value to the right place. */
5391 if (outmode
!= VOIDmode
&& retval
)
5397 if (value
!= mem_value
)
5398 emit_move_insn (value
, mem_value
);
5400 else if (GET_CODE (valreg
) == PARALLEL
)
5403 value
= gen_reg_rtx (outmode
);
5404 emit_group_store (value
, valreg
, NULL_TREE
, GET_MODE_SIZE (outmode
));
5408 /* Convert to the proper mode if a promotion has been active. */
5409 if (GET_MODE (valreg
) != outmode
)
5411 int unsignedp
= TYPE_UNSIGNED (tfom
);
5413 gcc_assert (promote_function_mode (tfom
, outmode
, &unsignedp
,
5414 fndecl
? TREE_TYPE (fndecl
) : fntype
, 1)
5415 == GET_MODE (valreg
));
5416 valreg
= convert_modes (outmode
, GET_MODE (valreg
), valreg
, 0);
5420 emit_move_insn (value
, valreg
);
5426 if (ACCUMULATE_OUTGOING_ARGS
)
5428 #ifdef REG_PARM_STACK_SPACE
5430 restore_fixed_argument_area (save_area
, argblock
,
5431 high_to_save
, low_to_save
);
5434 /* If we saved any argument areas, restore them. */
5435 for (count
= 0; count
< nargs
; count
++)
5436 if (argvec
[count
].save_area
)
5438 machine_mode save_mode
= GET_MODE (argvec
[count
].save_area
);
5439 rtx adr
= plus_constant (Pmode
, argblock
,
5440 argvec
[count
].locate
.offset
.constant
);
5441 rtx stack_area
= gen_rtx_MEM (save_mode
,
5442 memory_address (save_mode
, adr
));
5444 if (save_mode
== BLKmode
)
5445 emit_block_move (stack_area
,
5447 (copy_rtx (argvec
[count
].save_area
)),
5449 (argvec
[count
].locate
.size
.constant
, Pmode
)),
5450 BLOCK_OP_CALL_PARM
);
5452 emit_move_insn (stack_area
, argvec
[count
].save_area
);
5455 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
5456 stack_usage_map
= initial_stack_usage_map
;
5457 stack_usage_watermark
= initial_stack_usage_watermark
;
5460 free (stack_usage_map_buf
);
5467 /* Store a single argument for a function call
5468 into the register or memory area where it must be passed.
5469 *ARG describes the argument value and where to pass it.
5471 ARGBLOCK is the address of the stack-block for all the arguments,
5472 or 0 on a machine where arguments are pushed individually.
5474 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
5475 so must be careful about how the stack is used.
5477 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5478 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
5479 that we need not worry about saving and restoring the stack.
5481 FNDECL is the declaration of the function we are calling.
5483 Return nonzero if this arg should cause sibcall failure,
5487 store_one_arg (struct arg_data
*arg
, rtx argblock
, int flags
,
5488 int variable_size ATTRIBUTE_UNUSED
, int reg_parm_stack_space
)
5490 tree pval
= arg
->tree_value
;
5493 poly_int64 used
= 0;
5494 poly_int64 lower_bound
= 0, upper_bound
= 0;
5495 int sibcall_failure
= 0;
5497 if (TREE_CODE (pval
) == ERROR_MARK
)
5500 /* Push a new temporary level for any temporaries we make for
5504 if (ACCUMULATE_OUTGOING_ARGS
&& !(flags
& ECF_SIBCALL
))
5506 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5507 save any previous data at that location. */
5508 if (argblock
&& ! variable_size
&& arg
->stack
)
5510 if (ARGS_GROW_DOWNWARD
)
5512 /* stack_slot is negative, but we want to index stack_usage_map
5513 with positive values. */
5514 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
5516 rtx offset
= XEXP (XEXP (arg
->stack_slot
, 0), 1);
5517 upper_bound
= -rtx_to_poly_int64 (offset
) + 1;
5522 lower_bound
= upper_bound
- arg
->locate
.size
.constant
;
5526 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
5528 rtx offset
= XEXP (XEXP (arg
->stack_slot
, 0), 1);
5529 lower_bound
= rtx_to_poly_int64 (offset
);
5534 upper_bound
= lower_bound
+ arg
->locate
.size
.constant
;
5537 if (stack_region_maybe_used_p (lower_bound
, upper_bound
,
5538 reg_parm_stack_space
))
5540 /* We need to make a save area. */
5541 poly_uint64 size
= arg
->locate
.size
.constant
* BITS_PER_UNIT
;
5542 machine_mode save_mode
5543 = int_mode_for_size (size
, 1).else_blk ();
5544 rtx adr
= memory_address (save_mode
, XEXP (arg
->stack_slot
, 0));
5545 rtx stack_area
= gen_rtx_MEM (save_mode
, adr
);
5547 if (save_mode
== BLKmode
)
5550 = assign_temp (TREE_TYPE (arg
->tree_value
), 1, 1);
5551 preserve_temp_slots (arg
->save_area
);
5552 emit_block_move (validize_mem (copy_rtx (arg
->save_area
)),
5555 (arg
->locate
.size
.constant
, Pmode
)),
5556 BLOCK_OP_CALL_PARM
);
5560 arg
->save_area
= gen_reg_rtx (save_mode
);
5561 emit_move_insn (arg
->save_area
, stack_area
);
5567 /* If this isn't going to be placed on both the stack and in registers,
5568 set up the register and number of words. */
5569 if (! arg
->pass_on_stack
)
5571 if (flags
& ECF_SIBCALL
)
5572 reg
= arg
->tail_call_reg
;
5575 partial
= arg
->partial
;
5578 /* Being passed entirely in a register. We shouldn't be called in
5580 gcc_assert (reg
== 0 || partial
!= 0);
5582 /* If this arg needs special alignment, don't load the registers
5584 if (arg
->n_aligned_regs
!= 0)
5587 /* If this is being passed partially in a register, we can't evaluate
5588 it directly into its stack slot. Otherwise, we can. */
5589 if (arg
->value
== 0)
5591 /* stack_arg_under_construction is nonzero if a function argument is
5592 being evaluated directly into the outgoing argument list and
5593 expand_call must take special action to preserve the argument list
5594 if it is called recursively.
5596 For scalar function arguments stack_usage_map is sufficient to
5597 determine which stack slots must be saved and restored. Scalar
5598 arguments in general have pass_on_stack == 0.
5600 If this argument is initialized by a function which takes the
5601 address of the argument (a C++ constructor or a C function
5602 returning a BLKmode structure), then stack_usage_map is
5603 insufficient and expand_call must push the stack around the
5604 function call. Such arguments have pass_on_stack == 1.
5606 Note that it is always safe to set stack_arg_under_construction,
5607 but this generates suboptimal code if set when not needed. */
5609 if (arg
->pass_on_stack
)
5610 stack_arg_under_construction
++;
5612 arg
->value
= expand_expr (pval
,
5614 || TYPE_MODE (TREE_TYPE (pval
)) != arg
->mode
)
5615 ? NULL_RTX
: arg
->stack
,
5616 VOIDmode
, EXPAND_STACK_PARM
);
5618 /* If we are promoting object (or for any other reason) the mode
5619 doesn't agree, convert the mode. */
5621 if (arg
->mode
!= TYPE_MODE (TREE_TYPE (pval
)))
5622 arg
->value
= convert_modes (arg
->mode
, TYPE_MODE (TREE_TYPE (pval
)),
5623 arg
->value
, arg
->unsignedp
);
5625 if (arg
->pass_on_stack
)
5626 stack_arg_under_construction
--;
5629 /* Check for overlap with already clobbered argument area. */
5630 if ((flags
& ECF_SIBCALL
)
5631 && MEM_P (arg
->value
)
5632 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg
->value
, 0),
5633 arg
->locate
.size
.constant
))
5634 sibcall_failure
= 1;
5636 /* Don't allow anything left on stack from computation
5637 of argument to alloca. */
5638 if (flags
& ECF_MAY_BE_ALLOCA
)
5639 do_pending_stack_adjust ();
5641 if (arg
->value
== arg
->stack
)
5642 /* If the value is already in the stack slot, we are done. */
5644 else if (arg
->mode
!= BLKmode
)
5646 unsigned int parm_align
;
5648 /* Argument is a scalar, not entirely passed in registers.
5649 (If part is passed in registers, arg->partial says how much
5650 and emit_push_insn will take care of putting it there.)
5652 Push it, and if its size is less than the
5653 amount of space allocated to it,
5654 also bump stack pointer by the additional space.
5655 Note that in C the default argument promotions
5656 will prevent such mismatches. */
5658 poly_int64 size
= (TYPE_EMPTY_P (TREE_TYPE (pval
))
5659 ? 0 : GET_MODE_SIZE (arg
->mode
));
5661 /* Compute how much space the push instruction will push.
5662 On many machines, pushing a byte will advance the stack
5663 pointer by a halfword. */
5664 #ifdef PUSH_ROUNDING
5665 size
= PUSH_ROUNDING (size
);
5669 /* Compute how much space the argument should get:
5670 round up to a multiple of the alignment for arguments. */
5671 if (targetm
.calls
.function_arg_padding (arg
->mode
, TREE_TYPE (pval
))
5673 /* At the moment we don't (need to) support ABIs for which the
5674 padding isn't known at compile time. In principle it should
5675 be easy to add though. */
5676 used
= force_align_up (size
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
5678 /* Compute the alignment of the pushed argument. */
5679 parm_align
= arg
->locate
.boundary
;
5680 if (targetm
.calls
.function_arg_padding (arg
->mode
, TREE_TYPE (pval
))
5683 poly_int64 pad
= used
- size
;
5684 unsigned int pad_align
= known_alignment (pad
) * BITS_PER_UNIT
;
5686 parm_align
= MIN (parm_align
, pad_align
);
5689 /* This isn't already where we want it on the stack, so put it there.
5690 This can either be done with push or copy insns. */
5691 if (maybe_ne (used
, 0)
5692 && !emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
),
5693 NULL_RTX
, parm_align
, partial
, reg
, used
- size
,
5694 argblock
, ARGS_SIZE_RTX (arg
->locate
.offset
),
5695 reg_parm_stack_space
,
5696 ARGS_SIZE_RTX (arg
->locate
.alignment_pad
), true))
5697 sibcall_failure
= 1;
5699 /* Unless this is a partially-in-register argument, the argument is now
5702 arg
->value
= arg
->stack
;
5706 /* BLKmode, at least partly to be pushed. */
5708 unsigned int parm_align
;
5712 /* Pushing a nonscalar.
5713 If part is passed in registers, PARTIAL says how much
5714 and emit_push_insn will take care of putting it there. */
5716 /* Round its size up to a multiple
5717 of the allocation unit for arguments. */
5719 if (arg
->locate
.size
.var
!= 0)
5722 size_rtx
= ARGS_SIZE_RTX (arg
->locate
.size
);
5726 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5727 for BLKmode is careful to avoid it. */
5728 excess
= (arg
->locate
.size
.constant
5729 - arg_int_size_in_bytes (TREE_TYPE (pval
))
5731 size_rtx
= expand_expr (arg_size_in_bytes (TREE_TYPE (pval
)),
5732 NULL_RTX
, TYPE_MODE (sizetype
),
5736 parm_align
= arg
->locate
.boundary
;
5738 /* When an argument is padded down, the block is aligned to
5739 PARM_BOUNDARY, but the actual argument isn't. */
5740 if (targetm
.calls
.function_arg_padding (arg
->mode
, TREE_TYPE (pval
))
5743 if (arg
->locate
.size
.var
)
5744 parm_align
= BITS_PER_UNIT
;
5747 unsigned int excess_align
5748 = known_alignment (excess
) * BITS_PER_UNIT
;
5749 if (excess_align
!= 0)
5750 parm_align
= MIN (parm_align
, excess_align
);
5754 if ((flags
& ECF_SIBCALL
) && MEM_P (arg
->value
))
5756 /* emit_push_insn might not work properly if arg->value and
5757 argblock + arg->locate.offset areas overlap. */
5761 if (strip_offset (XEXP (x
, 0), &i
)
5762 == crtl
->args
.internal_arg_pointer
)
5764 /* arg.locate doesn't contain the pretend_args_size offset,
5765 it's part of argblock. Ensure we don't count it in I. */
5766 if (STACK_GROWS_DOWNWARD
)
5767 i
-= crtl
->args
.pretend_args_size
;
5769 i
+= crtl
->args
.pretend_args_size
;
5771 /* expand_call should ensure this. */
5772 gcc_assert (!arg
->locate
.offset
.var
5773 && arg
->locate
.size
.var
== 0);
5774 poly_int64 size_val
= rtx_to_poly_int64 (size_rtx
);
5776 if (known_eq (arg
->locate
.offset
.constant
, i
))
5778 /* Even though they appear to be at the same location,
5779 if part of the outgoing argument is in registers,
5780 they aren't really at the same location. Check for
5781 this by making sure that the incoming size is the
5782 same as the outgoing size. */
5783 if (maybe_ne (arg
->locate
.size
.constant
, size_val
))
5784 sibcall_failure
= 1;
5786 else if (maybe_in_range_p (arg
->locate
.offset
.constant
,
5788 sibcall_failure
= 1;
5789 /* Use arg->locate.size.constant instead of size_rtx
5790 because we only care about the part of the argument
5792 else if (maybe_in_range_p (i
, arg
->locate
.offset
.constant
,
5793 arg
->locate
.size
.constant
))
5794 sibcall_failure
= 1;
5798 if (!CONST_INT_P (size_rtx
) || INTVAL (size_rtx
) != 0)
5799 emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
), size_rtx
,
5800 parm_align
, partial
, reg
, excess
, argblock
,
5801 ARGS_SIZE_RTX (arg
->locate
.offset
),
5802 reg_parm_stack_space
,
5803 ARGS_SIZE_RTX (arg
->locate
.alignment_pad
), false);
5805 /* Unless this is a partially-in-register argument, the argument is now
5808 ??? Unlike the case above, in which we want the actual
5809 address of the data, so that we can load it directly into a
5810 register, here we want the address of the stack slot, so that
5811 it's properly aligned for word-by-word copying or something
5812 like that. It's not clear that this is always correct. */
5814 arg
->value
= arg
->stack_slot
;
5817 if (arg
->reg
&& GET_CODE (arg
->reg
) == PARALLEL
)
5819 tree type
= TREE_TYPE (arg
->tree_value
);
5821 = emit_group_load_into_temps (arg
->reg
, arg
->value
, type
,
5822 int_size_in_bytes (type
));
5825 /* Mark all slots this store used. */
5826 if (ACCUMULATE_OUTGOING_ARGS
&& !(flags
& ECF_SIBCALL
)
5827 && argblock
&& ! variable_size
&& arg
->stack
)
5828 mark_stack_region_used (lower_bound
, upper_bound
);
5830 /* Once we have pushed something, pops can't safely
5831 be deferred during the rest of the arguments. */
5834 /* Free any temporary slots made in processing this argument. */
5837 return sibcall_failure
;
5840 /* Nonzero if we do not know how to pass TYPE solely in registers. */
5843 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED
,
5849 /* If the type has variable size... */
5850 if (TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5853 /* If the type is marked as addressable (it is required
5854 to be constructed into the stack)... */
5855 if (TREE_ADDRESSABLE (type
))
5861 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
5862 takes trailing padding of a structure into account. */
5863 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
5866 must_pass_in_stack_var_size_or_pad (machine_mode mode
, const_tree type
)
5871 /* If the type has variable size... */
5872 if (TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
5875 /* If the type is marked as addressable (it is required
5876 to be constructed into the stack)... */
5877 if (TREE_ADDRESSABLE (type
))
5880 if (TYPE_EMPTY_P (type
))
5883 /* If the padding and mode of the type is such that a copy into
5884 a register would put it into the wrong part of the register. */
5886 && int_size_in_bytes (type
) % (PARM_BOUNDARY
/ BITS_PER_UNIT
)
5887 && (targetm
.calls
.function_arg_padding (mode
, type
)
5888 == (BYTES_BIG_ENDIAN
? PAD_UPWARD
: PAD_DOWNWARD
)))
5894 /* Tell the garbage collector about GTY markers in this source file. */
5895 #include "gt-calls.h"