/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
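/* For example, on an assumed target where PREFERRED_STACK_BOUNDARY is
   128 bits and BITS_PER_UNIT is 8, STACK_BYTES evaluates to 16.  */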

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds the
     number of the special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound
     to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer in this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, tree, cumulative_args_t, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
               && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
      if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        use_reg (call_fusage, chain);
    }

  return funexp;
}
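/* A simplified sketch of the typical call site in expand_call (the
   surrounding variable values are assumed for illustration):

     rtx call_fusage = NULL_RTX;
     funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
                                    static_chain_value, &call_fusage,
                                    reg_parm_seen, flags & ECF_SIBCALL);

   Any static-chain USE accumulated in CALL_FUSAGE is later attached to
   the call insn by emit_call_1.  */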

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call, funmem, pat;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg, GEN_INT (struct_value_size));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (n_popped > 0
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                GEN_INT (struct_value_size));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp),
   then set ECF_RETURNS_TWICE.

   Similarly set ECF_NORETURN if the function is in the longjmp family.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might
   allocate space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
        {
          if (name[1] == '_'
              && name[2] == 'b'
              && !strncmp (name + 3, "uiltin_", 7))
            tname += 10;
          else if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE | ECF_LEAF;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE | ECF_LEAF;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        flags |= ECF_MAY_BE_ALLOCA;
        break;
      default:
        break;
      }

  return flags;
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
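/* For example, a function type carrying the internal "fn spec"
   attribute with a string beginning in '2' describes a function that
   returns its second argument, so this returns ERF_RETURNS_ARG | 1;
   a leading 'm' (used for allocation-like functions) yields
   ERF_NOALIAS, i.e. the returned pointer is assumed not to alias
   anything pre-existing.  */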

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
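/* Concretely, a declaration such as

     extern int f (int) __attribute__ ((const, nothrow, leaf));

   yields ECF_CONST | ECF_NOTHROW | ECF_LEAF here (the front end sets
   TREE_READONLY and TREE_NOTHROW from the first two attributes, while
   `leaf' is looked up directly), and a `noreturn' function additionally
   gets ECF_NORETURN via TREE_THIS_VOLATILE.  */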

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
                                          type, named_arg);
}

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
                                      named_arg);
}
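/* For instance, a C99 variable length array argument has a
   non-INTEGER_CST TYPE_SIZE and is therefore always passed by
   invisible reference here, regardless of what the
   targetm.calls.pass_by_reference hook would say; a C++ class with a
   non-trivial copy constructor is TREE_ADDRESSABLE and is treated the
   same way.  */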


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
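/* In other words, an argument whose RTL costs more than one insn to
   materialize (set_src_cost > COSTS_N_INSNS (1)) -- say, a symbolic
   address that takes a multi-insn sequence on the target -- is staged
   in a pseudo first, leaving the register allocator free to schedule
   the expensive computation away from the hard argument registers.  */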

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
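/* As a worked example (hypothetical target numbers, args growing
   upward): with reg_parm_stack_space == 16 and stack_usage_map showing
   bytes 8..15 in use, the loop finds low == 8 and high == 15, so
   num_to_save == 8 and save_mode is the 64-bit integer mode; had LOW
   not been aligned to that mode's size, the save would instead fall
   back to a BLKmode block move into a stack temporary.  */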

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false);
          }
      }
}
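/* To illustrate the endian correction: for a 3-byte argument on a
   hypothetical big-endian target with 32-bit words, bitsize == 24 and
   endian_correction == 32 - 24 == 8, so the store skips the one empty
   high-order byte of the word-mode pseudo, matching where a
   least-significant-byte-aligned structure actually sits in the
   register.  */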

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 cumulative_args_t args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  bitmap_obstack_initialize (NULL);

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the back.  */

  i = num_actuals - 1;
  {
    int j = i, ptr_arg = -1;
    call_expr_arg_iterator iter;
    tree arg;
    bitmap slots = NULL;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j--;

        /* If we pass the structure address then we need to create
           bounds for it.  Since the bounds creation is a call
           statement, we expand it right here to avoid fixing all
           other places where it may be expanded.  */
        if (CALL_WITH_BOUNDS_P (exp))
          {
            args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
            args[j].tree_value
              = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
            expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
                              EXPAND_NORMAL, 0, false);
            args[j].pointer_arg = j + 1;
            j--;
          }
      }
    argpos = 0;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);

        /* Remember the last param with a pointer and associate it
           with the following pointer bounds.  */
        if (CALL_WITH_BOUNDS_P (exp)
            && chkp_type_has_pointer (argtype))
          {
            if (slots)
              BITMAP_FREE (slots);
            ptr_arg = j;
            if (!BOUNDED_TYPE_P (argtype))
              {
                slots = BITMAP_ALLOC (NULL);
                chkp_find_bound_slots (argtype, slots);
              }
          }
        else if (CALL_WITH_BOUNDS_P (exp)
                 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
                                       argpos < n_named_args))
          {
            if (slots)
              BITMAP_FREE (slots);
            ptr_arg = j;
          }
        else if (POINTER_BOUNDS_TYPE_P (argtype))
          {
            /* We expect bounds in instrumented calls only.
               Otherwise it is a sign we lost the flag due to some
               optimization and may emit call args incorrectly.  */
            gcc_assert (CALL_WITH_BOUNDS_P (exp));

            /* For structures look for the next available pointer.  */
            if (ptr_arg != -1 && slots)
              {
                unsigned bnd_no = bitmap_first_set_bit (slots);
                args[j].pointer_offset =
                  bnd_no * POINTER_SIZE / BITS_PER_UNIT;

                bitmap_clear_bit (slots, bnd_no);

                /* Check we have no more pointers in the structure.  */
                if (bitmap_empty_p (slots))
                  BITMAP_FREE (slots);
              }
            args[j].pointer_arg = ptr_arg;

            /* Check we covered all pointers in the previous
               non-bounds arg.  */
            if (!slots)
              ptr_arg = -1;
          }
        else
          ptr_arg = -1;

        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j--;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j--;
        argpos++;
      }

    if (slots)
      BITMAP_FREE (slots);
  }

  bitmap_obstack_release (NULL);

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i--, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
          && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base = NULL_TREE;

          callee_copies
            = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && TREE_CODE (base) != SSA_NAME
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We may have turned the parameter value into an SSA name.
                 Go back to the original parameter so we can take the
                 address.  */
              if (TREE_CODE (args[i].tree_value) == SSA_NAME)
                {
                  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
                  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
                  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
                }
              /* Argument setup code may have copied the value to a register.
                 We revert that optimization now because the tail call code
                 must use the original location.  */
              if (TREE_CODE (args[i].tree_value) == PARM_DECL
                  && !MEM_P (DECL_RTL (args[i].tree_value))
                  && DECL_INCOMING_RTL (args[i].tree_value)
                  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
                set_decl_rtl (args[i].tree_value,
                              DECL_INCOMING_RTL (args[i].tree_value));

              mark_addressable (args[i].tree_value);

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
                                                argpos < n_named_args);

      if (args[i].reg && CONST_INT_P (args[i].reg))
        {
          args[i].special_slot = args[i].reg;
          args[i].reg = NULL;
        }

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, mode, type,
                                                 argpos < n_named_args);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* No stack allocation and padding for bounds.  */
      if (POINTER_BOUNDS_P (args[i].tree_value))
        ;
      /* Compute the stack-size of this argument.  */
      else if (args[i].reg == 0 || args[i].partial != 0
               || reg_parm_stack_space > 0
               || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             reg_parm_stack_space,
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
                                          type, argpos < n_named_args);
    }
}
cc45e5e8 1500/* Update ARGS_SIZE to contain the total size for the argument block.
1501 Return the original constant component of the argument block's size.
1502
1503 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1504 for arguments passed in registers. */
1505
1506static int
4c9e08a4 1507compute_argument_block_size (int reg_parm_stack_space,
1508 struct args_size *args_size,
60e2260d 1509 tree fndecl ATTRIBUTE_UNUSED,
fa20f865 1510 tree fntype ATTRIBUTE_UNUSED,
4c9e08a4 1511 int preferred_stack_boundary ATTRIBUTE_UNUSED)
cc45e5e8 1512{
1513 int unadjusted_args_size = args_size->constant;
1514
4448f543 1515 /* For accumulate outgoing args mode we don't need to align, since the frame
1516 will be already aligned. Align to STACK_BOUNDARY in order to prevent
35a3065a 1517 backends from generating misaligned frame sizes. */
4448f543 1518 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1519 preferred_stack_boundary = STACK_BOUNDARY;
4448f543 1520
cc45e5e8 1521 /* Compute the actual size of the argument block required. The variable
1522 and constant sizes must be combined, the size may have to be rounded,
1523 and there may be a minimum required size. */
1524
1525 if (args_size->var)
1526 {
1527 args_size->var = ARGS_SIZE_TREE (*args_size);
1528 args_size->constant = 0;
1529
d0285dd8 1530 preferred_stack_boundary /= BITS_PER_UNIT;
1531 if (preferred_stack_boundary > 1)
91b70175 1532 {
1533 /* We don't handle this case yet. To handle it correctly we have
35a3065a 1534 to add the delta, round and subtract the delta.
91b70175 1535 Currently no machine description requires this support. */
231bd014 1536 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
91b70175 1537 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1538 }
cc45e5e8 1539
1540 if (reg_parm_stack_space > 0)
1541 {
1542 args_size->var
1543 = size_binop (MAX_EXPR, args_size->var,
902de8ed 1544 ssize_int (reg_parm_stack_space));
cc45e5e8 1545
cc45e5e8 1546 /* The area corresponding to register parameters is not to count in
1547 the size of the block we need. So make the adjustment. */
fa20f865 1548 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 1549 args_size->var
1550 = size_binop (MINUS_EXPR, args_size->var,
1551 ssize_int (reg_parm_stack_space));
cc45e5e8 1552 }
1553 }
1554 else
1555 {
d0285dd8 1556 preferred_stack_boundary /= BITS_PER_UNIT;
60ecc450 1557 if (preferred_stack_boundary < 1)
1558 preferred_stack_boundary = 1;
e39fae61 1559 args_size->constant = (((args_size->constant
91b70175 1560 + stack_pointer_delta
d0285dd8 1561 + preferred_stack_boundary - 1)
1562 / preferred_stack_boundary
1563 * preferred_stack_boundary)
91b70175 1564 - stack_pointer_delta);
cc45e5e8 1565
1566 args_size->constant = MAX (args_size->constant,
1567 reg_parm_stack_space);
1568
fa20f865 1569 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 1570 args_size->constant -= reg_parm_stack_space;
cc45e5e8 1571 }
1572 return unadjusted_args_size;
1573}
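
/* Illustrative trace of the constant-size rounding above, with
   hypothetical numbers (not taken from any real target): assume a
   preferred boundary of 16 bytes, stack_pointer_delta == 4 and an
   unrounded args_size->constant of 20.  Then

     ((20 + 4 + 16 - 1) / 16) * 16 - 4 == 32 - 4 == 28,

   so after the 28 argument bytes are pushed the total delta is
   4 + 28 == 32, a multiple of the boundary.  A minimal sketch of the
   same computation:

     static int
     round_arg_block_size (int constant, int delta, int boundary)
     {
       return (constant + delta + boundary - 1) / boundary * boundary
	      - delta;
     }  */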
1574
caa1595a 1575/* Precompute parameters as needed for a function call.
04707f1c 1576
04707f1c 1579 NUM_ACTUALS is the number of arguments.
1580
c87678e4 1581 ARGS is an array containing information for each argument; this
1582 routine fills in the INITIAL_VALUE and VALUE fields for each
1583 precomputed argument. */
04707f1c 1584
1585static void
2dd6f9ed 1586precompute_arguments (int num_actuals, struct arg_data *args)
04707f1c 1587{
1588 int i;
1589
c5dc094f 1592
1593 /* If we preallocated the stack space, and some arguments must be passed
1594 on the stack, then we must precompute any parameter which contains a
1595 function call which will store arguments on the stack.
1596 Otherwise, evaluating the parameter may clobber previous parameters
 1597	 which have already been stored into the stack.  (We have code to avoid
 1598	 such a case by saving the outgoing stack arguments, but it results in
 1599	 worse code.)  */
2dd6f9ed 1600 if (!ACCUMULATE_OUTGOING_ARGS)
67c155cb 1601 return;
0d568ddf 1602
04707f1c 1603 for (i = 0; i < num_actuals; i++)
67c155cb 1604 {
3b2411a8 1605 tree type;
3754d046 1606 machine_mode mode;
701e46d0 1607
2dd6f9ed 1608 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
c5dc094f 1609 continue;
1610
67c155cb 1611 /* If this is an addressable type, we cannot pre-evaluate it. */
3b2411a8 1612 type = TREE_TYPE (args[i].tree_value);
1613 gcc_assert (!TREE_ADDRESSABLE (type));
04707f1c 1614
67c155cb 1615 args[i].initial_value = args[i].value
8ec3c5c2 1616 = expand_normal (args[i].tree_value);
04707f1c 1617
3b2411a8 1618 mode = TYPE_MODE (type);
67c155cb 1619 if (mode != args[i].mode)
1620 {
3b2411a8 1621 int unsignedp = args[i].unsignedp;
67c155cb 1622 args[i].value
1623 = convert_modes (args[i].mode, mode,
1624 args[i].value, args[i].unsignedp);
3b2411a8 1625
67c155cb 1626 /* CSE will replace this only if it contains args[i].value
1627 pseudo, so convert it down to the declared mode using
1628 a SUBREG. */
1629 if (REG_P (args[i].value)
3b2411a8 1630 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1631 && promote_mode (type, mode, &unsignedp) != args[i].mode)
67c155cb 1632 {
1633 args[i].initial_value
1634 = gen_lowpart_SUBREG (mode, args[i].value);
1635 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
5a9ccd1b 1636 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
67c155cb 1637 }
67c155cb 1638 }
1639 }
04707f1c 1640}
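
/* As an illustration (assuming a hypothetical target that promotes
   HImode arguments to SImode): for a "short" argument, args[i].mode
   is SImode while TYPE_MODE of the tree type is HImode.  The
   convert_modes call above widens the precomputed value to SImode;
   when the promoted form does not already match, initial_value is
   additionally recorded as a lowpart SUBREG of that pseudo with
   SUBREG_PROMOTED_VAR_P set, so that CSE can convert it back down
   where the narrow value is wanted.  */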
1641
e717ffc2 1642/* Given the current state of MUST_PREALLOCATE and information about
1643 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1644 compute and return the final value for MUST_PREALLOCATE. */
1645
1646static int
48e1416a 1647finalize_must_preallocate (int must_preallocate, int num_actuals,
c2f47e15 1648 struct arg_data *args, struct args_size *args_size)
e717ffc2 1649{
1650 /* See if we have or want to preallocate stack space.
1651
1652 If we would have to push a partially-in-regs parm
1653 before other stack parms, preallocate stack space instead.
1654
1655 If the size of some parm is not a multiple of the required stack
1656 alignment, we must preallocate.
1657
1658 If the total size of arguments that would otherwise create a copy in
1659 a temporary (such as a CALL) is more than half the total argument list
1660 size, preallocation is faster.
1661
1662 Another reason to preallocate is if we have a machine (like the m88k)
1663 where stack alignment is required to be maintained between every
1664 pair of insns, not just when the call is made. However, we assume here
1665 that such machines either do not have push insns (and hence preallocation
1666 would occur anyway) or the problem is taken care of with
1667 PUSH_ROUNDING. */
1668
1669 if (! must_preallocate)
1670 {
1671 int partial_seen = 0;
1672 int copy_to_evaluate_size = 0;
1673 int i;
1674
1675 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1676 {
1677 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1678 partial_seen = 1;
1679 else if (partial_seen && args[i].reg == 0)
1680 must_preallocate = 1;
058a1b7a 1681	  /* We preallocate in case there are bounds passed
 1682	     in the bounds table, so that we have a precomputed
 1683	     address for bounds association.  */
1684 else if (POINTER_BOUNDS_P (args[i].tree_value)
1685 && !args[i].reg)
1686 must_preallocate = 1;
e717ffc2 1687
1688 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1689 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1690 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1691 || TREE_CODE (args[i].tree_value) == COND_EXPR
1692 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1693 copy_to_evaluate_size
1694 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1695 }
1696
1697 if (copy_to_evaluate_size * 2 >= args_size->constant
1698 && args_size->constant > 0)
1699 must_preallocate = 1;
1700 }
1701 return must_preallocate;
1702}
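
/* A worked example of the size heuristic above, with hypothetical
   sizes: if the argument block is 32 bytes and 20 of them come from
   BLKmode arguments that would need temporaries (e.g. a CALL_EXPR
   argument), then 20 * 2 >= 32 holds and we preallocate, on the
   theory that pushing each temporary separately would be slower than
   one block copy into preallocated space.  */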
cc45e5e8 1703
f3012854 1704/* If we preallocated stack space, compute the address of each argument
1705 and store it into the ARGS array.
1706
c87678e4 1707 We need not ensure it is a valid memory address here; it will be
f3012854 1708 validized when it is used.
1709
1710 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1711
1712static void
4c9e08a4 1713compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
f3012854 1714{
1715 if (argblock)
1716 {
1717 rtx arg_reg = argblock;
1718 int i, arg_offset = 0;
1719
1720 if (GET_CODE (argblock) == PLUS)
1721 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1722
1723 for (i = 0; i < num_actuals; i++)
1724 {
241399f6 1725 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1726 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
f3012854 1727 rtx addr;
c5dc0c32 1728 unsigned int align, boundary;
c2ca1bab 1729 unsigned int units_on_stack = 0;
3754d046 1730 machine_mode partial_mode = VOIDmode;
f3012854 1731
1732 /* Skip this parm if it will not be passed on the stack. */
c2ca1bab 1733 if (! args[i].pass_on_stack
1734 && args[i].reg != 0
1735 && args[i].partial == 0)
f3012854 1736 continue;
1737
058a1b7a 1738 /* Pointer Bounds are never passed on the stack. */
1739 if (POINTER_BOUNDS_P (args[i].tree_value))
1740 continue;
1741
971ba038 1742 if (CONST_INT_P (offset))
29c05e22 1743 addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
f3012854 1744 else
1745 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1746
29c05e22 1747 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 1748
1749 if (args[i].partial != 0)
1750 {
1751 /* Only part of the parameter is being passed on the stack.
1752 Generate a simple memory reference of the correct size. */
1753 units_on_stack = args[i].locate.size.constant;
1754 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1755 MODE_INT, 1);
1756 args[i].stack = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 1757 set_mem_size (args[i].stack, units_on_stack);
c2ca1bab 1758 }
1759 else
1760 {
1761 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1762 set_mem_attributes (args[i].stack,
1763 TREE_TYPE (args[i].tree_value), 1);
1764 }
c5dc0c32 1765 align = BITS_PER_UNIT;
1766 boundary = args[i].locate.boundary;
1767 if (args[i].locate.where_pad != downward)
1768 align = boundary;
971ba038 1769 else if (CONST_INT_P (offset))
c5dc0c32 1770 {
1771 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1772 align = align & -align;
1773 }
1774 set_mem_align (args[i].stack, align);
f3012854 1775
971ba038 1776 if (CONST_INT_P (slot_offset))
29c05e22 1777 addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
f3012854 1778 else
1779 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1780
29c05e22 1781 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 1782
1783 if (args[i].partial != 0)
1784 {
 1785	      /* Only part of the parameter is being passed on the stack.
 1786	         Generate a simple memory reference of the correct
 1787	         size.  */
1788 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 1789 set_mem_size (args[i].stack_slot, units_on_stack);
c2ca1bab 1790 }
1791 else
1792 {
1793 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1794 set_mem_attributes (args[i].stack_slot,
1795 TREE_TYPE (args[i].tree_value), 1);
1796 }
c5dc0c32 1797 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
a9f2963b 1798
1799 /* Function incoming arguments may overlap with sibling call
1800 outgoing arguments and we cannot allow reordering of reads
1801 from function arguments with stores to outgoing arguments
1802 of sibling calls. */
ab6ab77e 1803 set_mem_alias_set (args[i].stack, 0);
1804 set_mem_alias_set (args[i].stack_slot, 0);
f3012854 1805 }
1806 }
1807}
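
/* The MEM alignment computed above for downward-padded arguments at a
   constant offset uses a standard bit trick:

     align = INTVAL (offset) * BITS_PER_UNIT | boundary;
     align = align & -align;

   keeps only the lowest set bit, i.e. the largest power of two known
   to divide the offset in bits.  With hypothetical values offset == 4
   bytes and boundary == 64, this gives 32 | 64 == 96 and
   96 & -96 == 32, so the slot is known to be 32-bit aligned.  */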
c87678e4 1808
f3012854 1809/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1810 in a call instruction.
1811
1812 FNDECL is the tree node for the target function. For an indirect call
1813 FNDECL will be NULL_TREE.
1814
95672afe 1815 ADDR is the operand 0 of CALL_EXPR for this call. */
f3012854 1816
1817static rtx
4c9e08a4 1818rtx_for_function_call (tree fndecl, tree addr)
f3012854 1819{
1820 rtx funexp;
1821
1822 /* Get the function to call, in the form of RTL. */
1823 if (fndecl)
1824 {
3d053e06 1825 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
ea259bbe 1826 TREE_USED (fndecl) = 1;
f3012854 1827
1828 /* Get a SYMBOL_REF rtx for the function address. */
1829 funexp = XEXP (DECL_RTL (fndecl), 0);
1830 }
1831 else
1832 /* Generate an rtx (probably a pseudo-register) for the address. */
1833 {
1834 push_temp_slots ();
8ec3c5c2 1835 funexp = expand_normal (addr);
c87678e4 1836 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
f3012854 1837 }
1838 return funexp;
1839}
1840
74c02416 1841/* Internal state for internal_arg_pointer_based_exp and its helpers. */
1842static struct
1843{
1844 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1845 or NULL_RTX if none has been scanned yet. */
3663becd 1846 rtx_insn *scan_start;
74c02416 1847 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
1848 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
1849 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
1850 with fixed offset, or PC if this is with variable or unknown offset. */
f1f41a6c 1851 vec<rtx> cache;
74c02416 1852} internal_arg_pointer_exp_state;
1853
474ce66a 1854static rtx internal_arg_pointer_based_exp (const_rtx, bool);
74c02416 1855
1856/* Helper function for internal_arg_pointer_based_exp. Scan insns in
 1857	   the tail call sequence, starting with the first insn that hasn't been
 1858	   scanned yet, and note for each pseudo on the LHS whether it is based
 1859	   on crtl->args.internal_arg_pointer or not, and what offset from that
 1860	   pointer it has.  */
1861
1862static void
1863internal_arg_pointer_based_exp_scan (void)
1864{
3663becd 1865 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
74c02416 1866
1867 if (scan_start == NULL_RTX)
1868 insn = get_insns ();
1869 else
1870 insn = NEXT_INSN (scan_start);
1871
1872 while (insn)
1873 {
1874 rtx set = single_set (insn);
1875 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1876 {
1877 rtx val = NULL_RTX;
1878 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1879 /* Punt on pseudos set multiple times. */
f1f41a6c 1880 if (idx < internal_arg_pointer_exp_state.cache.length ()
1881 && (internal_arg_pointer_exp_state.cache[idx]
74c02416 1882 != NULL_RTX))
1883 val = pc_rtx;
1884 else
1885 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1886 if (val != NULL_RTX)
1887 {
f1f41a6c 1888 if (idx >= internal_arg_pointer_exp_state.cache.length ())
9af5ce0c 1889 internal_arg_pointer_exp_state.cache
1890 .safe_grow_cleared (idx + 1);
f1f41a6c 1891 internal_arg_pointer_exp_state.cache[idx] = val;
74c02416 1892 }
1893 }
1894 if (NEXT_INSN (insn) == NULL_RTX)
1895 scan_start = insn;
1896 insn = NEXT_INSN (insn);
1897 }
1898
1899 internal_arg_pointer_exp_state.scan_start = scan_start;
1900}
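
/* For example, after scanning a sequence such as (pseudo register
   numbers hypothetical):

     (set (reg 100) (plus (internal_arg_pointer) (const_int 8)))
     (set (reg 101) (mem (reg 100)))
     (set (reg 100) (reg 102))

   the cache records (const_int 8) for reg 100 until the third insn
   demotes it to PC (set more than once), while reg 101 is left
   uncached because a load from memory is not itself based on the
   internal argument pointer.  */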
1901
74c02416 1902/* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1903 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1904 it with fixed offset, or PC if this is with variable or unknown offset.
1905 TOPLEVEL is true if the function is invoked at the topmost level. */
1906
1907static rtx
474ce66a 1908internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
74c02416 1909{
1910 if (CONSTANT_P (rtl))
1911 return NULL_RTX;
1912
1913 if (rtl == crtl->args.internal_arg_pointer)
1914 return const0_rtx;
1915
1916 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1917 return NULL_RTX;
1918
1919 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1920 {
1921 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1922 if (val == NULL_RTX || val == pc_rtx)
1923 return val;
29c05e22 1924 return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
74c02416 1925 }
1926
1927 /* When called at the topmost level, scan pseudo assignments in between the
1928 last scanned instruction in the tail call sequence and the latest insn
1929 in that sequence. */
1930 if (toplevel)
1931 internal_arg_pointer_based_exp_scan ();
1932
1933 if (REG_P (rtl))
1934 {
1935 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
f1f41a6c 1936 if (idx < internal_arg_pointer_exp_state.cache.length ())
1937 return internal_arg_pointer_exp_state.cache[idx];
74c02416 1938
1939 return NULL_RTX;
1940 }
1941
474ce66a 1942 subrtx_iterator::array_type array;
1943 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
1944 {
1945 const_rtx x = *iter;
1946 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
1947 return pc_rtx;
1948 if (MEM_P (x))
1949 iter.skip_subrtxes ();
1950 }
74c02416 1951
1952 return NULL_RTX;
1953}
1954
ff6c0ab2 1955/* Return true if and only if SIZE storage units (usually bytes)
 1956	   starting from address ADDR overlap with the already-clobbered
 1957	   argument area.  This function is used to determine whether we
 1958	   should give up on a sibcall.  */
1959
1960static bool
1961mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1962{
1963 HOST_WIDE_INT i;
74c02416 1964 rtx val;
ff6c0ab2 1965
53c5d9d4 1966 if (bitmap_empty_p (stored_args_map))
9ddeff7e 1967 return false;
74c02416 1968 val = internal_arg_pointer_based_exp (addr, true);
1969 if (val == NULL_RTX)
1970 return false;
1971 else if (val == pc_rtx)
cc0595c0 1972 return true;
ff6c0ab2 1973 else
74c02416 1974 i = INTVAL (val);
a8b58ffb 1975
1976 if (STACK_GROWS_DOWNWARD)
1977 i -= crtl->args.pretend_args_size;
1978 else
1979 i += crtl->args.pretend_args_size;
1980
ff6c0ab2 1981
ccccd62c 1982 if (ARGS_GROW_DOWNWARD)
1983 i = -i - size;
1984
ff6c0ab2 1985 if (size > 0)
1986 {
1987 unsigned HOST_WIDE_INT k;
1988
1989 for (k = 0; k < size; k++)
156093aa 1990 if (i + k < SBITMAP_SIZE (stored_args_map)
08b7917c 1991 && bitmap_bit_p (stored_args_map, i + k))
ff6c0ab2 1992 return true;
1993 }
1994
1995 return false;
1996}
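
/* A worked example with hypothetical numbers: if ADDR is based on the
   internal arg pointer with offset 16, SIZE is 8, the stack grows
   downward, arguments grow upward and there are no pretend args, the
   loop above tests bits 16..23 of stored_args_map and reports an
   overlap as soon as any of those bytes has already been stored by an
   earlier tail call argument.  */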
1997
cde25025 1998/* Do the register loads required for any wholly-register parms or any
1999 parms which are passed both on the stack and in a register. Their
c87678e4 2000 expressions were already evaluated.
cde25025 2001
2002 Mark all register-parms as living through the call, putting these USE
4c9e08a4 2003 insns in the CALL_INSN_FUNCTION_USAGE field.
2004
dc537795 2005   When IS_SIBCALL is set, perform the check_sibcall_argument_overlap
42b11544 2006   checks, setting *SIBCALL_FAILURE if appropriate.  */
cde25025 2007
2008static void
4c9e08a4 2009load_register_parameters (struct arg_data *args, int num_actuals,
2010 rtx *call_fusage, int flags, int is_sibcall,
2011 int *sibcall_failure)
cde25025 2012{
2013 int i, j;
2014
cde25025 2015 for (i = 0; i < num_actuals; i++)
cde25025 2016 {
0e0be288 2017 rtx reg = ((flags & ECF_SIBCALL)
2018 ? args[i].tail_call_reg : args[i].reg);
cde25025 2019 if (reg)
2020 {
5f4cd670 2021 int partial = args[i].partial;
2022 int nregs;
2023 int size = 0;
3663becd 2024 rtx_insn *before_arg = get_last_insn ();
83272ab4 2025 /* Set non-negative if we must move a word at a time, even if
 2026	     just one word (e.g., partial == 4 && mode == DFmode).  Set
 2027	     to -1 if we just use a normal move insn.  This value can be
 2028	     zero if the argument is a zero-size structure.  */
5f4cd670 2029 nregs = -1;
f054eb3c 2030 if (GET_CODE (reg) == PARALLEL)
2031 ;
2032 else if (partial)
2033 {
2034 gcc_assert (partial % UNITS_PER_WORD == 0);
2035 nregs = partial / UNITS_PER_WORD;
2036 }
5f4cd670 2037 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2038 {
2039 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2040 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2041 }
2042 else
2043 size = GET_MODE_SIZE (args[i].mode);
cde25025 2044
2045 /* Handle calls that pass values in multiple non-contiguous
2046 locations. The Irix 6 ABI has examples of this. */
2047
2048 if (GET_CODE (reg) == PARALLEL)
b600a907 2049 emit_group_move (reg, args[i].parallel_value);
cde25025 2050
2051 /* If simple case, just do move. If normal partial, store_one_arg
2052 has already loaded the register for us. In all other cases,
2053 load the register(s) from memory. */
2054
8e67abab 2055 else if (nregs == -1)
2056 {
2057 emit_move_insn (reg, args[i].value);
5f4cd670 2058#ifdef BLOCK_REG_PADDING
8e67abab 2059 /* Handle case where we have a value that needs shifting
2060 up to the msb. eg. a QImode value and we're padding
2061 upward on a BYTES_BIG_ENDIAN machine. */
2062 if (size < UNITS_PER_WORD
2063 && (args[i].locate.where_pad
2064 == (BYTES_BIG_ENDIAN ? upward : downward)))
2065 {
8e67abab 2066 rtx x;
2067 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
b2abd798 2068
2069 /* Assigning REG here rather than a temp makes CALL_FUSAGE
2070 report the whole reg as used. Strictly speaking, the
2071 call only uses SIZE bytes at the msb end, but it doesn't
2072 seem worth generating rtl to say that. */
2073 reg = gen_rtx_REG (word_mode, REGNO (reg));
f5ff0b21 2074 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
b2abd798 2075 if (x != reg)
2076 emit_move_insn (reg, x);
8e67abab 2077 }
5f4cd670 2078#endif
8e67abab 2079 }
cde25025 2080
2081 /* If we have pre-computed the values to put in the registers in
2082 the case of non-aligned structures, copy them in now. */
2083
2084 else if (args[i].n_aligned_regs != 0)
2085 for (j = 0; j < args[i].n_aligned_regs; j++)
2086 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2087 args[i].aligned_regs[j]);
2088
e2e0ef92 2089 else if (partial == 0 || args[i].pass_on_stack)
5f4cd670 2090 {
d2b9158b 2091 rtx mem = validize_mem (copy_rtx (args[i].value));
5f4cd670 2092
e2e0ef92 2093 /* Check for overlap with already clobbered argument area,
2094 providing that this has non-zero size. */
ff6c0ab2 2095 if (is_sibcall
77478042 2096 && size != 0
2097 && (mem_overlaps_already_clobbered_arg_p
2098 (XEXP (args[i].value, 0), size)))
ff6c0ab2 2099 *sibcall_failure = 1;
2100
72f2d6cc 2101 if (size % UNITS_PER_WORD == 0
2102 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2103 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2104 else
2105 {
2106 if (nregs > 1)
2107 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2108 args[i].mode);
2109 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2110 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2111 unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
292237f3 2112 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2113 word_mode, word_mode, false);
72f2d6cc 2114 if (BYTES_BIG_ENDIAN)
2115 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2116 BITS_PER_WORD - bitsize, dest, 1);
2117 if (x != dest)
2118 emit_move_insn (dest, x);
2119 }
2120
5f4cd670 2121 /* Handle a BLKmode that needs shifting. */
8e67abab 2122 if (nregs == 1 && size < UNITS_PER_WORD
2c267f1a 2123#ifdef BLOCK_REG_PADDING
2124 && args[i].locate.where_pad == downward
2125#else
2126 && BYTES_BIG_ENDIAN
2127#endif
72f2d6cc 2128 )
5f4cd670 2129 {
72f2d6cc 2130 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
5f4cd670 2131 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
72f2d6cc 2132 enum tree_code dir = (BYTES_BIG_ENDIAN
2133 ? RSHIFT_EXPR : LSHIFT_EXPR);
2134 rtx x;
5f4cd670 2135
72f2d6cc 2136 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2137 if (x != dest)
2138 emit_move_insn (dest, x);
5f4cd670 2139 }
5f4cd670 2140 }
cde25025 2141
42b11544 2142 /* When a parameter is a block, and perhaps in other cases, it is
2143 possible that it did a load from an argument slot that was
6a8fa8e2 2144 already clobbered. */
42b11544 2145 if (is_sibcall
2146 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2147 *sibcall_failure = 1;
2148
cde25025 2149 /* Handle calls that pass values in multiple non-contiguous
2150 locations. The Irix 6 ABI has examples of this. */
2151 if (GET_CODE (reg) == PARALLEL)
2152 use_group_regs (call_fusage, reg);
2153 else if (nregs == -1)
b4eeceb9 2154 use_reg_mode (call_fusage, reg,
2155 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
c75d013c 2156 else if (nregs > 0)
2157 use_regs (call_fusage, REGNO (reg), nregs);
cde25025 2158 }
2159 }
2160}
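
/* Example of the final BLKmode shift above, with a hypothetical
   layout: a 3-byte structure loaded word-wise into a 4-byte register
   on a BYTES_BIG_ENDIAN target lands in the most significant 24 bits,
   so it is shifted right by (UNITS_PER_WORD - size) * BITS_PER_UNIT
   == 8 bits to place it at the least significant end, as required for
   downward padding.  */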
2161
92e1ef5b 2162/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2163 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2164 bytes, then we would need to push some additional bytes to pad the
481feae3 2165   arguments.  So, we compute an adjustment to the stack pointer for an
2166 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2167 bytes. Then, when the arguments are pushed the stack will be perfectly
2168 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
2169 be popped after the call. Returns the adjustment. */
92e1ef5b 2170
481feae3 2171static int
4c9e08a4 2172combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
2173 struct args_size *args_size,
38413c80 2174 unsigned int preferred_unit_stack_boundary)
92e1ef5b 2175{
2176 /* The number of bytes to pop so that the stack will be
2177 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2178 HOST_WIDE_INT adjustment;
2179 /* The alignment of the stack after the arguments are pushed, if we
 2180	   just pushed the arguments without adjusting the stack here.  */
38413c80 2181 unsigned HOST_WIDE_INT unadjusted_alignment;
92e1ef5b 2182
c87678e4 2183 unadjusted_alignment
92e1ef5b 2184 = ((stack_pointer_delta + unadjusted_args_size)
2185 % preferred_unit_stack_boundary);
2186
2187 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2188 as possible -- leaving just enough left to cancel out the
2189 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2190 PENDING_STACK_ADJUST is non-negative, and congruent to
2191 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2192
2193 /* Begin by trying to pop all the bytes. */
c87678e4 2194 unadjusted_alignment
2195 = (unadjusted_alignment
92e1ef5b 2196 - (pending_stack_adjust % preferred_unit_stack_boundary));
2197 adjustment = pending_stack_adjust;
2198 /* Push enough additional bytes that the stack will be aligned
2199 after the arguments are pushed. */
d3ef58ec 2200 if (preferred_unit_stack_boundary > 1)
2201 {
3dc35e62 2202 if (unadjusted_alignment > 0)
c87678e4 2203 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
d3ef58ec 2204 else
c87678e4 2205 adjustment += unadjusted_alignment;
d3ef58ec 2206 }
c87678e4 2207
92e1ef5b 2208  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2209 bytes after the call. The right number is the entire
2210 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2211 by the arguments in the first place. */
c87678e4 2212 args_size->constant
92e1ef5b 2213 = pending_stack_adjust - adjustment + unadjusted_args_size;
2214
481feae3 2215 return adjustment;
92e1ef5b 2216}
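
/* Tracing the code above with hypothetical values: take a preferred
   unit boundary of 16, stack_pointer_delta == 8,
   UNADJUSTED_ARGS_SIZE == 12 and pending_stack_adjust == 24.  Then
   unadjusted_alignment starts as (8 + 12) % 16 == 4, becomes
   4 - (24 % 16) == -4, and the returned adjustment is
   24 + (-4) == 20.  ARGS_SIZE->CONSTANT is set to
   24 - 20 + 12 == 16, and after popping 20 bytes and pushing the 12
   bytes of arguments the stack lands back on a 16-byte boundary.  */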
2217
7ecc63d3 2218/* Scan expression X to see whether it dereferences any argument slots
 2219	   we have already clobbered with tail call arguments (as noted in the
 2220	   stored_args_map bitmap).
d10cfa8d 2221	   Return nonzero if X dereferences such an argument slot,
7ecc63d3 2222	   zero otherwise.  */
2223
2224static int
4c9e08a4 2225check_sibcall_argument_overlap_1 (rtx x)
7ecc63d3 2226{
2227 RTX_CODE code;
2228 int i, j;
7ecc63d3 2229 const char *fmt;
2230
2231 if (x == NULL_RTX)
2232 return 0;
2233
2234 code = GET_CODE (x);
2235
cc0595c0 2236 /* We need not check the operands of the CALL expression itself. */
2237 if (code == CALL)
2238 return 0;
2239
7ecc63d3 2240 if (code == MEM)
ff6c0ab2 2241 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2242 GET_MODE_SIZE (GET_MODE (x)));
7ecc63d3 2243
c87678e4 2244 /* Scan all subexpressions. */
7ecc63d3 2245 fmt = GET_RTX_FORMAT (code);
2246 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2247 {
2248 if (*fmt == 'e')
c87678e4 2249 {
2250 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2251 return 1;
2252 }
7ecc63d3 2253 else if (*fmt == 'E')
c87678e4 2254 {
2255 for (j = 0; j < XVECLEN (x, i); j++)
2256 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2257 return 1;
2258 }
7ecc63d3 2259 }
2260 return 0;
7ecc63d3 2261}
2262
 2263/* Scan the sequence after INSN to see whether it dereferences any argument
 2264	   slots we have already clobbered with tail call arguments (as noted in the
42b11544 2265	   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add the stack slots for
 2266	   ARG to the stored_args_map bitmap afterwards (when ARG is a register,
 2267	   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
 2268	   INSN dereferences such argument slots, zero otherwise.  */
7ecc63d3 2269
2270static int
3663becd 2271check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2272 int mark_stored_args_map)
c87678e4 2273{
7ecc63d3 2274 int low, high;
2275
2276 if (insn == NULL_RTX)
2277 insn = get_insns ();
2278 else
2279 insn = NEXT_INSN (insn);
2280
2281 for (; insn; insn = NEXT_INSN (insn))
c87678e4 2282 if (INSN_P (insn)
2283 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
7ecc63d3 2284 break;
2285
42b11544 2286 if (mark_stored_args_map)
2287 {
ccccd62c 2288 if (ARGS_GROW_DOWNWARD)
2289 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2290 else
2291 low = arg->locate.slot_offset.constant;
db10eec8 2292
241399f6 2293 for (high = low + arg->locate.size.constant; low < high; low++)
08b7917c 2294 bitmap_set_bit (stored_args_map, low);
42b11544 2295 }
7ecc63d3 2296 return insn != NULL_RTX;
2297}
2298
05d18e8b 2299/* Given that a function returns a value of mode MODE at the most
2300 significant end of hard register VALUE, shift VALUE left or right
2301 as specified by LEFT_P. Return true if some action was needed. */
2c8ff1ed 2302
05d18e8b 2303bool
3754d046 2304shift_return_value (machine_mode mode, bool left_p, rtx value)
2c8ff1ed 2305{
05d18e8b 2306 HOST_WIDE_INT shift;
2307
2308 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2309 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2310 if (shift == 0)
2311 return false;
2312
2313 /* Use ashr rather than lshr for right shifts. This is for the benefit
2314 of the MIPS port, which requires SImode values to be sign-extended
2315 when stored in 64-bit registers. */
2316 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2317 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2318 gcc_unreachable ();
2319 return true;
2c8ff1ed 2320}
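
/* For instance, with hypothetical modes: an SImode value returned at
   the most significant end of a 64-bit hard register gives
   shift == 64 - 32 == 32; with LEFT_P false the value is moved down
   by an arithmetic right shift of 32 bits, sign-extending it as the
   MIPS port mentioned above requires.  */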
2321
90af1361 2322/* If X is a likely-spilled register value, copy it to a pseudo
2323 register and return that register. Return X otherwise. */
2324
2325static rtx
2326avoid_likely_spilled_reg (rtx x)
2327{
f4e36c33 2328 rtx new_rtx;
90af1361 2329
2330 if (REG_P (x)
2331 && HARD_REGISTER_P (x)
24dd0668 2332 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
90af1361 2333 {
2334 /* Make sure that we generate a REG rather than a CONCAT.
2335 Moves into CONCATs can need nontrivial instructions,
2336 and the whole point of this function is to avoid
2337 using the hard register directly in such a situation. */
2338 generating_concat_p = 0;
f4e36c33 2339 new_rtx = gen_reg_rtx (GET_MODE (x));
90af1361 2340 generating_concat_p = 1;
f4e36c33 2341 emit_move_insn (new_rtx, x);
2342 return new_rtx;
90af1361 2343 }
2344 return x;
2345}
2346
80e11038 2347/* Helper function for expand_call.
 2348	   Return false if EXP is not implementable as a sibling call.  */
2349
2350static bool
2351can_implement_as_sibling_call_p (tree exp,
2352 rtx structure_value_addr,
2353 tree funtype,
2354 int reg_parm_stack_space,
2355 tree fndecl,
2356 int flags,
2357 tree addr,
2358 const args_size &args_size)
2359{
2360 if (!targetm.have_sibcall_epilogue ())
2361 return false;
2362
2363 /* Doing sibling call optimization needs some work, since
2364 structure_value_addr can be allocated on the stack.
2365 It does not seem worth the effort since few optimizable
2366 sibling calls will return a structure. */
2367 if (structure_value_addr != NULL_RTX)
2368 return false;
2369
2370#ifdef REG_PARM_STACK_SPACE
 2371	  /* If outgoing reg parm stack space changes, we cannot do a sibcall.  */
2372 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2373 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
2374 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
2375 return false;
2376#endif
2377
2378 /* Check whether the target is able to optimize the call
2379 into a sibcall. */
2380 if (!targetm.function_ok_for_sibcall (fndecl, exp))
2381 return false;
2382
2383 /* Functions that do not return exactly once may not be sibcall
2384 optimized. */
2385 if (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2386 return false;
2387
2388 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
2389 return false;
2390
2391 /* If the called function is nested in the current one, it might access
2392 some of the caller's arguments, but could clobber them beforehand if
2393 the argument areas are shared. */
2394 if (fndecl && decl_function_context (fndecl) == current_function_decl)
2395 return false;
2396
2397 /* If this function requires more stack slots than the current
2398 function, we cannot change it into a sibling call.
2399 crtl->args.pretend_args_size is not part of the
2400 stack allocated by our caller. */
2401 if (args_size.constant > (crtl->args.size - crtl->args.pretend_args_size))
2402 return false;
2403
2404 /* If the callee pops its own arguments, then it must pop exactly
2405 the same number of arguments as the current function. */
2406 if (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2407 != targetm.calls.return_pops_args (current_function_decl,
2408 TREE_TYPE (current_function_decl),
2409 crtl->args.size))
2410 return false;
2411
2412 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
2413 return false;
2414
2415 /* All checks passed. */
2416 return true;
2417}
2418
c2f47e15 2419/* Generate all the code for a CALL_EXPR exp
66d433c7 2420 and return an rtx for its value.
2421 Store the value in TARGET (specified as an rtx) if convenient.
2422 If the value is stored in TARGET then TARGET is returned.
2423 If IGNORE is nonzero, then we ignore the value of the function call. */
2424
2425rtx
4c9e08a4 2426expand_call (tree exp, rtx target, int ignore)
66d433c7 2427{
60ecc450 2428 /* Nonzero if we are currently expanding a call. */
2429 static int currently_expanding_call = 0;
2430
66d433c7 2431 /* RTX for the function to be called. */
2432 rtx funexp;
60ecc450 2433 /* Sequence of insns to perform a normal "call". */
3663becd 2434 rtx_insn *normal_call_insns = NULL;
4ee9c684 2435 /* Sequence of insns to perform a tail "call". */
3663becd 2436 rtx_insn *tail_call_insns = NULL;
66d433c7 2437 /* Data type of the function. */
2438 tree funtype;
915e81b8 2439 tree type_arg_types;
16c9337c 2440 tree rettype;
66d433c7 2441 /* Declaration of the function being called,
2442 or 0 if the function is computed (not known by name). */
2443 tree fndecl = 0;
e100aadc 2444 /* The type of the function being called. */
2445 tree fntype;
4ee9c684 2446 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
60ecc450 2447 int pass;
66d433c7 2448
2449 /* Register in which non-BLKmode value will be returned,
2450 or 0 if no value or if value is BLKmode. */
2451 rtx valreg;
058a1b7a 2452 /* Register(s) in which bounds are returned. */
2453 rtx valbnd = NULL;
66d433c7 2454 /* Address where we should return a BLKmode value;
2455 0 if value not BLKmode. */
2456 rtx structure_value_addr = 0;
2457 /* Nonzero if that address is being passed by treating it as
2458 an extra, implicit first parameter. Otherwise,
2459 it is passed by being copied directly into struct_value_rtx. */
2460 int structure_value_addr_parm = 0;
cd46caee 2461 /* Holds the value of implicit argument for the struct value. */
2462 tree structure_value_addr_value = NULL_TREE;
66d433c7 2463 /* Size of aggregate value wanted, or zero if none wanted
2464 or if we are using the non-reentrant PCC calling convention
2465 or expecting the value in registers. */
997d68fe 2466 HOST_WIDE_INT struct_value_size = 0;
66d433c7 2467 /* Nonzero if called function returns an aggregate in memory PCC style,
2468 by returning the address of where to find it. */
2469 int pcc_struct_value = 0;
45550790 2470 rtx struct_value = 0;
66d433c7 2471
2472 /* Number of actual parameters in this call, including struct value addr. */
2473 int num_actuals;
2474 /* Number of named args. Args after this are anonymous ones
2475 and they must all go on the stack. */
2476 int n_named_args;
cd46caee 2477 /* Number of complex actual arguments that need to be split. */
2478 int num_complex_actuals = 0;
66d433c7 2479
2480 /* Vector of information about each argument.
2481 Arguments are numbered in the order they will be pushed,
2482 not the order they are written. */
2483 struct arg_data *args;
2484
2485 /* Total size in bytes of all the stack-parms scanned so far. */
2486 struct args_size args_size;
0e0be288 2487 struct args_size adjusted_args_size;
66d433c7 2488 /* Size of arguments before any adjustments (such as rounding). */
cc45e5e8 2489 int unadjusted_args_size;
66d433c7 2490 /* Data on reg parms scanned so far. */
39cba157 2491 CUMULATIVE_ARGS args_so_far_v;
2492 cumulative_args_t args_so_far;
66d433c7 2493 /* Nonzero if a reg parm has been scanned. */
2494 int reg_parm_seen;
66d433c7 2496
c87678e4 2497 /* Nonzero if we must avoid push-insns in the args for this call.
66d433c7 2498 If stack space is allocated for register parameters, but not by the
2499 caller, then it is preallocated in the fixed part of the stack frame.
2500 So the entire argument block must then be preallocated (i.e., we
2501 ignore PUSH_ROUNDING in that case). */
2502
4448f543 2503 int must_preallocate = !PUSH_ARGS;
66d433c7 2504
eb2f80f3 2505 /* Size of the stack reserved for parameter registers. */
2d7187c2 2506 int reg_parm_stack_space = 0;
2507
66d433c7 2508 /* Address of space preallocated for stack parms
2509 (on machines that lack push insns), or 0 if space not preallocated. */
2510 rtx argblock = 0;
2511
c8010b80 2512 /* Mask of ECF_ and ERF_ flags. */
dfe08167 2513 int flags = 0;
c8010b80 2514 int return_flags = 0;
4448f543 2515#ifdef REG_PARM_STACK_SPACE
66d433c7 2516 /* Define the boundary of the register parm stack space that needs to be
6e96b626 2517 saved, if any. */
2518 int low_to_save, high_to_save;
66d433c7 2519 rtx save_area = 0; /* Place that it is saved */
2520#endif
2521
66d433c7 2522 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2523 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 2524 char *stack_usage_map_buf = NULL;
66d433c7 2525
9069face 2526 int old_stack_allocated;
2527
2528 /* State variables to track stack modifications. */
66d433c7 2529 rtx old_stack_level = 0;
9069face 2530 int old_stack_arg_under_construction = 0;
65dccdb1 2531 int old_pending_adj = 0;
66d433c7 2532 int old_inhibit_defer_pop = inhibit_defer_pop;
9069face 2533
2534 /* Some stack pointer alterations we make are performed via
2535 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2536 which we then also need to save/restore along the way. */
31d035ca 2537 int old_stack_pointer_delta = 0;
9069face 2538
60ecc450 2539 rtx call_fusage;
c2f47e15 2540 tree addr = CALL_EXPR_FN (exp);
19cb6b50 2541 int i;
92e1ef5b 2542 /* The alignment of the stack, in bits. */
38413c80 2543 unsigned HOST_WIDE_INT preferred_stack_boundary;
92e1ef5b 2544 /* The alignment of the stack, in bytes. */
38413c80 2545 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
4ee9c684 2546 /* The static chain value to use for this call. */
2547 rtx static_chain_value;
dfe08167 2548 /* See if this is "nothrow" function call. */
2549 if (TREE_NOTHROW (exp))
2550 flags |= ECF_NOTHROW;
2551
4ee9c684 2552 /* See if we can find a DECL-node for the actual function, and get the
2553 function attributes (flags) from the function decl or type node. */
97a1590b 2554 fndecl = get_callee_fndecl (exp);
2555 if (fndecl)
66d433c7 2556 {
e100aadc 2557 fntype = TREE_TYPE (fndecl);
97a1590b 2558 flags |= flags_from_decl_or_type (fndecl);
c8010b80 2559 return_flags |= decl_return_flags (fndecl);
66d433c7 2560 }
97a1590b 2561 else
8a8cdb8d 2562 {
16c9337c 2563 fntype = TREE_TYPE (TREE_TYPE (addr));
e100aadc 2564 flags |= flags_from_decl_or_type (fntype);
8a8cdb8d 2565 }
16c9337c 2566 rettype = TREE_TYPE (exp);
d490e2f2 2567
e100aadc 2568 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
45550790 2569
4a081ddd 2570 /* Warn if this value is an aggregate type,
2571 regardless of which calling convention we are using for it. */
16c9337c 2572 if (AGGREGATE_TYPE_P (rettype))
efb9d9ee 2573 warning (OPT_Waggregate_return, "function call has aggregate value");
4a081ddd 2574
9c2a0c05 2575  /* If the result of a non-looping pure or const function call is
2576 ignored (or void), and none of its arguments are volatile, we can
2577 avoid expanding the call and just evaluate the arguments for
2578 side-effects. */
4a081ddd 2579 if ((flags & (ECF_CONST | ECF_PURE))
9c2a0c05 2580 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
4a081ddd 2581 && (ignore || target == const0_rtx
16c9337c 2582 || TYPE_MODE (rettype) == VOIDmode))
4a081ddd 2583 {
2584 bool volatilep = false;
2585 tree arg;
cd46caee 2586 call_expr_arg_iterator iter;
4a081ddd 2587
cd46caee 2588 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2589 if (TREE_THIS_VOLATILE (arg))
4a081ddd 2590 {
2591 volatilep = true;
2592 break;
2593 }
2594
2595 if (! volatilep)
2596 {
cd46caee 2597 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2598 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
4a081ddd 2599 return const0_rtx;
2600 }
2601 }
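
  /* At the source level this corresponds to something like the
     following (hypothetical declarations):

       extern int f (int) __attribute__ ((const));
       extern int g (void);
       ...
       f (g ());

     Since f is const and its result is ignored, only g () is
     expanded, for its side effects; no call to f is emitted.  */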
2602
2d7187c2 2603#ifdef REG_PARM_STACK_SPACE
fa20f865 2604 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2d7187c2 2605#endif
2d7187c2 2606
fa20f865 2607 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 2608 && reg_parm_stack_space > 0 && PUSH_ARGS)
997d68fe 2609 must_preallocate = 1;
997d68fe 2610
66d433c7 2611 /* Set up a place to return a structure. */
2612
2613 /* Cater to broken compilers. */
4cd5bb61 2614 if (aggregate_value_p (exp, fntype))
66d433c7 2615 {
2616 /* This call returns a big structure. */
2dd6f9ed 2617 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
66d433c7 2618
2619#ifdef PCC_STATIC_STRUCT_RETURN
f49c64ba 2620 {
2621 pcc_struct_value = 1;
f49c64ba 2622 }
2623#else /* not PCC_STATIC_STRUCT_RETURN */
2624 {
16c9337c 2625 struct_value_size = int_size_in_bytes (rettype);
66d433c7 2626
e012cdc7 2627 /* Even if it is semantically safe to use the target as the return
2628 slot, it may be not sufficiently aligned for the return type. */
2629 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
2630 && target
2631 && MEM_P (target)
2632 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
2633 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype),
2634 MEM_ALIGN (target))))
f49c64ba 2635 structure_value_addr = XEXP (target, 0);
2636 else
2637 {
f49c64ba 2638 /* For variable-sized objects, we must be called with a target
2639 specified. If we were to allocate space on the stack here,
2640 we would have no way of knowing when to free it. */
0ab48139 2641 rtx d = assign_temp (rettype, 1, 1);
930f0e87 2642 structure_value_addr = XEXP (d, 0);
f49c64ba 2643 target = 0;
2644 }
2645 }
2646#endif /* not PCC_STATIC_STRUCT_RETURN */
66d433c7 2647 }
2648
0e0be288 2649 /* Figure out the amount to which the stack should be aligned. */
0e0be288 2650 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
28992b23 2651 if (fndecl)
2652 {
35ee1c66 2653 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
9a27561f 2654 /* Without automatic stack alignment, we can't increase preferred
2655 stack boundary. With automatic stack alignment, it is
2656 unnecessary since unless we can guarantee that all callers will
2657 align the outgoing stack properly, callee has to align its
2658 stack anyway. */
2659 if (i
2660 && i->preferred_incoming_stack_boundary
2661 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
28992b23 2662 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2663 }
0e0be288 2664
2665 /* Operand 0 is a pointer-to-function; get the type of the function. */
95672afe 2666 funtype = TREE_TYPE (addr);
231bd014 2667 gcc_assert (POINTER_TYPE_P (funtype));
0e0be288 2668 funtype = TREE_TYPE (funtype);
2669
cd46caee 2670 /* Count whether there are actual complex arguments that need to be split
2671 into their real and imaginary parts. Munge the type_arg_types
2672 appropriately here as well. */
92d40bc4 2673 if (targetm.calls.split_complex_arg)
915e81b8 2674 {
cd46caee 2675 call_expr_arg_iterator iter;
2676 tree arg;
2677 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2678 {
2679 tree type = TREE_TYPE (arg);
2680 if (type && TREE_CODE (type) == COMPLEX_TYPE
2681 && targetm.calls.split_complex_arg (type))
2682 num_complex_actuals++;
2683 }
915e81b8 2684 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
915e81b8 2685 }
2686 else
2687 type_arg_types = TYPE_ARG_TYPES (funtype);
2688
0e0be288 2689 if (flags & ECF_MAY_BE_ALLOCA)
18d50ae6 2690 cfun->calls_alloca = 1;
0e0be288 2691
2692 /* If struct_value_rtx is 0, it means pass the address
cd46caee 2693 as if it were an extra parameter. Put the argument expression
2694 in structure_value_addr_value. */
45550790 2695 if (structure_value_addr && struct_value == 0)
0e0be288 2696 {
2697 /* If structure_value_addr is a REG other than
 2698	 virtual_outgoing_args_rtx, we can always use it.  If it
2699 is not a REG, we must always copy it into a register.
2700 If it is virtual_outgoing_args_rtx, we must copy it to another
2701 register in some cases. */
8ad4c111 2702 rtx temp = (!REG_P (structure_value_addr)
0e0be288 2703 || (ACCUMULATE_OUTGOING_ARGS
2704 && stack_arg_under_construction
2705 && structure_value_addr == virtual_outgoing_args_rtx)
0d568ddf 2706 ? copy_addr_to_reg (convert_memory_address
e100aadc 2707 (Pmode, structure_value_addr))
0e0be288 2708 : structure_value_addr);
2709
cd46caee 2710 structure_value_addr_value =
2711 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
058a1b7a 2712 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
0e0be288 2713 }
2714
2715 /* Count the arguments and set NUM_ACTUALS. */
cd46caee 2716 num_actuals =
2717 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
0e0be288 2718
2719 /* Compute number of named args.
30a10006 2720 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2721
2722 if (type_arg_types != 0)
2723 n_named_args
2724 = (list_length (type_arg_types)
2725 /* Count the struct value address, if it is passed as a parm. */
2726 + structure_value_addr_parm);
2727 else
2728 /* If we know nothing, treat all args as named. */
2729 n_named_args = num_actuals;
2730
2731 /* Start updating where the next arg would go.
2732
2733 On some machines (such as the PA) indirect calls have a different
2734 calling convention than normal calls. The fourth argument in
2735 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2736 or not. */
39cba157 2737 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2738 args_so_far = pack_cumulative_args (&args_so_far_v);
30a10006 2739
2740 /* Now possibly adjust the number of named args.
0e0be288 2741 Normally, don't include the last named arg if anonymous args follow.
8bdddbd1 2742 We do include the last named arg if
2743 targetm.calls.strict_argument_naming() returns nonzero.
0e0be288 2744 (If no anonymous args follow, the result of list_length is actually
2745 one too large. This is harmless.)
2746
a107cd89 2747 If targetm.calls.pretend_outgoing_varargs_named() returns
8bdddbd1 2748 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2749 this machine will be able to place unnamed args that were passed
2750 in registers into the stack. So treat all args as named. This
2751 allows the insns emitting for a specific argument list to be
2752 independent of the function declaration.
a107cd89 2753
2754 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2755 we do not have any reliable way to pass unnamed args in
2756 registers, so we must force them into memory. */
0e0be288 2757
30a10006 2758 if (type_arg_types != 0
39cba157 2759 && targetm.calls.strict_argument_naming (args_so_far))
30a10006 2760 ;
2761 else if (type_arg_types != 0
39cba157 2762 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
30a10006 2763 /* Don't include the last named arg. */
2764 --n_named_args;
0e0be288 2765 else
30a10006 2766 /* Treat all args as named. */
0e0be288 2767 n_named_args = num_actuals;
2768
0e0be288 2769 /* Make a vector to hold all the information about each arg. */
364c0c59 2770 args = XALLOCAVEC (struct arg_data, num_actuals);
f0af5a88 2771 memset (args, 0, num_actuals * sizeof (struct arg_data));
0e0be288 2772
00dddcf2 2773 /* Build up entries in the ARGS array, compute the size of the
2774 arguments into ARGS_SIZE, etc. */
0e0be288 2775 initialize_argument_information (num_actuals, args, &args_size,
cd46caee 2776 n_named_args, exp,
d8b9c828 2777 structure_value_addr_value, fndecl, fntype,
39cba157 2778 args_so_far, reg_parm_stack_space,
0e0be288 2779 &old_stack_level, &old_pending_adj,
eaa112a0 2780 &must_preallocate, &flags,
4ee9c684 2781 &try_tail_call, CALL_FROM_THUNK_P (exp));
0e0be288 2782
2783 if (args_size.var)
2dd6f9ed 2784 must_preallocate = 1;
0e0be288 2785
2786 /* Now make final decision about preallocating stack space. */
2787 must_preallocate = finalize_must_preallocate (must_preallocate,
2788 num_actuals, args,
2789 &args_size);
2790
2791 /* If the structure value address will reference the stack pointer, we
2792 must stabilize it. We don't need to do this if we know that we are
2793 not going to adjust the stack pointer in processing this call. */
2794
2795 if (structure_value_addr
2796 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2797 || reg_mentioned_p (virtual_outgoing_args_rtx,
2798 structure_value_addr))
2799 && (args_size.var
2800 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2801 structure_value_addr = copy_to_reg (structure_value_addr);
60ecc450 2802
0d568ddf 2803 /* Tail calls can make things harder to debug, and we've traditionally
4f8af819 2804 pushed these optimizations into -O2. Don't try if we're already
fdf2b689 2805 expanding a call, as that means we're an argument. Don't try if
011e6b51 2806	 there are cleanups, as we know there's code to follow the call.  */
60ecc450 2807
0e0be288 2808 if (currently_expanding_call++ != 0
2809 || !flag_optimize_sibling_calls
4ee9c684 2810 || args_size.var
3072d30e 2811 || dbg_cnt (tail_call) == false)
4ee9c684 2812 try_tail_call = 0;
0e0be288 2813
 2814  /* Check the remaining conditions under which the tail call must fail.  */
80e11038 2815 if (try_tail_call)
2816 try_tail_call = can_implement_as_sibling_call_p (exp, structure_value_addr, funtype,
2817 reg_parm_stack_space, fndecl,
2818 flags, addr, args_size);
4b066641 2819
4681dd41 2820 /* Check if caller and callee disagree in promotion of function
2821 return value. */
2822 if (try_tail_call)
2823 {
3754d046 2824 machine_mode caller_mode, caller_promoted_mode;
2825 machine_mode callee_mode, callee_promoted_mode;
4681dd41 2826 int caller_unsignedp, callee_unsignedp;
2827 tree caller_res = DECL_RESULT (current_function_decl);
2828
2829 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3b2411a8 2830 caller_mode = DECL_MODE (caller_res);
4681dd41 2831 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3b2411a8 2832 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2833 caller_promoted_mode
2834 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2835 &caller_unsignedp,
2836 TREE_TYPE (current_function_decl), 1);
2837 callee_promoted_mode
c879dbcf 2838 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3b2411a8 2839 &callee_unsignedp,
c879dbcf 2840 funtype, 1);
4681dd41 2841 if (caller_mode != VOIDmode
2842 && (caller_promoted_mode != callee_promoted_mode
2843 || ((caller_mode != caller_promoted_mode
2844 || callee_mode != callee_promoted_mode)
2845 && (caller_unsignedp != callee_unsignedp
2846 || GET_MODE_BITSIZE (caller_mode)
2847 < GET_MODE_BITSIZE (callee_mode)))))
2848 try_tail_call = 0;
2849 }
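
  /* Example of the mismatch guarded against above, with a
     hypothetical ABI: if the caller's DECL_RESULT is a signed char
     promoted to SImode while the callee's funtype promotes unsigned
     char, the promoted modes agree but the signedness differs, so a
     sibcall would let our caller see a value extended the wrong way;
     try_tail_call is cleared instead.  */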
2850
755ece1f 2851 /* Ensure current function's preferred stack boundary is at least
2852 what we need. Stack alignment may also increase preferred stack
2853 boundary. */
54d759e3 2854 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
edb7afe8 2855 crtl->preferred_stack_boundary = preferred_stack_boundary;
755ece1f 2856 else
2857 preferred_stack_boundary = crtl->preferred_stack_boundary;
d0285dd8 2858
0e0be288 2859 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4b066641 2860
60ecc450 2861 /* We want to make two insn chains; one for a sibling call, the other
2862 for a normal call. We will select one of the two chains after
2863 initial RTL generation is complete. */
6e96b626 2864 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
60ecc450 2865 {
2866 int sibcall_failure = 0;
35a3065a 2867 /* We want to emit any pending stack adjustments before the tail
60ecc450 2868 recursion "call". That way we know any adjustment after the tail
0d568ddf 2869 recursion call can be ignored if we indeed use the tail
60ecc450 2870 call expansion. */
b6d206a2 2871 saved_pending_stack_adjust save;
3663becd 2872 rtx_insn *insns, *before_call, *after_args;
2873 rtx next_arg_reg;
1e2b2ab3 2874
60ecc450 2875 if (pass == 0)
2876 {
60ecc450 2877 /* State variables we need to save and restore between
2878 iterations. */
b6d206a2 2879 save_pending_stack_adjust (&save);
60ecc450 2880 }
dfe08167 2881 if (pass)
2882 flags &= ~ECF_SIBCALL;
2883 else
2884 flags |= ECF_SIBCALL;
66d433c7 2885
60ecc450 2886 /* Other state variables that we must reinitialize each time
dfe08167 2887 through the loop (that are not initialized by the loop itself). */
60ecc450 2888 argblock = 0;
2889 call_fusage = 0;
2f921ec9 2890
c87678e4 2891 /* Start a new sequence for the normal call case.
66d433c7 2892
60ecc450 2893 From this point on, if the sibling call fails, we want to set
2894 sibcall_failure instead of continuing the loop. */
2895 start_sequence ();
412321ce 2896
60ecc450 2897 /* Don't let pending stack adjusts add up to too much.
2898 Also, do all pending adjustments now if there is any chance
2899 this might be a call to alloca or if we are expanding a sibling
ff3ae375 2900 call sequence.
82e95be3 2901 Also do the adjustments before a throwing call, otherwise
2902 exception handling can fail; PR 19225. */
60ecc450 2903 if (pending_stack_adjust >= 32
5edaabad 2904 || (pending_stack_adjust > 0
ff3ae375 2905 && (flags & ECF_MAY_BE_ALLOCA))
82e95be3 2906 || (pending_stack_adjust > 0
2907 && flag_exceptions && !(flags & ECF_NOTHROW))
60ecc450 2908 || pass == 0)
2909 do_pending_stack_adjust ();
66d433c7 2910
60ecc450 2911 /* Precompute any arguments as needed. */
02510658 2912 if (pass)
2dd6f9ed 2913 precompute_arguments (num_actuals, args);
66d433c7 2914
60ecc450 2915 /* Now we are about to start emitting insns that can be deleted
2916 if a libcall is deleted. */
2dd6f9ed 2917 if (pass && (flags & ECF_MALLOC))
60ecc450 2918 start_sequence ();
66d433c7 2919
edb7afe8 2920 if (pass == 0 && crtl->stack_protect_guard)
71d89928 2921 stack_protect_epilogue ();
2922
0e0be288 2923 adjusted_args_size = args_size;
481feae3 2924 /* Compute the actual size of the argument block required. The variable
2925 and constant sizes must be combined, the size may have to be rounded,
2926 and there may be a minimum required size. When generating a sibcall
2927 pattern, do not round up, since we'll be re-using whatever space our
2928 caller provided. */
2929 unadjusted_args_size
c87678e4 2930 = compute_argument_block_size (reg_parm_stack_space,
2931 &adjusted_args_size,
fa20f865 2932 fndecl, fntype,
481feae3 2933 (pass == 0 ? 0
2934 : preferred_stack_boundary));
2935
c87678e4 2936 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
481feae3 2937
02510658 2938 /* The argument block when performing a sibling call is the
a0c938f0 2939 incoming argument block. */
02510658 2940 if (pass == 0)
7ecc63d3 2941 {
27a7a23a 2942 argblock = crtl->args.internal_arg_pointer;
a8b58ffb 2943 if (STACK_GROWS_DOWNWARD)
2944 argblock
2945 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
2946 else
2947 argblock
2948 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
2949
7ecc63d3 2950 stored_args_map = sbitmap_alloc (args_size.constant);
53c5d9d4 2951 bitmap_clear (stored_args_map);
7ecc63d3 2952 }
481feae3 2953
60ecc450 2954 /* If we have no actual push instructions, or shouldn't use them,
2955 make space for all args right now. */
0e0be288 2956 else if (adjusted_args_size.var != 0)
66d433c7 2957 {
60ecc450 2958 if (old_stack_level == 0)
2959 {
e9c97615 2960 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 2961 old_stack_pointer_delta = stack_pointer_delta;
60ecc450 2962 old_pending_adj = pending_stack_adjust;
2963 pending_stack_adjust = 0;
60ecc450 2964 /* stack_arg_under_construction says whether a stack arg is
2965 being constructed at the old stack level. Pushing the stack
2966 gets a clean outgoing argument block. */
2967 old_stack_arg_under_construction = stack_arg_under_construction;
2968 stack_arg_under_construction = 0;
60ecc450 2969 }
0e0be288 2970 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
8c0dd614 2971 if (flag_stack_usage_info)
990495a7 2972 current_function_has_unbounded_dynamic_stack_size = 1;
66d433c7 2973 }
60ecc450 2974 else
2975 {
2976 /* Note that we must go through the motions of allocating an argument
2977 block even if the size is zero because we may be storing args
2978 in the area reserved for register arguments, which may be part of
2979 the stack frame. */
7221f864 2980
0e0be288 2981 int needed = adjusted_args_size.constant;
66d433c7 2982
60ecc450 2983 /* Store the maximum argument space used. It will be pushed by
2984 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2985 checking). */
66d433c7 2986
abe32cce 2987 if (needed > crtl->outgoing_args_size)
2988 crtl->outgoing_args_size = needed;
66d433c7 2989
60ecc450 2990 if (must_preallocate)
2991 {
4448f543 2992 if (ACCUMULATE_OUTGOING_ARGS)
2993 {
02510658 2994 /* Since the stack pointer will never be pushed, it is
2995 possible for the evaluation of a parm to clobber
2996 something we have already written to the stack.
2997 Since most function calls on RISC machines do not use
2998 the stack, this is uncommon, but must work correctly.
7221f864 2999
4448f543 3000 Therefore, we save any area of the stack that was already
02510658 3001 written and that we are using. Here we set up to do this
3002 by making a new stack usage map from the old one. The
c87678e4 3003 actual save will be done by store_one_arg.
7221f864 3004
4448f543 3005 Another approach might be to try to reorder the argument
3006 evaluations to avoid this conflicting stack usage. */
7221f864 3007
02510658 3008 /* Since we will be writing into the entire argument area,
3009 the map must be allocated for its entire size, not just
3010 the part that is the responsibility of the caller. */
fa20f865 3011 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 3012 needed += reg_parm_stack_space;
66d433c7 3013
ccccd62c 3014 if (ARGS_GROW_DOWNWARD)
3015 highest_outgoing_arg_in_use
3016 = MAX (initial_highest_arg_in_use, needed + 1);
3017 else
3018 highest_outgoing_arg_in_use
3019 = MAX (initial_highest_arg_in_use, needed);
3020
dd045aee 3021 free (stack_usage_map_buf);
4c36ffe6 3022 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3023 stack_usage_map = stack_usage_map_buf;
66d433c7 3024
4448f543 3025 if (initial_highest_arg_in_use)
8e547276 3026 memcpy (stack_usage_map, initial_stack_usage_map,
3027 initial_highest_arg_in_use);
d1b03b62 3028
4448f543 3029 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 3030 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 3031 (highest_outgoing_arg_in_use
3032 - initial_highest_arg_in_use));
3033 needed = 0;
d1b03b62 3034
02510658 3035 /* The address of the outgoing argument list must not be
3036 copied to a register here, because argblock would be left
3037 pointing to the wrong place after the call to
c87678e4 3038 allocate_dynamic_stack_space below. */
d1b03b62 3039
4448f543 3040 argblock = virtual_outgoing_args_rtx;
c87678e4 3041 }
4448f543 3042 else
7221f864 3043 {
4448f543 3044 if (inhibit_defer_pop == 0)
60ecc450 3045 {
4448f543 3046 /* Try to reuse some or all of the pending_stack_adjust
481feae3 3047 to get this space. */
3048 needed
c87678e4 3049 = (combine_pending_stack_adjustment_and_call
481feae3 3050 (unadjusted_args_size,
0e0be288 3051 &adjusted_args_size,
481feae3 3052 preferred_unit_stack_boundary));
3053
3054 /* combine_pending_stack_adjustment_and_call computes
3055 an adjustment before the arguments are allocated.
3056 Account for them and see whether or not the stack
3057 needs to go up or down. */
3058 needed = unadjusted_args_size - needed;
3059
3060 if (needed < 0)
4448f543 3061 {
481feae3 3062 /* We're releasing stack space. */
3063 /* ??? We can avoid any adjustment at all if we're
3064 already aligned. FIXME. */
3065 pending_stack_adjust = -needed;
3066 do_pending_stack_adjust ();
4448f543 3067 needed = 0;
3068 }
c87678e4 3069 else
481feae3 3070 /* We need to allocate space. We'll do that in
3071 push_block below. */
3072 pending_stack_adjust = 0;
60ecc450 3073 }
481feae3 3074
 3075 /* Special case this because the overhead of `push_block' in
3076 this case is non-trivial. */
4448f543 3077 if (needed == 0)
3078 argblock = virtual_outgoing_args_rtx;
60ecc450 3079 else
ad3b56f3 3080 {
3081 argblock = push_block (GEN_INT (needed), 0, 0);
ccccd62c 3082 if (ARGS_GROW_DOWNWARD)
3083 argblock = plus_constant (Pmode, argblock, needed);
ad3b56f3 3084 }
4448f543 3085
02510658 3086 /* We only really need to call `copy_to_reg' in the case
3087 where push insns are going to be used to pass ARGBLOCK
3088 to a function call in ARGS. In that case, the stack
3089 pointer changes value from the allocation point to the
3090 call point, and hence the value of
 3091 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But we might
3092 as well always do it. */
4448f543 3093 argblock = copy_to_reg (argblock);
9069face 3094 }
3095 }
3096 }
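	  /* Illustrative numbers for the reuse above: if unadjusted_args_size
	     is 16 bytes and the combined pending adjustment covers 24, then
	     needed becomes 16 - 24 = -8, the 8 surplus bytes are popped at
	     once, and argblock is simply virtual_outgoing_args_rtx; a
	     positive needed is instead the shortfall that push_block
	     allocates.  */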
60ecc450 3097
9069face 3098 if (ACCUMULATE_OUTGOING_ARGS)
3099 {
3100 /* The save/restore code in store_one_arg handles all
3101 cases except one: a constructor call (including a C
3102 function returning a BLKmode struct) to initialize
3103 an argument. */
3104 if (stack_arg_under_construction)
3105 {
63c68695 3106 rtx push_size
3107 = GEN_INT (adjusted_args_size.constant
fa20f865 3108 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
22c61100 3109 : TREE_TYPE (fndecl))) ? 0
63c68695 3110 : reg_parm_stack_space));
9069face 3111 if (old_stack_level == 0)
3112 {
e9c97615 3113 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 3114 old_stack_pointer_delta = stack_pointer_delta;
3115 old_pending_adj = pending_stack_adjust;
3116 pending_stack_adjust = 0;
3117 /* stack_arg_under_construction says whether a stack
3118 arg is being constructed at the old stack level.
3119 Pushing the stack gets a clean outgoing argument
3120 block. */
3121 old_stack_arg_under_construction
3122 = stack_arg_under_construction;
3123 stack_arg_under_construction = 0;
3124 /* Make a new map for the new argument list. */
dd045aee 3125 free (stack_usage_map_buf);
43959b95 3126 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3127 stack_usage_map = stack_usage_map_buf;
9069face 3128 highest_outgoing_arg_in_use = 0;
4448f543 3129 }
990495a7 3130 /* We can pass TRUE as the 4th argument because we just
3131 saved the stack pointer and will restore it right after
3132 the call. */
5be42b39 3133 allocate_dynamic_stack_space (push_size, 0,
3134 BIGGEST_ALIGNMENT, true);
60ecc450 3135 }
a3585b90 3136
9069face 3137 /* If argument evaluation might modify the stack pointer,
3138 copy the address of the argument list to a register. */
3139 for (i = 0; i < num_actuals; i++)
3140 if (args[i].pass_on_stack)
3141 {
3142 argblock = copy_addr_to_reg (argblock);
3143 break;
3144 }
3145 }
4c9e08a4 3146
60ecc450 3147 compute_argument_addresses (args, argblock, num_actuals);
a3585b90 3148
2d298c93 3149 /* Stack is properly aligned, pops can't safely be deferred during
3150 the evaluation of the arguments. */
3151 NO_DEFER_POP;
3152
3a12804f 3153 /* Precompute all register parameters. It isn't safe to compute
3154 anything once we have started filling any specific hard regs.
3155 TLS symbols sometimes need a call to resolve. Precompute
3156 register parameters before any stack pointer manipulation
 3157 to avoid an unaligned stack in the called function. */
3158 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3159
2d298c93 3160 OK_DEFER_POP;
3161
bf29c577 3162 /* Perform stack alignment before the first push (the last arg). */
3163 if (argblock == 0
85c35fbc 3164 && adjusted_args_size.constant > reg_parm_stack_space
0e0be288 3165 && adjusted_args_size.constant != unadjusted_args_size)
ff92623c 3166 {
60ecc450 3167 /* When the stack adjustment is pending, we get better code
3168 by combining the adjustments. */
c87678e4 3169 if (pending_stack_adjust
60ecc450 3170 && ! inhibit_defer_pop)
481feae3 3171 {
3172 pending_stack_adjust
c87678e4 3173 = (combine_pending_stack_adjustment_and_call
481feae3 3174 (unadjusted_args_size,
0e0be288 3175 &adjusted_args_size,
481feae3 3176 preferred_unit_stack_boundary));
3177 do_pending_stack_adjust ();
3178 }
60ecc450 3179 else if (argblock == 0)
0e0be288 3180 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
60ecc450 3181 - unadjusted_args_size));
60ecc450 3182 }
fa4f1f09 3183 /* Now that the stack is properly aligned, pops can't safely
3184 be deferred during the evaluation of the arguments. */
3185 NO_DEFER_POP;
66d433c7 3186
990495a7 3187 /* Record the maximum pushed stack space size. We need to delay
3188 doing it this far to take into account the optimization done
3189 by combine_pending_stack_adjustment_and_call. */
8c0dd614 3190 if (flag_stack_usage_info
990495a7 3191 && !ACCUMULATE_OUTGOING_ARGS
3192 && pass
3193 && adjusted_args_size.var == 0)
3194 {
3195 int pushed = adjusted_args_size.constant + pending_stack_adjust;
3196 if (pushed > current_function_pushed_stack_size)
3197 current_function_pushed_stack_size = pushed;
3198 }
3199
95672afe 3200 funexp = rtx_for_function_call (fndecl, addr);
66d433c7 3201
c2f47e15 3202 if (CALL_EXPR_STATIC_CHAIN (exp))
3203 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4ee9c684 3204 else
3205 static_chain_value = 0;
3206
4448f543 3207#ifdef REG_PARM_STACK_SPACE
60ecc450 3208 /* Save the fixed argument area if it's part of the caller's frame and
3209 is clobbered by argument setup for this call. */
02510658 3210 if (ACCUMULATE_OUTGOING_ARGS && pass)
4448f543 3211 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3212 &low_to_save, &high_to_save);
41332f48 3213#endif
66d433c7 3214
60ecc450 3215 /* Now store (and compute if necessary) all non-register parms.
3216 These come before register parms, since they can require block-moves,
3217 which could clobber the registers used for register parms.
3218 Parms which have partial registers are not stored here,
3219 but we do preallocate space here if they want that. */
66d433c7 3220
60ecc450 3221 for (i = 0; i < num_actuals; i++)
eb940a48 3222 {
058a1b7a 3223 /* Delay bounds until all other args are stored. */
3224 if (POINTER_BOUNDS_P (args[i].tree_value))
3225 continue;
3226 else if (args[i].reg == 0 || args[i].pass_on_stack)
eb940a48 3227 {
3663becd 3228 rtx_insn *before_arg = get_last_insn ();
eb940a48 3229
ba83222c 3230 /* We don't allow passing huge (> 2^30 B) arguments
3231 by value. It would cause an overflow later on. */
3232 if (adjusted_args_size.constant
3233 >= (1 << (HOST_BITS_PER_INT - 2)))
3234 {
3235 sorry ("passing too large argument on stack");
3236 continue;
3237 }
3238
eb940a48 3239 if (store_one_arg (&args[i], argblock, flags,
3240 adjusted_args_size.var != 0,
3241 reg_parm_stack_space)
3242 || (pass == 0
3243 && check_sibcall_argument_overlap (before_arg,
3244 &args[i], 1)))
3245 sibcall_failure = 1;
3246 }
3247
4143d08b 3248 if (args[i].stack)
b4eeceb9 3249 call_fusage
3250 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3251 gen_rtx_USE (VOIDmode, args[i].stack),
3252 call_fusage);
eb940a48 3253 }
60ecc450 3254
3255 /* If we have a parm that is passed in registers but not in memory
3256 and whose alignment does not permit a direct copy into registers,
3257 make a group of pseudos that correspond to each register that we
3258 will later fill. */
3259 if (STRICT_ALIGNMENT)
3260 store_unaligned_arguments_into_pseudos (args, num_actuals);
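	  /* Illustrative case for the above: with STRICT_ALIGNMENT, a BLKmode
	     argument whose value sits at a misaligned address cannot be moved
	     into its argument registers with whole-word loads, so each word
	     is first assembled in a pseudo (args[i].aligned_regs) and
	     load_register_parameters later copies the pseudos into the real
	     hard registers.  */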
3261
3262 /* Now store any partially-in-registers parm.
3263 This is the last place a block-move can happen. */
3264 if (reg_parm_seen)
3265 for (i = 0; i < num_actuals; i++)
3266 if (args[i].partial != 0 && ! args[i].pass_on_stack)
7ecc63d3 3267 {
3663becd 3268 rtx_insn *before_arg = get_last_insn ();
7ecc63d3 3269
a95e5776 3270 /* On targets with weird calling conventions (e.g. PA) it's
3271 hard to ensure that all cases of argument overlap between
3272 stack and registers work. Play it safe and bail out. */
3273 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3274 {
3275 sibcall_failure = 1;
3276 break;
3277 }
3278
57679d39 3279 if (store_one_arg (&args[i], argblock, flags,
3280 adjusted_args_size.var != 0,
3281 reg_parm_stack_space)
3282 || (pass == 0
3283 && check_sibcall_argument_overlap (before_arg,
42b11544 3284 &args[i], 1)))
7ecc63d3 3285 sibcall_failure = 1;
3286 }
66d433c7 3287
53597a55 3288 bool any_regs = false;
3289 for (i = 0; i < num_actuals; i++)
3290 if (args[i].reg != NULL_RTX)
3291 {
3292 any_regs = true;
3293 targetm.calls.call_args (args[i].reg, funtype);
3294 }
3295 if (!any_regs)
3296 targetm.calls.call_args (pc_rtx, funtype);
3297
3298 /* Figure out the register where the value, if any, will come back. */
3299 valreg = 0;
3300 valbnd = 0;
3301 if (TYPE_MODE (rettype) != VOIDmode
3302 && ! structure_value_addr)
3303 {
3304 if (pcc_struct_value)
3305 {
3306 valreg = hard_function_value (build_pointer_type (rettype),
3307 fndecl, NULL, (pass == 0));
3308 if (CALL_WITH_BOUNDS_P (exp))
3309 valbnd = targetm.calls.
3310 chkp_function_value_bounds (build_pointer_type (rettype),
3311 fndecl, (pass == 0));
3312 }
3313 else
3314 {
3315 valreg = hard_function_value (rettype, fndecl, fntype,
3316 (pass == 0));
3317 if (CALL_WITH_BOUNDS_P (exp))
3318 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
3319 fndecl,
3320 (pass == 0));
3321 }
3322
3323 /* If VALREG is a PARALLEL whose first member has a zero
3324 offset, use that. This is for targets such as m68k that
3325 return the same value in multiple places. */
3326 if (GET_CODE (valreg) == PARALLEL)
3327 {
3328 rtx elem = XVECEXP (valreg, 0, 0);
3329 rtx where = XEXP (elem, 0);
3330 rtx offset = XEXP (elem, 1);
3331 if (offset == const0_rtx
3332 && GET_MODE (where) == GET_MODE (valreg))
3333 valreg = where;
3334 }
3335 }
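	  /* For illustration (hand-written RTL, not target output), such a
	     PARALLEL might look like
	       (parallel:SI [(expr_list (reg:SI %d0) (const_int 0))
	                     (expr_list (reg:SI %a0) (const_int 0))])
	     in which case the test above reduces VALREG to plain %d0.  */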
3336
058a1b7a 3337 /* Store all bounds not passed in registers. */
3338 for (i = 0; i < num_actuals; i++)
3339 {
3340 if (POINTER_BOUNDS_P (args[i].tree_value)
3341 && !args[i].reg)
3342 store_bounds (&args[i],
3343 args[i].pointer_arg == -1
3344 ? NULL
3345 : &args[args[i].pointer_arg]);
3346 }
3347
60ecc450 3348 /* If register arguments require space on the stack and stack space
3349 was not preallocated, allocate stack space here for arguments
3350 passed in registers. */
fa20f865 3351 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 3352 && !ACCUMULATE_OUTGOING_ARGS
c87678e4 3353 && must_preallocate == 0 && reg_parm_stack_space > 0)
60ecc450 3354 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
985adbca 3355
60ecc450 3356 /* Pass the function the address in which to return a
3357 structure value. */
3358 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3359 {
0d568ddf 3360 structure_value_addr
85d654dd 3361 = convert_memory_address (Pmode, structure_value_addr);
45550790 3362 emit_move_insn (struct_value,
60ecc450 3363 force_reg (Pmode,
3364 force_operand (structure_value_addr,
3365 NULL_RTX)));
3366
8ad4c111 3367 if (REG_P (struct_value))
45550790 3368 use_reg (&call_fusage, struct_value);
60ecc450 3369 }
02c736f4 3370
c0e7e9f7 3371 after_args = get_last_insn ();
156cc902 3372 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
3373 static_chain_value, &call_fusage,
3374 reg_parm_seen, pass == 0);
66d433c7 3375
42b11544 3376 load_register_parameters (args, num_actuals, &call_fusage, flags,
3377 pass == 0, &sibcall_failure);
c87678e4 3378
60ecc450 3379 /* Save a pointer to the last insn before the call, so that we can
3380 later safely search backwards to find the CALL_INSN. */
3381 before_call = get_last_insn ();
66d433c7 3382
7a8d641b 3383 /* Set up next argument register. For sibling calls on machines
3384 with register windows this should be the incoming register. */
7a8d641b 3385 if (pass == 0)
39cba157 3386 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
f387af4f 3387 VOIDmode,
3388 void_type_node,
3389 true);
7a8d641b 3390 else
39cba157 3391 next_arg_reg = targetm.calls.function_arg (args_so_far,
f387af4f 3392 VOIDmode, void_type_node,
3393 true);
7a8d641b 3394
c8010b80 3395 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3396 {
3397 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
bf29c577 3398 arg_nr = num_actuals - arg_nr - 1;
3d38d682 3399 if (arg_nr >= 0
3400 && arg_nr < num_actuals
3401 && args[arg_nr].reg
c8010b80 3402 && valreg
3403 && REG_P (valreg)
3404 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3405 call_fusage
3406 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
d1f9b275 3407 gen_rtx_SET (valreg, args[arg_nr].reg),
c8010b80 3408 call_fusage);
3409 }
60ecc450 3410 /* All arguments and registers used for the call must be set up by
3411 now! */
3412
481feae3 3413 /* Stack must be properly aligned now. */
231bd014 3414 gcc_assert (!pass
3415 || !(stack_pointer_delta % preferred_unit_stack_boundary));
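	  /* With a 128-bit PREFERRED_STACK_BOUNDARY, for example,
	     preferred_unit_stack_boundary is 16, so stack_pointer_delta must
	     be a multiple of 16 bytes at every normal call; sibcalls
	     (pass == 0) reuse the caller's already-aligned frame and are
	     exempted above.  */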
fa4f1f09 3416
60ecc450 3417 /* Generate the actual call instruction. */
4ee9c684 3418 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
0e0be288 3419 adjusted_args_size.constant, struct_value_size,
7a8d641b 3420 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
39cba157 3421 flags, args_so_far);
60ecc450 3422
fcf56aaf 3423 if (flag_ipa_ra)
2e3b0d0f 3424 {
3663becd 3425 rtx_call_insn *last;
3426 rtx datum = NULL_RTX;
2e3b0d0f 3427 if (fndecl != NULL_TREE)
3428 {
3429 datum = XEXP (DECL_RTL (fndecl), 0);
3430 gcc_assert (datum != NULL_RTX
3431 && GET_CODE (datum) == SYMBOL_REF);
3432 }
3433 last = last_call_insn ();
3434 add_reg_note (last, REG_CALL_DECL, datum);
3435 }
3436
c0e7e9f7 3437 /* If the call setup or the call itself overlaps with any part
 3438 of the argument setup, we probably clobbered our call address.
3439 In that case we can't do sibcalls. */
3440 if (pass == 0
3441 && check_sibcall_argument_overlap (after_args, 0, 0))
3442 sibcall_failure = 1;
3443
05d18e8b 3444 /* If a non-BLKmode value is returned at the most significant end
3445 of a register, shift the register right by the appropriate amount
3446 and update VALREG accordingly. BLKmode values are handled by the
3447 group load/store machinery below. */
3448 if (!structure_value_addr
3449 && !pcc_struct_value
d8ef55fc 3450 && TYPE_MODE (rettype) != VOIDmode
16c9337c 3451 && TYPE_MODE (rettype) != BLKmode
d8ef55fc 3452 && REG_P (valreg)
16c9337c 3453 && targetm.calls.return_in_msb (rettype))
05d18e8b 3454 {
16c9337c 3455 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
05d18e8b 3456 sibcall_failure = 1;
16c9337c 3457 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
05d18e8b 3458 }
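	  /* Sketch of the case above: a big-endian target returning a QImode
	     value in the most significant byte of a 4-byte register keeps it
	     at bit offset 24, so shift_return_value emits the 24-bit right
	     shift needed before the value can be used as an ordinary QImode
	     reg.  */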
3459
2dd6f9ed 3460 if (pass && (flags & ECF_MALLOC))
60ecc450 3461 {
3462 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3663becd 3463 rtx_insn *last, *insns;
60ecc450 3464
c87678e4 3465 /* The return value from a malloc-like function is a pointer. */
16c9337c 3466 if (TREE_CODE (rettype) == POINTER_TYPE)
10836fcc 3467 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
60ecc450 3468
3469 emit_move_insn (temp, valreg);
3470
 3471 /* The return value from a malloc-like function cannot alias
3472 anything else. */
3473 last = get_last_insn ();
a1ddb869 3474 add_reg_note (last, REG_NOALIAS, temp);
60ecc450 3475
3476 /* Write out the sequence. */
3477 insns = get_insns ();
3478 end_sequence ();
31d3e01c 3479 emit_insn (insns);
60ecc450 3480 valreg = temp;
3481 }
66d433c7 3482
3072d30e 3483 /* For calls to `setjmp', etc., inform
3484 function.c:setjmp_warnings that it should complain if
3485 nonvolatile values are live. For functions that cannot
3486 return, inform flow that control does not fall through. */
66d433c7 3487
4fec1d6c 3488 if ((flags & ECF_NORETURN) || pass == 0)
02c736f4 3489 {
9239aee6 3490 /* The barrier must be emitted
60ecc450 3491 immediately after the CALL_INSN. Some ports emit more
3492 than just a CALL_INSN above, so we must search for it here. */
66d433c7 3493
3663becd 3494 rtx_insn *last = get_last_insn ();
6d7dc5b9 3495 while (!CALL_P (last))
60ecc450 3496 {
3497 last = PREV_INSN (last);
3498 /* There was no CALL_INSN? */
231bd014 3499 gcc_assert (last != before_call);
60ecc450 3500 }
66d433c7 3501
9239aee6 3502 emit_barrier_after (last);
20f5f6d0 3503
b494d193 3504 /* Stack adjustments after a noreturn call are dead code.
3505 However when NO_DEFER_POP is in effect, we must preserve
3506 stack_pointer_delta. */
3507 if (inhibit_defer_pop == 0)
3508 {
3509 stack_pointer_delta = old_stack_allocated;
3510 pending_stack_adjust = 0;
3511 }
60ecc450 3512 }
66d433c7 3513
60ecc450 3514 /* If value type not void, return an rtx for the value. */
66d433c7 3515
16c9337c 3516 if (TYPE_MODE (rettype) == VOIDmode
60ecc450 3517 || ignore)
5edaabad 3518 target = const0_rtx;
60ecc450 3519 else if (structure_value_addr)
3520 {
e16ceb8e 3521 if (target == 0 || !MEM_P (target))
60ecc450 3522 {
f7c44134 3523 target
16c9337c 3524 = gen_rtx_MEM (TYPE_MODE (rettype),
3525 memory_address (TYPE_MODE (rettype),
f7c44134 3526 structure_value_addr));
16c9337c 3527 set_mem_attributes (target, rettype, 1);
60ecc450 3528 }
3529 }
3530 else if (pcc_struct_value)
566d850a 3531 {
60ecc450 3532 /* This is the special C++ case where we need to
3533 know what the true target was. We take care to
3534 never use this value more than once in one expression. */
16c9337c 3535 target = gen_rtx_MEM (TYPE_MODE (rettype),
60ecc450 3536 copy_to_reg (valreg));
16c9337c 3537 set_mem_attributes (target, rettype, 1);
566d850a 3538 }
60ecc450 3539 /* Handle calls that return values in multiple non-contiguous locations.
3540 The Irix 6 ABI has examples of this. */
3541 else if (GET_CODE (valreg) == PARALLEL)
3542 {
4ee9c684 3543 if (target == 0)
2d0fd66d 3544 target = emit_group_move_into_temps (valreg);
5bd5c1c2 3545 else if (rtx_equal_p (target, valreg))
3546 ;
3547 else if (GET_CODE (target) == PARALLEL)
 3548 /* Handle the result of an emit_group_move_into_temps
3549 call in the previous pass. */
3550 emit_group_move (target, valreg);
3551 else
16c9337c 3552 emit_group_store (target, valreg, rettype,
3553 int_size_in_bytes (rettype));
60ecc450 3554 }
3555 else if (target
16c9337c 3556 && GET_MODE (target) == TYPE_MODE (rettype)
60ecc450 3557 && GET_MODE (target) == GET_MODE (valreg))
3558 {
aadbaa40 3559 bool may_overlap = false;
3560
360738f1 3561 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3562 reg to a plain register. */
90af1361 3563 if (!REG_P (target) || HARD_REGISTER_P (target))
3564 valreg = avoid_likely_spilled_reg (valreg);
360738f1 3565
aadbaa40 3566 /* If TARGET is a MEM in the argument area, and we have
3567 saved part of the argument area, then we can't store
3568 directly into TARGET as it may get overwritten when we
3569 restore the argument save area below. Don't work too
3570 hard though and simply force TARGET to a register if it
3571 is a MEM; the optimizer is quite likely to sort it out. */
3572 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3573 for (i = 0; i < num_actuals; i++)
3574 if (args[i].save_area)
3575 {
3576 may_overlap = true;
3577 break;
3578 }
dbe1f550 3579
aadbaa40 3580 if (may_overlap)
3581 target = copy_to_reg (valreg);
3582 else
3583 {
3584 /* TARGET and VALREG cannot be equal at this point
3585 because the latter would not have
3586 REG_FUNCTION_VALUE_P true, while the former would if
3587 it were referring to the same register.
3588
3589 If they refer to the same register, this move will be
3590 a no-op, except when function inlining is being
3591 done. */
3592 emit_move_insn (target, valreg);
3593
3594 /* If we are setting a MEM, this code must be executed.
3595 Since it is emitted after the call insn, sibcall
3596 optimization cannot be performed in that case. */
3597 if (MEM_P (target))
3598 sibcall_failure = 1;
3599 }
60ecc450 3600 }
60ecc450 3601 else
90af1361 3602 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
66d433c7 3603
3b2411a8 3604 /* If we promoted this return value, make the proper SUBREG.
3605 TARGET might be const0_rtx here, so be careful. */
3606 if (REG_P (target)
16c9337c 3607 && TYPE_MODE (rettype) != BLKmode
3608 && GET_MODE (target) != TYPE_MODE (rettype))
45550790 3609 {
16c9337c 3610 tree type = rettype;
3b2411a8 3611 int unsignedp = TYPE_UNSIGNED (type);
3612 int offset = 0;
3754d046 3613 machine_mode pmode;
3b2411a8 3614
3615 /* Ensure we promote as expected, and get the new unsignedness. */
3616 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3617 funtype, 1);
3618 gcc_assert (GET_MODE (target) == pmode);
3619
3620 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3621 && (GET_MODE_SIZE (GET_MODE (target))
3622 > GET_MODE_SIZE (TYPE_MODE (type))))
231bd014 3623 {
3b2411a8 3624 offset = GET_MODE_SIZE (GET_MODE (target))
3625 - GET_MODE_SIZE (TYPE_MODE (type));
3626 if (! BYTES_BIG_ENDIAN)
3627 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3628 else if (! WORDS_BIG_ENDIAN)
3629 offset %= UNITS_PER_WORD;
231bd014 3630 }
3b2411a8 3631
3632 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3633 SUBREG_PROMOTED_VAR_P (target) = 1;
e8629f9e 3634 SUBREG_PROMOTED_SET (target, unsignedp);
45550790 3635 }
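	  /* Sketch of the result: a signed char returned promoted to SImode
	     ends up as (subreg:QI (reg:SI ...) OFFSET) with
	     SUBREG_PROMOTED_VAR_P set, so later code can read the QImode
	     value while knowing the surrounding word is already
	     sign-extended.  */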
23eb5fa6 3636
60ecc450 3637 /* If size of args is variable or this was a constructor call for a stack
3638 argument, restore saved stack-pointer value. */
66d433c7 3639
ff3ae375 3640 if (old_stack_level)
60ecc450 3641 {
3663becd 3642 rtx_insn *prev = get_last_insn ();
dfe00a8f 3643
e9c97615 3644 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9069face 3645 stack_pointer_delta = old_stack_pointer_delta;
dfe00a8f 3646
897445c7 3647 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
dfe00a8f 3648
60ecc450 3649 pending_stack_adjust = old_pending_adj;
80f06481 3650 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
60ecc450 3651 stack_arg_under_construction = old_stack_arg_under_construction;
3652 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3653 stack_usage_map = initial_stack_usage_map;
60ecc450 3654 sibcall_failure = 1;
3655 }
02510658 3656 else if (ACCUMULATE_OUTGOING_ARGS && pass)
60ecc450 3657 {
66d433c7 3658#ifdef REG_PARM_STACK_SPACE
60ecc450 3659 if (save_area)
6e96b626 3660 restore_fixed_argument_area (save_area, argblock,
3661 high_to_save, low_to_save);
41332f48 3662#endif
66d433c7 3663
60ecc450 3664 /* If we saved any argument areas, restore them. */
3665 for (i = 0; i < num_actuals; i++)
3666 if (args[i].save_area)
3667 {
3754d046 3668 machine_mode save_mode = GET_MODE (args[i].save_area);
60ecc450 3669 rtx stack_area
3670 = gen_rtx_MEM (save_mode,
3671 memory_address (save_mode,
3672 XEXP (args[i].stack_slot, 0)));
3673
3674 if (save_mode != BLKmode)
3675 emit_move_insn (stack_area, args[i].save_area);
3676 else
0378dbdc 3677 emit_block_move (stack_area, args[i].save_area,
241399f6 3678 GEN_INT (args[i].locate.size.constant),
0378dbdc 3679 BLOCK_OP_CALL_PARM);
60ecc450 3680 }
66d433c7 3681
60ecc450 3682 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3683 stack_usage_map = initial_stack_usage_map;
3684 }
66d433c7 3685
97354ae4 3686 /* If this was alloca, record the new stack level. */
3687 if (flags & ECF_MAY_BE_ALLOCA)
3688 record_new_stack_level ();
66d433c7 3689
60ecc450 3690 /* Free up storage we no longer need. */
3691 for (i = 0; i < num_actuals; ++i)
dd045aee 3692 free (args[i].aligned_regs);
60ecc450 3693
53597a55 3694 targetm.calls.end_call_args ();
3695
60ecc450 3696 insns = get_insns ();
3697 end_sequence ();
3698
3699 if (pass == 0)
3700 {
3701 tail_call_insns = insns;
3702
60ecc450 3703 /* Restore the pending stack adjustment now that we have
3704 finished generating the sibling call sequence. */
91b70175 3705
b6d206a2 3706 restore_pending_stack_adjust (&save);
0e0be288 3707
3708 /* Prepare arg structure for next iteration. */
c87678e4 3709 for (i = 0; i < num_actuals; i++)
0e0be288 3710 {
3711 args[i].value = 0;
3712 args[i].aligned_regs = 0;
3713 args[i].stack = 0;
3714 }
7ecc63d3 3715
3716 sbitmap_free (stored_args_map);
3663becd 3717 internal_arg_pointer_exp_state.scan_start = NULL;
f1f41a6c 3718 internal_arg_pointer_exp_state.cache.release ();
60ecc450 3719 }
3720 else
9069face 3721 {
3722 normal_call_insns = insns;
3723
3724 /* Verify that we've deallocated all the stack we used. */
4fec1d6c 3725 gcc_assert ((flags & ECF_NORETURN)
231bd014 3726 || (old_stack_allocated
3727 == stack_pointer_delta - pending_stack_adjust));
9069face 3728 }
ae8d6151 3729
3730 /* If something prevents making this a sibling call,
3731 zero out the sequence. */
3732 if (sibcall_failure)
3663becd 3733 tail_call_insns = NULL;
4ee9c684 3734 else
3735 break;
60ecc450 3736 }
3737
365db11e 3738 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4ee9c684 3739 arguments too, as the argument area is now clobbered by the call. */
3740 if (tail_call_insns)
60ecc450 3741 {
4ee9c684 3742 emit_insn (tail_call_insns);
18d50ae6 3743 crtl->tail_call_emit = true;
60ecc450 3744 }
3745 else
31d3e01c 3746 emit_insn (normal_call_insns);
66d433c7 3747
60ecc450 3748 currently_expanding_call--;
6d801f27 3749
dd045aee 3750 free (stack_usage_map_buf);
a331ea1b 3751
058a1b7a 3752 /* Join result with returned bounds so caller may use them if needed. */
3753 target = chkp_join_splitted_slot (target, valbnd);
3754
66d433c7 3755 return target;
3756}
915e81b8 3757
4ee9c684 3758/* A sibling call sequence invalidates any REG_EQUIV notes made for
3759 this function's incoming arguments.
3760
3761 At the start of RTL generation we know the only REG_EQUIV notes
0a227ed5 3762 in the rtl chain are those for incoming arguments, so we can look
3763 for REG_EQUIV notes between the start of the function and the
3764 NOTE_INSN_FUNCTION_BEG.
4ee9c684 3765
3766 This is (slight) overkill. We could keep track of the highest
3767 argument we clobber and be more selective in removing notes, but it
3768 does not seem to be worth the effort. */
0a227ed5 3769
4ee9c684 3770void
3771fixup_tail_calls (void)
3772{
3663becd 3773 rtx_insn *insn;
0a227ed5 3774
3775 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3776 {
750a330e 3777 rtx note;
3778
0a227ed5 3779 /* There are never REG_EQUIV notes for the incoming arguments
3780 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3781 if (NOTE_P (insn)
ad4583d9 3782 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
0a227ed5 3783 break;
3784
750a330e 3785 note = find_reg_note (insn, REG_EQUIV, 0);
3786 if (note)
3787 remove_note (insn, note);
3788 note = find_reg_note (insn, REG_EQUIV, 0);
3789 gcc_assert (!note);
0a227ed5 3790 }
4ee9c684 3791}
3792
915e81b8 3793/* Traverse a list of TYPES and expand all complex types into their
3794 components. */
5ab29745 3795static tree
915e81b8 3796split_complex_types (tree types)
3797{
3798 tree p;
3799
92d40bc4 3800 /* Before allocating memory, check for the common case of no complex types. */
3801 for (p = types; p; p = TREE_CHAIN (p))
3802 {
3803 tree type = TREE_VALUE (p);
3804 if (TREE_CODE (type) == COMPLEX_TYPE
3805 && targetm.calls.split_complex_arg (type))
a0c938f0 3806 goto found;
92d40bc4 3807 }
3808 return types;
3809
3810 found:
915e81b8 3811 types = copy_list (types);
3812
3813 for (p = types; p; p = TREE_CHAIN (p))
3814 {
3815 tree complex_type = TREE_VALUE (p);
3816
92d40bc4 3817 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3818 && targetm.calls.split_complex_arg (complex_type))
915e81b8 3819 {
3820 tree next, imag;
3821
3822 /* Rewrite complex type with component type. */
3823 TREE_VALUE (p) = TREE_TYPE (complex_type);
3824 next = TREE_CHAIN (p);
3825
3826 /* Add another component type for the imaginary part. */
3827 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3828 TREE_CHAIN (p) = imag;
3829 TREE_CHAIN (imag) = next;
3830
3831 /* Skip the newly created node. */
3832 p = TREE_CHAIN (p);
3833 }
3834 }
3835
3836 return types;
3837}
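/* For example, on a target whose split_complex_arg hook accepts
   COMPLEX_TYPE, a parameter list of (complex double, int) is rewritten
   by the loop above into (double, double, int): the real part keeps the
   original TREE_LIST node and a fresh node is spliced in for the
   imaginary part.  */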
66d433c7 3838\f
20f7032f 3839/* Output a library call to function FUN (a SYMBOL_REF rtx).
c87678e4 3840 The RETVAL parameter specifies whether the return value needs to be saved;
ebf77775 3841 the other parameters are documented in the emit_library_call function below. */
2a631e19 3842
20f7032f 3843static rtx
4c9e08a4 3844emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3845 enum libcall_type fn_type,
3754d046 3846 machine_mode outmode, int nargs, va_list p)
b39693dd 3847{
9bdaf1ba 3848 /* Total size in bytes of all the stack-parms scanned so far. */
3849 struct args_size args_size;
3850 /* Size of arguments before any adjustments (such as rounding). */
3851 struct args_size original_args_size;
19cb6b50 3852 int argnum;
9bdaf1ba 3853 rtx fun;
22c61100 3854 /* TODO: choose the correct decl type of orgfun. Sadly this information
 3855 isn't present here, so we default to the native calling ABI. */
60e2260d 3856 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
fa20f865 3857 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
9bdaf1ba 3858 int count;
9bdaf1ba 3859 rtx argblock = 0;
39cba157 3860 CUMULATIVE_ARGS args_so_far_v;
3861 cumulative_args_t args_so_far;
c87678e4 3862 struct arg
3863 {
3864 rtx value;
3754d046 3865 machine_mode mode;
c87678e4 3866 rtx reg;
3867 int partial;
241399f6 3868 struct locate_and_pad_arg_data locate;
c87678e4 3869 rtx save_area;
3870 };
9bdaf1ba 3871 struct arg *argvec;
3872 int old_inhibit_defer_pop = inhibit_defer_pop;
3873 rtx call_fusage = 0;
3874 rtx mem_value = 0;
16204096 3875 rtx valreg;
9bdaf1ba 3876 int pcc_struct_value = 0;
3877 int struct_value_size = 0;
df4b504c 3878 int flags;
9bdaf1ba 3879 int reg_parm_stack_space = 0;
9bdaf1ba 3880 int needed;
3663becd 3881 rtx_insn *before_call;
8700bf9e 3882 bool have_push_fusage;
771d21fa 3883 tree tfom; /* type_for_mode (outmode, 0) */
9bdaf1ba 3884
4448f543 3885#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3886 /* Define the boundary of the register parm stack space that needs to be
 3887 saved, if any. */
75a70cf9 3888 int low_to_save = 0, high_to_save = 0;
c87678e4 3889 rtx save_area = 0; /* Place that it is saved. */
9bdaf1ba 3890#endif
3891
9bdaf1ba 3892 /* Size of the stack reserved for parameter registers. */
3893 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3894 char *initial_stack_usage_map = stack_usage_map;
a331ea1b 3895 char *stack_usage_map_buf = NULL;
9bdaf1ba 3896
45550790 3897 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3898
9bdaf1ba 3899#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 3900 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
9bdaf1ba 3901#endif
3902
1c1a1b9a 3903 /* By default, library functions cannot throw. */
df4b504c 3904 flags = ECF_NOTHROW;
3905
ab7ccfa2 3906 switch (fn_type)
3907 {
3908 case LCT_NORMAL:
2a0c81bf 3909 break;
ab7ccfa2 3910 case LCT_CONST:
2a0c81bf 3911 flags |= ECF_CONST;
3912 break;
ab7ccfa2 3913 case LCT_PURE:
2a0c81bf 3914 flags |= ECF_PURE;
ab7ccfa2 3915 break;
ab7ccfa2 3916 case LCT_NORETURN:
3917 flags |= ECF_NORETURN;
3918 break;
3919 case LCT_THROW:
1c1a1b9a 3920 flags &= ~ECF_NOTHROW;
ab7ccfa2 3921 break;
0ff18307 3922 case LCT_RETURNS_TWICE:
3923 flags = ECF_RETURNS_TWICE;
3924 break;
ab7ccfa2 3925 }
9bdaf1ba 3926 fun = orgfun;
3927
9bdaf1ba 3928 /* Ensure current function's preferred stack boundary is at least
3929 what we need. */
edb7afe8 3930 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3931 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
9bdaf1ba 3932
3933 /* If this kind of value comes back in memory,
3934 decide where in memory it should come back. */
771d21fa 3935 if (outmode != VOIDmode)
9bdaf1ba 3936 {
dc24ddbd 3937 tfom = lang_hooks.types.type_for_mode (outmode, 0);
45550790 3938 if (aggregate_value_p (tfom, 0))
771d21fa 3939 {
9bdaf1ba 3940#ifdef PCC_STATIC_STRUCT_RETURN
771d21fa 3941 rtx pointer_reg
46b3ff29 3942 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
771d21fa 3943 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3944 pcc_struct_value = 1;
3945 if (value == 0)
3946 value = gen_reg_rtx (outmode);
9bdaf1ba 3947#else /* not PCC_STATIC_STRUCT_RETURN */
771d21fa 3948 struct_value_size = GET_MODE_SIZE (outmode);
e16ceb8e 3949 if (value != 0 && MEM_P (value))
771d21fa 3950 mem_value = value;
3951 else
0ab48139 3952 mem_value = assign_temp (tfom, 1, 1);
9bdaf1ba 3953#endif
771d21fa 3954 /* This call returns a big structure. */
2dd6f9ed 3955 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
771d21fa 3956 }
9bdaf1ba 3957 }
771d21fa 3958 else
3959 tfom = void_type_node;
9bdaf1ba 3960
3961 /* ??? Unfinished: must pass the memory address as an argument. */
3962
3963 /* Copy all the libcall-arguments out of the varargs data
3964 and into a vector ARGVEC.
3965
3966 Compute how to pass each argument. We only support a very small subset
3967 of the full argument passing conventions to limit complexity here since
3968 library functions shouldn't have many args. */
3969
364c0c59 3970 argvec = XALLOCAVEC (struct arg, nargs + 1);
f0af5a88 3971 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
9bdaf1ba 3972
e1efd914 3973#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
39cba157 3974 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
e1efd914 3975#else
39cba157 3976 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
e1efd914 3977#endif
39cba157 3978 args_so_far = pack_cumulative_args (&args_so_far_v);
9bdaf1ba 3979
3980 args_size.constant = 0;
3981 args_size.var = 0;
3982
3983 count = 0;
3984
3985 push_temp_slots ();
3986
3987 /* If there's a structure value address to be passed,
3988 either pass it in the special place, or pass it as an extra argument. */
45550790 3989 if (mem_value && struct_value == 0 && ! pcc_struct_value)
9bdaf1ba 3990 {
3991 rtx addr = XEXP (mem_value, 0);
a0c938f0 3992
9bdaf1ba 3993 nargs++;
3994
a56c46d2 3995 /* Make sure it is a reasonable operand for a move or push insn. */
3996 if (!REG_P (addr) && !MEM_P (addr)
ca316360 3997 && !(CONSTANT_P (addr)
3998 && targetm.legitimate_constant_p (Pmode, addr)))
a56c46d2 3999 addr = force_operand (addr, NULL_RTX);
4000
9bdaf1ba 4001 argvec[count].value = addr;
4002 argvec[count].mode = Pmode;
4003 argvec[count].partial = 0;
4004
39cba157 4005 argvec[count].reg = targetm.calls.function_arg (args_so_far,
f387af4f 4006 Pmode, NULL_TREE, true);
39cba157 4007 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
f054eb3c 4008 NULL_TREE, 1) == 0);
9bdaf1ba 4009
4010 locate_and_pad_parm (Pmode, NULL_TREE,
2e735c0d 4011#ifdef STACK_PARMS_IN_REG_PARM_AREA
a0c938f0 4012 1,
2e735c0d 4013#else
4014 argvec[count].reg != 0,
4015#endif
2e090bf6 4016 reg_parm_stack_space, 0,
4017 NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 4018
9bdaf1ba 4019 if (argvec[count].reg == 0 || argvec[count].partial != 0
4020 || reg_parm_stack_space > 0)
241399f6 4021 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 4022
39cba157 4023 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
9bdaf1ba 4024
4025 count++;
4026 }
4027
4028 for (; count < nargs; count++)
4029 {
4030 rtx val = va_arg (p, rtx);
3754d046 4031 machine_mode mode = (machine_mode) va_arg (p, int);
adaf4ef0 4032 int unsigned_p = 0;
9bdaf1ba 4033
4034 /* We cannot convert the arg value to the mode the library wants here;
4035 must do it earlier where we know the signedness of the arg. */
231bd014 4036 gcc_assert (mode != BLKmode
4037 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
9bdaf1ba 4038
a56c46d2 4039 /* Make sure it is a reasonable operand for a move or push insn. */
4040 if (!REG_P (val) && !MEM_P (val)
ca316360 4041 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
a56c46d2 4042 val = force_operand (val, NULL_RTX);
4043
39cba157 4044 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
9bdaf1ba 4045 {
ddaf7ad3 4046 rtx slot;
13f08ee7 4047 int must_copy
39cba157 4048 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
ddaf7ad3 4049
9c2a0c05 4050 /* If this was a CONST function, it is now PURE since it now
4051 reads memory. */
5096b8b0 4052 if (flags & ECF_CONST)
4053 {
4054 flags &= ~ECF_CONST;
4055 flags |= ECF_PURE;
4056 }
4057
590c3166 4058 if (MEM_P (val) && !must_copy)
006e2d5a 4059 {
4060 tree val_expr = MEM_EXPR (val);
4061 if (val_expr)
4062 mark_addressable (val_expr);
4063 slot = val;
4064 }
41dc12b4 4065 else
ddaf7ad3 4066 {
dc24ddbd 4067 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
0ab48139 4068 1, 1);
ddaf7ad3 4069 emit_move_insn (slot, val);
4070 }
387bc205 4071
a683e787 4072 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4073 gen_rtx_USE (VOIDmode, slot),
4074 call_fusage);
ddaf7ad3 4075 if (must_copy)
4076 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4077 gen_rtx_CLOBBER (VOIDmode,
4078 slot),
4079 call_fusage);
4080
9bdaf1ba 4081 mode = Pmode;
ddaf7ad3 4082 val = force_operand (XEXP (slot, 0), NULL_RTX);
9bdaf1ba 4083 }
9bdaf1ba 4084
adaf4ef0 4085 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
9bdaf1ba 4086 argvec[count].mode = mode;
adaf4ef0 4087 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
39cba157 4088 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
f387af4f 4089 NULL_TREE, true);
9bdaf1ba 4090
9bdaf1ba 4091 argvec[count].partial
39cba157 4092 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
9bdaf1ba 4093
11fb947f 4094 if (argvec[count].reg == 0
4095 || argvec[count].partial != 0
4096 || reg_parm_stack_space > 0)
4097 {
4098 locate_and_pad_parm (mode, NULL_TREE,
2e735c0d 4099#ifdef STACK_PARMS_IN_REG_PARM_AREA
11fb947f 4100 1,
2e735c0d 4101#else
11fb947f 4102 argvec[count].reg != 0,
4103#endif
2e090bf6 4104 reg_parm_stack_space, argvec[count].partial,
11fb947f 4105 NULL_TREE, &args_size, &argvec[count].locate);
4106 args_size.constant += argvec[count].locate.size.constant;
4107 gcc_assert (!argvec[count].locate.size.var);
4108 }
4109#ifdef BLOCK_REG_PADDING
4110 else
4111 /* The argument is passed entirely in registers. See at which
4112 end it should be padded. */
4113 argvec[count].locate.where_pad =
4114 BLOCK_REG_PADDING (mode, NULL_TREE,
4115 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
2e735c0d 4116#endif
9bdaf1ba 4117
39cba157 4118 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
9bdaf1ba 4119 }
9bdaf1ba 4120
9bdaf1ba 4121 /* If this machine requires an external definition for library
4122 functions, write one out. */
4123 assemble_external_libcall (fun);
4124
4125 original_args_size = args_size;
91b70175 4126 args_size.constant = (((args_size.constant
4127 + stack_pointer_delta
4128 + STACK_BYTES - 1)
4129 / STACK_BYTES
4130 * STACK_BYTES)
4131 - stack_pointer_delta);
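  /* Worked example of the rounding above: with STACK_BYTES == 16,
     stack_pointer_delta == 8 and a raw constant size of 20, the result
     is (20 + 8 + 15) / 16 * 16 - 8 == 24, so after pushing 24 bytes the
     total delta is 32 and the stack stays 16-byte aligned at the
     call.  */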
9bdaf1ba 4132
4133 args_size.constant = MAX (args_size.constant,
4134 reg_parm_stack_space);
4135
fa20f865 4136 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 4137 args_size.constant -= reg_parm_stack_space;
9bdaf1ba 4138
abe32cce 4139 if (args_size.constant > crtl->outgoing_args_size)
4140 crtl->outgoing_args_size = args_size.constant;
9bdaf1ba 4141
8c0dd614 4142 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
990495a7 4143 {
4144 int pushed = args_size.constant + pending_stack_adjust;
4145 if (pushed > current_function_pushed_stack_size)
4146 current_function_pushed_stack_size = pushed;
4147 }
4148
4448f543 4149 if (ACCUMULATE_OUTGOING_ARGS)
4150 {
4151 /* Since the stack pointer will never be pushed, it is possible for
4152 the evaluation of a parm to clobber something we have already
4153 written to the stack. Since most function calls on RISC machines
4154 do not use the stack, this is uncommon, but must work correctly.
9bdaf1ba 4155
4448f543 4156 Therefore, we save any area of the stack that was already written
4157 and that we are using. Here we set up to do this by making a new
4158 stack usage map from the old one.
9bdaf1ba 4159
4448f543 4160 Another approach might be to try to reorder the argument
4161 evaluations to avoid this conflicting stack usage. */
9bdaf1ba 4162
4448f543 4163 needed = args_size.constant;
9bdaf1ba 4164
4448f543 4165 /* Since we will be writing into the entire argument area, the
4166 map must be allocated for its entire size, not just the part that
4167 is the responsibility of the caller. */
fa20f865 4168 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 4169 needed += reg_parm_stack_space;
9bdaf1ba 4170
ccccd62c 4171 if (ARGS_GROW_DOWNWARD)
4172 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4173 needed + 1);
4174 else
4175 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
4176
4c36ffe6 4177 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 4178 stack_usage_map = stack_usage_map_buf;
9bdaf1ba 4179
4448f543 4180 if (initial_highest_arg_in_use)
8e547276 4181 memcpy (stack_usage_map, initial_stack_usage_map,
4182 initial_highest_arg_in_use);
9bdaf1ba 4183
4448f543 4184 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 4185 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 4186 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4187 needed = 0;
9bdaf1ba 4188
9c0a756f 4189 /* We must be careful to use virtual regs before they're instantiated,
a0c938f0 4190 and real regs afterwards. Loop optimization, for example, can create
9c0a756f 4191 new libcalls after we've instantiated the virtual regs, and if we
4192 use virtuals anyway, they won't match the rtl patterns. */
9bdaf1ba 4193
9c0a756f 4194 if (virtuals_instantiated)
29c05e22 4195 argblock = plus_constant (Pmode, stack_pointer_rtx,
4196 STACK_POINTER_OFFSET);
9c0a756f 4197 else
4198 argblock = virtual_outgoing_args_rtx;
4448f543 4199 }
4200 else
4201 {
4202 if (!PUSH_ARGS)
4203 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
4204 }
9bdaf1ba 4205
bf29c577 4206 /* We push args individually in reverse order, performing stack alignment
9bdaf1ba 4207 before the first push (the last arg). */
bf29c577 4208 if (argblock == 0)
9bdaf1ba 4209 anti_adjust_stack (GEN_INT (args_size.constant
4210 - original_args_size.constant));
9bdaf1ba 4211
bf29c577 4212 argnum = nargs - 1;
9bdaf1ba 4213
4448f543 4214#ifdef REG_PARM_STACK_SPACE
4215 if (ACCUMULATE_OUTGOING_ARGS)
4216 {
4217 /* The argument list is the property of the called routine and it
4218 may clobber it. If the fixed area has been used for previous
6e96b626 4219 parameters, we must save and restore it. */
4220 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4221 &low_to_save, &high_to_save);
9bdaf1ba 4222 }
4223#endif
c87678e4 4224
53597a55 4225 /* When expanding a normal call, args are stored in push order,
4226 which is the reverse of what we have here. */
4227 bool any_regs = false;
4228 for (int i = nargs; i-- > 0; )
4229 if (argvec[i].reg != NULL_RTX)
4230 {
4231 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4232 any_regs = true;
4233 }
4234 if (!any_regs)
4235 targetm.calls.call_args (pc_rtx, NULL_TREE);
4236
9bdaf1ba 4237 /* Push the args that need to be pushed. */
4238
8700bf9e 4239 have_push_fusage = false;
4240
9bdaf1ba 4241 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4242 are to be pushed. */
bf29c577 4243 for (count = 0; count < nargs; count++, argnum--)
9bdaf1ba 4244 {
3754d046 4245 machine_mode mode = argvec[argnum].mode;
19cb6b50 4246 rtx val = argvec[argnum].value;
9bdaf1ba 4247 rtx reg = argvec[argnum].reg;
4248 int partial = argvec[argnum].partial;
c2fd5e89 4249 unsigned int parm_align = argvec[argnum].locate.boundary;
4448f543 4250 int lower_bound = 0, upper_bound = 0, i;
9bdaf1ba 4251
4252 if (! (reg != 0 && partial == 0))
4253 {
4143d08b 4254 rtx use;
4255
4448f543 4256 if (ACCUMULATE_OUTGOING_ARGS)
4257 {
02510658 4258 /* If this is being stored into a pre-allocated, fixed-size,
4259 stack area, save any previous data at that location. */
9bdaf1ba 4260
ccccd62c 4261 if (ARGS_GROW_DOWNWARD)
4262 {
4263 /* stack_slot is negative, but we want to index stack_usage_map
4264 with positive values. */
4265 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4266 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4267 }
4268 else
4269 {
4270 lower_bound = argvec[argnum].locate.slot_offset.constant;
4271 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4272 }
9bdaf1ba 4273
fd2c0c1d 4274 i = lower_bound;
4275 /* Don't worry about things in the fixed argument area;
4276 it has already been saved. */
4277 if (i < reg_parm_stack_space)
4278 i = reg_parm_stack_space;
4279 while (i < upper_bound && stack_usage_map[i] == 0)
4280 i++;
9bdaf1ba 4281
fd2c0c1d 4282 if (i < upper_bound)
4448f543 4283 {
241399f6 4284 /* We need to make a save area. */
4285 unsigned int size
4286 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3754d046 4287 machine_mode save_mode
241399f6 4288 = mode_for_size (size, MODE_INT, 1);
4289 rtx adr
29c05e22 4290 = plus_constant (Pmode, argblock,
241399f6 4291 argvec[argnum].locate.offset.constant);
4448f543 4292 rtx stack_area
241399f6 4293 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4448f543 4294
f9c6a9c3 4295 if (save_mode == BLKmode)
4296 {
4297 argvec[argnum].save_area
4298 = assign_stack_temp (BLKmode,
0ab48139 4299 argvec[argnum].locate.size.constant
4300 );
f9c6a9c3 4301
d2b9158b 4302 emit_block_move (validize_mem
4303 (copy_rtx (argvec[argnum].save_area)),
a0c938f0 4304 stack_area,
f9c6a9c3 4305 GEN_INT (argvec[argnum].locate.size.constant),
4306 BLOCK_OP_CALL_PARM);
4307 }
4308 else
4309 {
4310 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4311
4312 emit_move_insn (argvec[argnum].save_area, stack_area);
4313 }
4448f543 4314 }
9bdaf1ba 4315 }
325d1c45 4316
c2fd5e89 4317 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
0378dbdc 4318 partial, reg, 0, argblock,
241399f6 4319 GEN_INT (argvec[argnum].locate.offset.constant),
4320 reg_parm_stack_space,
a95e5776 4321 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
9bdaf1ba 4322
9bdaf1ba 4323 /* Now mark the segment we just used. */
4448f543 4324 if (ACCUMULATE_OUTGOING_ARGS)
4325 for (i = lower_bound; i < upper_bound; i++)
4326 stack_usage_map[i] = 1;
9bdaf1ba 4327
4328 NO_DEFER_POP;
2eb9302a 4329
4143d08b 4330 /* Indicate argument access so that alias.c knows that these
4331 values are live. */
4332 if (argblock)
29c05e22 4333 use = plus_constant (Pmode, argblock,
4143d08b 4334 argvec[argnum].locate.offset.constant);
8700bf9e 4335 else if (have_push_fusage)
4336 continue;
4143d08b 4337 else
8700bf9e 4338 {
4339 /* When arguments are pushed, trying to tell alias.c where
4340 exactly this argument is won't work, because the
4341 auto-increment causes confusion. So we merely indicate
4342 that we access something with a known mode somewhere on
4343 the stack. */
4344 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4345 gen_rtx_SCRATCH (Pmode));
4346 have_push_fusage = true;
4347 }
4143d08b 4348 use = gen_rtx_MEM (argvec[argnum].mode, use);
4349 use = gen_rtx_USE (VOIDmode, use);
4350 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
9bdaf1ba 4351 }
4352 }
4353
bf29c577 4354 argnum = nargs - 1;
9bdaf1ba 4355
82c7907c 4356 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
9bdaf1ba 4357
4358 /* Now load any reg parms into their regs. */
4359
4360 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4361 are to be pushed. */
bf29c577 4362 for (count = 0; count < nargs; count++, argnum--)
9bdaf1ba 4363 {
3754d046 4364 machine_mode mode = argvec[argnum].mode;
19cb6b50 4365 rtx val = argvec[argnum].value;
9bdaf1ba 4366 rtx reg = argvec[argnum].reg;
4367 int partial = argvec[argnum].partial;
ab6e3ce0 4368#ifdef BLOCK_REG_PADDING
37cd19a4 4369 int size = 0;
ab6e3ce0 4370#endif
37cd19a4 4371
9bdaf1ba 4372 /* Handle calls that pass values in multiple non-contiguous
4373 locations. The PA64 has examples of this for library calls. */
4374 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bec917cc 4375 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
9bdaf1ba 4376 else if (reg != 0 && partial == 0)
37cd19a4 4377 {
4378 emit_move_insn (reg, val);
4379#ifdef BLOCK_REG_PADDING
4380 size = GET_MODE_SIZE (argvec[argnum].mode);
4381
4382 /* Copied from load_register_parameters. */
4383
 4384 /* Handle the case where we have a value that needs shifting
 4385 up to the msb, e.g. a QImode value when we're padding
4386 upward on a BYTES_BIG_ENDIAN machine. */
4387 if (size < UNITS_PER_WORD
4388 && (argvec[argnum].locate.where_pad
4389 == (BYTES_BIG_ENDIAN ? upward : downward)))
4390 {
4391 rtx x;
4392 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4393
4394 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4395 report the whole reg as used. Strictly speaking, the
4396 call only uses SIZE bytes at the msb end, but it doesn't
4397 seem worth generating rtl to say that. */
4398 reg = gen_rtx_REG (word_mode, REGNO (reg));
4399 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4400 if (x != reg)
4401 emit_move_insn (reg, x);
4402 }
4403#endif
4404 }
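	  /* Illustrative numbers for the shift above: a QImode argument
	     (size 1) on a 64-bit BYTES_BIG_ENDIAN target gives
	     shift == (8 - 1) * BITS_PER_UNIT == 56, moving the byte into
	     the most significant position of the word-mode register.  */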
9bdaf1ba 4405
4406 NO_DEFER_POP;
4407 }
4408
9bdaf1ba 4409 /* Any regs containing parms remain in use through the call. */
4410 for (count = 0; count < nargs; count++)
4411 {
4412 rtx reg = argvec[count].reg;
4413 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4414 use_group_regs (&call_fusage, reg);
4415 else if (reg != 0)
6c6f16e5 4416 {
4417 int partial = argvec[count].partial;
4418 if (partial)
4419 {
4420 int nregs;
4421 gcc_assert (partial % UNITS_PER_WORD == 0);
4422 nregs = partial / UNITS_PER_WORD;
4423 use_regs (&call_fusage, REGNO (reg), nregs);
4424 }
4425 else
4426 use_reg (&call_fusage, reg);
4427 }
9bdaf1ba 4428 }
4429
4430 /* Pass the function the address in which to return a structure value. */
45550790 4431 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
9bdaf1ba 4432 {
45550790 4433 emit_move_insn (struct_value,
9bdaf1ba 4434 force_reg (Pmode,
4435 force_operand (XEXP (mem_value, 0),
4436 NULL_RTX)));
8ad4c111 4437 if (REG_P (struct_value))
45550790 4438 use_reg (&call_fusage, struct_value);
9bdaf1ba 4439 }
4440
4441 /* Don't allow popping to be deferred, since then
4442 cse'ing of library calls could delete a call and leave the pop. */
4443 NO_DEFER_POP;
16204096 4444 valreg = (mem_value == 0 && outmode != VOIDmode
578d1295 4445 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
9bdaf1ba 4446
481feae3 4447 /* Stack must be properly aligned now. */
231bd014 4448 gcc_assert (!(stack_pointer_delta
4449 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
fa4f1f09 4450
644c283b 4451 before_call = get_last_insn ();
4452
9bdaf1ba 4453 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4454 will set inhibit_defer_pop to that value. */
20f7032f 4455 /* The return type is needed to decide how many bytes the function pops.
4456 Signedness plays no role in that, so for simplicity, we pretend it's
4457 always signed. We also assume that the list of arguments passed has
4458 no impact, so we pretend it is unknown. */
9bdaf1ba 4459
4ee9c684 4460 emit_call_1 (fun, NULL,
c87678e4 4461 get_identifier (XSTR (orgfun, 0)),
771d21fa 4462 build_function_type (tfom, NULL_TREE),
c87678e4 4463 original_args_size.constant, args_size.constant,
9bdaf1ba 4464 struct_value_size,
39cba157 4465 targetm.calls.function_arg (args_so_far,
f387af4f 4466 VOIDmode, void_type_node, true),
16204096 4467 valreg,
39cba157 4468 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
9bdaf1ba 4469
fcf56aaf 4470 if (flag_ipa_ra)
2e3b0d0f 4471 {
9ed997be 4472 rtx datum = orgfun;
2e3b0d0f 4473 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
9ed997be 4474 rtx_call_insn *last = last_call_insn ();
2e3b0d0f 4475 add_reg_note (last, REG_CALL_DECL, datum);
4476 }
4477
37cd19a4 4478 /* Right-shift returned value if necessary. */
4479 if (!pcc_struct_value
4480 && TYPE_MODE (tfom) != BLKmode
4481 && targetm.calls.return_in_msb (tfom))
4482 {
4483 shift_return_value (TYPE_MODE (tfom), false, valreg);
4484 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4485 }
4486
53597a55 4487 targetm.calls.end_call_args ();
4488
3072d30e 4489 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4490 that it should complain if nonvolatile values are live. For
4491 functions that cannot return, inform flow that control does not
4492 fall through. */
4fec1d6c 4493 if (flags & ECF_NORETURN)
644c283b 4494 {
9239aee6 4495 /* The barrier must be emitted
644c283b 4496 immediately after the CALL_INSN. Some ports emit more than
4497 just a CALL_INSN above, so we must search for it here. */
3663becd 4498 rtx_insn *last = get_last_insn ();
6d7dc5b9 4499 while (!CALL_P (last))
644c283b 4500 {
4501 last = PREV_INSN (last);
4502 /* There was no CALL_INSN? */
231bd014 4503 gcc_assert (last != before_call);
644c283b 4504 }
4505
9239aee6 4506 emit_barrier_after (last);
644c283b 4507 }
4508
43926c6a 4509 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4510 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4511 if (flags & ECF_NOTHROW)
4512 {
3663becd 4513 rtx_insn *last = get_last_insn ();
43926c6a 4514 while (!CALL_P (last))
4515 {
4516 last = PREV_INSN (last);
4517 /* There was no CALL_INSN? */
4518 gcc_assert (last != before_call);
4519 }
4520
4521 make_reg_eh_region_note_nothrow_nononlocal (last);
4522 }
4523
9bdaf1ba 4524 /* Now restore inhibit_defer_pop to its actual original value. */
4525 OK_DEFER_POP;
4526
4527 pop_temp_slots ();
4528
4529 /* Copy the value to the right place. */
20f7032f 4530 if (outmode != VOIDmode && retval)
9bdaf1ba 4531 {
4532 if (mem_value)
4533 {
4534 if (value == 0)
4535 value = mem_value;
4536 if (value != mem_value)
4537 emit_move_insn (value, mem_value);
4538 }
40651bac 4539 else if (GET_CODE (valreg) == PARALLEL)
4540 {
4541 if (value == 0)
4542 value = gen_reg_rtx (outmode);
4c3a0ea5 4543 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
40651bac 4544 }
9bdaf1ba 4545 else
4e1a3169 4546 {
3b2411a8 4547 /* Convert to the proper mode if a promotion has been active. */
4e1a3169 4548 if (GET_MODE (valreg) != outmode)
4549 {
4550 int unsignedp = TYPE_UNSIGNED (tfom);
4551
3b2411a8 4552 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4553 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4e1a3169 4554 == GET_MODE (valreg));
4e1a3169 4555 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4556 }
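	  /* A hedged example of the promotion case just handled: if
	     OUTMODE is HImode but promote_function_mode widens scalar
	     returns to SImode (as on many RISC targets), VALREG arrives
	     in SImode and convert_modes truncates it back to the HImode
	     value the caller asked for. */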
4557
4558 if (value != 0)
4559 emit_move_insn (value, valreg);
4560 else
4561 value = valreg;
4562 }
9bdaf1ba 4563 }
4564
4448f543 4565 if (ACCUMULATE_OUTGOING_ARGS)
9bdaf1ba 4566 {
4448f543 4567#ifdef REG_PARM_STACK_SPACE
4568 if (save_area)
6e96b626 4569 restore_fixed_argument_area (save_area, argblock,
4570 high_to_save, low_to_save);
9bdaf1ba 4571#endif
c87678e4 4572
4448f543 4573 /* If we saved any argument areas, restore them. */
4574 for (count = 0; count < nargs; count++)
4575 if (argvec[count].save_area)
4576 {
3754d046 4577 machine_mode save_mode = GET_MODE (argvec[count].save_area);
29c05e22 4578 rtx adr = plus_constant (Pmode, argblock,
241399f6 4579 argvec[count].locate.offset.constant);
4580 rtx stack_area = gen_rtx_MEM (save_mode,
4581 memory_address (save_mode, adr));
4448f543 4582
f9c6a9c3 4583 if (save_mode == BLKmode)
4584 emit_block_move (stack_area,
d2b9158b 4585 validize_mem
4586 (copy_rtx (argvec[count].save_area)),
f9c6a9c3 4587 GEN_INT (argvec[count].locate.size.constant),
4588 BLOCK_OP_CALL_PARM);
4589 else
4590 emit_move_insn (stack_area, argvec[count].save_area);
4448f543 4591 }
9bdaf1ba 4592
4448f543 4593 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4594 stack_usage_map = initial_stack_usage_map;
4595 }
b39693dd 4596
dd045aee 4597 free (stack_usage_map_buf);
a331ea1b 4598
20f7032f 4599 return value;
4600
4601}
4602\f
4603/* Output a library call to function ORGFUN (a SYMBOL_REF rtx),
 4604 emitting the call sequence immediately,
 4605 for a value of mode OUTMODE,
 4606 with NARGS different arguments, passed as alternating rtx values
 4607 and machine_modes to convert them to.
20f7032f 4608
2dd6f9ed 4609 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4610 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4611 other types of library calls. */
20f7032f 4612
4613void
ee582a61 4614emit_library_call (rtx orgfun, enum libcall_type fn_type,
3754d046 4615 machine_mode outmode, int nargs, ...)
20f7032f 4616{
ee582a61 4617 va_list p;
4c9e08a4 4618
ee582a61 4619 va_start (p, nargs);
26dfc457 4620 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
ee582a61 4621 va_end (p);
20f7032f 4622}
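/* Usage sketch (hypothetical call site; `__example_copy' and the rtx
   values DST, SRC and COUNT are made up for illustration): the
   variadic part is NARGS alternating (rtx value, machine_mode)
   pairs. */

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example_copy"),
		     LCT_NORMAL, VOIDmode, 3,
		     dst, Pmode,
		     src, Pmode,
		     count, SImode);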
4623\f
4624/* Like emit_library_call except that an extra argument, VALUE,
4625 comes second and says where to store the result.
4626 (If VALUE is zero, this function chooses a convenient way
 4627 to return the value.)
4628
4629 This function returns an rtx for where the value is to be found.
4630 If VALUE is nonzero, VALUE is returned. */
4631
4632rtx
ee582a61 4633emit_library_call_value (rtx orgfun, rtx value,
4634 enum libcall_type fn_type,
3754d046 4635 machine_mode outmode, int nargs, ...)
20f7032f 4636{
7ad77798 4637 rtx result;
ee582a61 4638 va_list p;
4c9e08a4 4639
ee582a61 4640 va_start (p, nargs);
7ad77798 4641 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4642 nargs, p);
ee582a61 4643 va_end (p);
20f7032f 4644
7ad77798 4645 return result;
8ddf1c7e 4646}
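/* Usage sketch (hypothetical; `__example_mul3', OP0 and OP1 are made
   up): passing NULL_RTX for VALUE lets the function choose where the
   SImode result lives. */

  rtx result = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode,
							    "__example_mul3"),
					NULL_RTX, LCT_CONST, SImode, 2,
					op0, SImode,
					op1, SImode);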
4647\f
058a1b7a 4648
4649/* Store pointer bounds argument ARG into Bounds Table entry
4650 associated with PARM. */
4651static void
4652store_bounds (struct arg_data *arg, struct arg_data *parm)
4653{
4654 rtx slot = NULL, ptr = NULL, addr = NULL;
4655
4656 /* We may pass bounds not associated with any pointer. */
4657 if (!parm)
4658 {
4659 gcc_assert (arg->special_slot);
4660 slot = arg->special_slot;
4661 ptr = const0_rtx;
4662 }
4663 /* Find pointer associated with bounds and where it is
4664 passed. */
4665 else
4666 {
4667 if (!parm->reg)
4668 {
4669 gcc_assert (!arg->special_slot);
4670
4671 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
4672 }
4673 else if (REG_P (parm->reg))
4674 {
4675 gcc_assert (arg->special_slot);
4676 slot = arg->special_slot;
4677
4678 if (MEM_P (parm->value))
4679 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
4680 else if (REG_P (parm->value))
4681 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
4682 else
4683 {
4684 gcc_assert (!arg->pointer_offset);
4685 ptr = parm->value;
4686 }
4687 }
4688 else
4689 {
4690 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
4691
4692 gcc_assert (arg->special_slot);
4693 slot = arg->special_slot;
4694
4695 if (parm->parallel_value)
4696 ptr = chkp_get_value_with_offs (parm->parallel_value,
4697 GEN_INT (arg->pointer_offset));
4698 else
4699 gcc_unreachable ();
4700 }
4701 }
4702
4703 /* Expand bounds. */
4704 if (!arg->value)
4705 arg->value = expand_normal (arg->tree_value);
4706
4707 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
4708}
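/* Summary of the dispatch above: bounds with no associated pointer go
   to ARG's special slot with a zero pointer; for a pointer passed on
   the stack, ADDR names its stack slot; for a pointer in a register or
   PARALLEL, the bounds are keyed by the special slot and PTR is
   extracted at POINTER_OFFSET. The target hook then records them in
   the Bounds Table. */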
4709
66d433c7 4710/* Store a single argument for a function call
4711 into the register or memory area where it must be passed.
4712 *ARG describes the argument value and where to pass it.
4713
4714 ARGBLOCK is the address of the stack-block for all the arguments,
f9e15121 4715 or 0 on a machine where arguments are pushed individually.
66d433c7 4716
4717 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
c87678e4 4718 so we must be careful about how the stack is used.
66d433c7 4719
 4720 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 4721 argument. It is used with ACCUMULATE_OUTGOING_ARGS to indicate
 4722 that we need not worry about saving and restoring the stack.
4723
57679d39 4724 FNDECL is the declaration of the function we are calling.
c87678e4 4725
d10cfa8d 4726 Return nonzero if this arg should cause sibcall failure,
57679d39 4727 zero otherwise. */
66d433c7 4728
57679d39 4729static int
4c9e08a4 4730store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4731 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
66d433c7 4732{
19cb6b50 4733 tree pval = arg->tree_value;
66d433c7 4734 rtx reg = 0;
4735 int partial = 0;
4736 int used = 0;
df9f2bb6 4737 int i, lower_bound = 0, upper_bound = 0;
57679d39 4738 int sibcall_failure = 0;
66d433c7 4739
4740 if (TREE_CODE (pval) == ERROR_MARK)
57679d39 4741 return 1;
66d433c7 4742
1b117c60 4743 /* Push a new temporary level for any temporaries we make for
4744 this argument. */
4745 push_temp_slots ();
4746
02510658 4747 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
66d433c7 4748 {
4448f543 4749 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4750 save any previous data at that location. */
4751 if (argblock && ! variable_size && arg->stack)
4752 {
ccccd62c 4753 if (ARGS_GROW_DOWNWARD)
4754 {
4755 /* stack_slot is negative, but we want to index stack_usage_map
4756 with positive values. */
4757 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4758 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4759 else
4760 upper_bound = 0;
66d433c7 4761
ccccd62c 4762 lower_bound = upper_bound - arg->locate.size.constant;
4763 }
4448f543 4764 else
ccccd62c 4765 {
4766 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4767 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4768 else
4769 lower_bound = 0;
66d433c7 4770
ccccd62c 4771 upper_bound = lower_bound + arg->locate.size.constant;
4772 }
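	  /* Worked example with assumed numbers: if the slot address is
	     (plus argblock -16) and locate.size.constant is 8, the
	     ARGS_GROW_DOWNWARD branch yields upper_bound = 17 and
	     lower_bound = 9; with upward-growing arguments and offset
	     +16 it yields lower_bound = 16 and upper_bound = 24.
	     Either way the byte range indexes stack_usage_map with
	     nonnegative values. */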
66d433c7 4773
fd2c0c1d 4774 i = lower_bound;
4775 /* Don't worry about things in the fixed argument area;
4776 it has already been saved. */
4777 if (i < reg_parm_stack_space)
4778 i = reg_parm_stack_space;
4779 while (i < upper_bound && stack_usage_map[i] == 0)
4780 i++;
66d433c7 4781
fd2c0c1d 4782 if (i < upper_bound)
66d433c7 4783 {
241399f6 4784 /* We need to make a save area. */
4785 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
3754d046 4786 machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
241399f6 4787 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4788 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4448f543 4789
4790 if (save_mode == BLKmode)
4791 {
9f495e8d 4792 arg->save_area
4793 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
4448f543 4794 preserve_temp_slots (arg->save_area);
d2b9158b 4795 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
4796 stack_area,
c2ca1bab 4797 GEN_INT (arg->locate.size.constant),
0378dbdc 4798 BLOCK_OP_CALL_PARM);
4448f543 4799 }
4800 else
4801 {
4802 arg->save_area = gen_reg_rtx (save_mode);
4803 emit_move_insn (arg->save_area, stack_area);
4804 }
66d433c7 4805 }
4806 }
4807 }
b3caaea3 4808
66d433c7 4809 /* If this isn't going to be placed on both the stack and in registers,
4810 set up the register and number of words. */
4811 if (! arg->pass_on_stack)
04d6fcf8 4812 {
4813 if (flags & ECF_SIBCALL)
4814 reg = arg->tail_call_reg;
4815 else
4816 reg = arg->reg;
4817 partial = arg->partial;
4818 }
66d433c7 4819
231bd014 4820 /* Being passed entirely in a register. We shouldn't be called in
4821 this case. */
4822 gcc_assert (reg == 0 || partial != 0);
a0c938f0 4823
f28c7a75 4824 /* If this arg needs special alignment, don't load the registers
4825 here. */
4826 if (arg->n_aligned_regs != 0)
4827 reg = 0;
c87678e4 4828
f28c7a75 4829 /* If this is being passed partially in a register, we can't evaluate
66d433c7 4830 it directly into its stack slot. Otherwise, we can. */
4831 if (arg->value == 0)
f848041f 4832 {
f848041f 4833 /* stack_arg_under_construction is nonzero if a function argument is
4834 being evaluated directly into the outgoing argument list and
4835 expand_call must take special action to preserve the argument list
4836 if it is called recursively.
4837
4838 For scalar function arguments stack_usage_map is sufficient to
4839 determine which stack slots must be saved and restored. Scalar
4840 arguments in general have pass_on_stack == 0.
4841
4842 If this argument is initialized by a function which takes the
4843 address of the argument (a C++ constructor or a C function
4844 returning a BLKmode structure), then stack_usage_map is
4845 insufficient and expand_call must push the stack around the
4846 function call. Such arguments have pass_on_stack == 1.
4847
4848 Note that it is always safe to set stack_arg_under_construction,
4849 but this generates suboptimal code if set when not needed. */
4850
4851 if (arg->pass_on_stack)
4852 stack_arg_under_construction++;
4448f543 4853
7dbf1af4 4854 arg->value = expand_expr (pval,
4855 (partial
4856 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4857 ? NULL_RTX : arg->stack,
a35a63ff 4858 VOIDmode, EXPAND_STACK_PARM);
1c0c37a5 4859
 4860 /* If we are promoting the object (or if, for any other reason, the
 4861 mode doesn't agree), convert the mode. */
4862
1560ef8f 4863 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4864 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4865 arg->value, arg->unsignedp);
1c0c37a5 4866
f848041f 4867 if (arg->pass_on_stack)
4868 stack_arg_under_construction--;
f848041f 4869 }
66d433c7 4870
63864e1c 4871 /* Check for overlap with already clobbered argument area. */
ff6c0ab2 4872 if ((flags & ECF_SIBCALL)
4873 && MEM_P (arg->value)
4874 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4875 arg->locate.size.constant))
4876 sibcall_failure = 1;
63864e1c 4877
66d433c7 4878 /* Don't allow anything left on stack from computation
4879 of argument to alloca. */
02510658 4880 if (flags & ECF_MAY_BE_ALLOCA)
66d433c7 4881 do_pending_stack_adjust ();
4882
4883 if (arg->value == arg->stack)
8a06f2d4 4884 /* If the value is already in the stack slot, we are done. */
4885 ;
1c0c37a5 4886 else if (arg->mode != BLKmode)
66d433c7 4887 {
19cb6b50 4888 int size;
851fc2b3 4889 unsigned int parm_align;
66d433c7 4890
4891 /* Argument is a scalar, not entirely passed in registers.
4892 (If part is passed in registers, arg->partial says how much
4893 and emit_push_insn will take care of putting it there.)
c87678e4 4894
66d433c7 4895 Push it, and if its size is less than the
4896 amount of space allocated to it,
4897 also bump stack pointer by the additional space.
4898 Note that in C the default argument promotions
4899 will prevent such mismatches. */
4900
1c0c37a5 4901 size = GET_MODE_SIZE (arg->mode);
66d433c7 4902 /* Compute how much space the push instruction will push.
4903 On many machines, pushing a byte will advance the stack
4904 pointer by a halfword. */
4905#ifdef PUSH_ROUNDING
4906 size = PUSH_ROUNDING (size);
4907#endif
4908 used = size;
4909
4910 /* Compute how much space the argument should get:
4911 round up to a multiple of the alignment for arguments. */
1c0c37a5 4912 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
66d433c7 4913 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4914 / (PARM_BOUNDARY / BITS_PER_UNIT))
4915 * (PARM_BOUNDARY / BITS_PER_UNIT));
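      /* Example of this rounding (assumed values): size = 3 with
	 PARM_BOUNDARY = 32 gives
	   used = ((3 + 4 - 1) / 4) * 4 = 4
	 i.e. the scalar is padded out to a full 4-byte parameter
	 slot. */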
4916
851fc2b3 4917 /* Compute the alignment of the pushed argument. */
4918 parm_align = arg->locate.boundary;
4919 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4920 {
4921 int pad = used - size;
4922 if (pad)
4923 {
4924 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4925 parm_align = MIN (parm_align, pad_align);
4926 }
4927 }
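      /* Example (assumed values): used = 8 and size = 6 leave pad = 2;
	 (pad & -pad) isolates the lowest set bit, so pad_align =
	 2 * BITS_PER_UNIT = 16, and the pushed data can only be assumed
	 16-bit aligned even if locate.boundary was larger. */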
4928
66d433c7 4929 /* This isn't already where we want it on the stack, so put it there.
4930 This can either be done with push or copy insns. */
a95e5776 4931 if (!emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
851fc2b3 4932 parm_align, partial, reg, used - size, argblock,
241399f6 4933 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
a95e5776 4934 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
4935 sibcall_failure = 1;
d5c9a99f 4936
4937 /* Unless this is a partially-in-register argument, the argument is now
4938 in the stack. */
4939 if (partial == 0)
4940 arg->value = arg->stack;
66d433c7 4941 }
4942 else
4943 {
4944 /* BLKmode, at least partly to be pushed. */
4945
cf78c9ff 4946 unsigned int parm_align;
19cb6b50 4947 int excess;
66d433c7 4948 rtx size_rtx;
4949
4950 /* Pushing a nonscalar.
4951 If part is passed in registers, PARTIAL says how much
4952 and emit_push_insn will take care of putting it there. */
4953
4954 /* Round its size up to a multiple
4955 of the allocation unit for arguments. */
4956
241399f6 4957 if (arg->locate.size.var != 0)
66d433c7 4958 {
4959 excess = 0;
241399f6 4960 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
66d433c7 4961 }
4962 else
4963 {
f054eb3c 4964 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4965 for BLKmode is careful to avoid it. */
4966 excess = (arg->locate.size.constant
4967 - int_size_in_bytes (TREE_TYPE (pval))
4968 + partial);
623282b0 4969 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
b9c74b4d 4970 NULL_RTX, TYPE_MODE (sizetype),
4971 EXPAND_NORMAL);
66d433c7 4972 }
4973
c5dc0c32 4974 parm_align = arg->locate.boundary;
cf78c9ff 4975
4976 /* When an argument is padded down, the block is aligned to
4977 PARM_BOUNDARY, but the actual argument isn't. */
4978 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4979 {
241399f6 4980 if (arg->locate.size.var)
cf78c9ff 4981 parm_align = BITS_PER_UNIT;
4982 else if (excess)
4983 {
28397255 4984 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
cf78c9ff 4985 parm_align = MIN (parm_align, excess_align);
4986 }
4987 }
4988
e16ceb8e 4989 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
57679d39 4990 {
4991 /* emit_push_insn might not work properly if arg->value and
241399f6 4992 argblock + arg->locate.offset areas overlap. */
57679d39 4993 rtx x = arg->value;
4994 int i = 0;
4995
abe32cce 4996 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
57679d39 4997 || (GET_CODE (XEXP (x, 0)) == PLUS
4998 && XEXP (XEXP (x, 0), 0) ==
abe32cce 4999 crtl->args.internal_arg_pointer
971ba038 5000 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
57679d39 5001 {
abe32cce 5002 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
57679d39 5003 i = INTVAL (XEXP (XEXP (x, 0), 1));
5004
c62f411b 5005 /* arg.locate doesn't contain the pretend_args_size offset;
 5006 it's part of argblock. Ensure we don't count it in I. */
5007 if (STACK_GROWS_DOWNWARD)
5008 i -= crtl->args.pretend_args_size;
5009 else
5010 i += crtl->args.pretend_args_size;
5011
21dda4ee 5012 /* expand_call should ensure this. */
231bd014 5013 gcc_assert (!arg->locate.offset.var
2ad152f7 5014 && arg->locate.size.var == 0
971ba038 5015 && CONST_INT_P (size_rtx));
57679d39 5016
241399f6 5017 if (arg->locate.offset.constant > i)
57679d39 5018 {
241399f6 5019 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
57679d39 5020 sibcall_failure = 1;
5021 }
241399f6 5022 else if (arg->locate.offset.constant < i)
57679d39 5023 {
2ad152f7 5024 /* Use arg->locate.size.constant instead of size_rtx
5025 because we only care about the part of the argument
5026 on the stack. */
5027 if (i < (arg->locate.offset.constant
5028 + arg->locate.size.constant))
5029 sibcall_failure = 1;
5030 }
5031 else
5032 {
5033 /* Even though they appear to be at the same location,
5034 if part of the outgoing argument is in registers,
5035 they aren't really at the same location. Check for
5036 this by making sure that the incoming size is the
5037 same as the outgoing size. */
5038 if (arg->locate.size.constant != INTVAL (size_rtx))
57679d39 5039 sibcall_failure = 1;
5040 }
5041 }
5042 }
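      /* Worked example with assumed offsets: if the incoming argument X
	 lives at internal_arg_pointer + 4 (I = 4) while this outgoing
	 argument occupies offset 0 with size 8, then
	 locate.offset.constant (0) < I < 0 + 8, so pushing would
	 clobber X before it is read and the sibcall must be
	 abandoned. */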
5043
1c0c37a5 5044 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
cf78c9ff 5045 parm_align, partial, reg, excess, argblock,
241399f6 5046 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
a95e5776 5047 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
66d433c7 5048
d5c9a99f 5049 /* Unless this is a partially-in-register argument, the argument is now
5050 in the stack.
66d433c7 5051
d5c9a99f 5052 ??? Unlike the case above, in which we want the actual
5053 address of the data, so that we can load it directly into a
5054 register, here we want the address of the stack slot, so that
5055 it's properly aligned for word-by-word copying or something
5056 like that. It's not clear that this is always correct. */
5057 if (partial == 0)
5058 arg->value = arg->stack_slot;
5059 }
b600a907 5060
5061 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5062 {
5063 tree type = TREE_TYPE (arg->tree_value);
5064 arg->parallel_value
5065 = emit_group_load_into_temps (arg->reg, arg->value, type,
5066 int_size_in_bytes (type));
5067 }
66d433c7 5068
a35a63ff 5069 /* Mark all slots this store used. */
5070 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5071 && argblock && ! variable_size && arg->stack)
5072 for (i = lower_bound; i < upper_bound; i++)
5073 stack_usage_map[i] = 1;
5074
66d433c7 5075 /* Once we have pushed something, pops can't safely
5076 be deferred during the rest of the arguments. */
5077 NO_DEFER_POP;
5078
0ab48139 5079 /* Free any temporary slots made in processing this argument. */
1b117c60 5080 pop_temp_slots ();
57679d39 5081
5082 return sibcall_failure;
66d433c7 5083}
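/* A minimal sketch of how expand_call drives the routine above
   (hedged; the real loop carries more bookkeeping): each stack
   argument is stored in turn, and any reported overlap poisons the
   sibcall attempt.

     for (i = 0; i < num_actuals; i++)
       if (args[i].reg == 0 || args[i].pass_on_stack)
	 if (store_one_arg (&args[i], argblock, flags,
			    adjusted_args_size.var != 0,
			    reg_parm_stack_space))
	   sibcall_failure = 1;
*/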
890f0c17 5084
0336f0f0 5085/* Nonzero if we do not know how to pass TYPE solely in registers. */
890f0c17 5086
0336f0f0 5087bool
3754d046 5088must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 5089 const_tree type)
0336f0f0 5090{
5091 if (!type)
5092 return false;
5093
5094 /* If the type has variable size... */
5095 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5096 return true;
890f0c17 5097
0336f0f0 5098 /* If the type is marked as addressable (it is required
5099 to be constructed into the stack)... */
5100 if (TREE_ADDRESSABLE (type))
5101 return true;
5102
5103 return false;
5104}
890f0c17 5105
0d568ddf 5106/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
0336f0f0 5107 takes trailing padding of a structure into account. */
5108/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
890f0c17 5109
5110bool
3754d046 5111must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
890f0c17 5112{
5113 if (!type)
dceaa0b1 5114 return false;
890f0c17 5115
5116 /* If the type has variable size... */
5117 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5118 return true;
5119
5120 /* If the type is marked as addressable (it is required
5121 to be constructed into the stack)... */
5122 if (TREE_ADDRESSABLE (type))
5123 return true;
5124
5125 /* If the padding and mode of the type is such that a copy into
5126 a register would put it into the wrong part of the register. */
5127 if (mode == BLKmode
5128 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5129 && (FUNCTION_ARG_PADDING (mode, type)
5130 == (BYTES_BIG_ENDIAN ? upward : downward)))
5131 return true;
5132
5133 return false;
5134}
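/* These predicates are common implementations of the
   TARGET_MUST_PASS_IN_STACK hook; a back end selects one, e.g.
   (sketch of the usual pattern in a config/<target> file):

     #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size_or_pad

   As a worked instance of the final padding test (assumed target): a
   6-byte BLKmode struct with PARM_BOUNDARY = 32 has 6 % 4 != 0, and on
   a big-endian target that pads such arguments upward a copy into a
   register would land in the wrong part of the register, so the struct
   must be passed on the stack. */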