/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "stringpool.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
   stack location's tail call argument has been already stored into the stack.
   This bitmap is used to prevent sibling call optimization if function tries
   to use parent's incoming argument slots when they have been already
   overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
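
/* Illustrative sketch (not part of the original source): how the two
   helpers above cooperate.  Marking a constant-sized region sets
   individual bytes of the map:

     mark_stack_region_used (0, 16);   // stack_usage_map[0..15] = 1

   Marking a region whose upper bound depends on a runtime quantity
   (a non-constant poly_uint64) cannot update the byte map, so it
   lowers the watermark instead:

     mark_stack_region_used (32, 32 + 16 * n);  // n unknown at compile time
                                                // => stack_usage_watermark <= 32

   Afterwards stack_region_maybe_used_p conservatively answers true for
   any region whose (constant) upper bound exceeds the watermark,
   without consulting the byte map at all.  */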

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
         runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
        {
          const int bit_val = targetm.calls.custom_function_descriptors;
          rtx call_lab = gen_label_rtx ();

          gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
          fndecl_or_type
            = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
                          fndecl_or_type);
          DECL_STATIC_CHAIN (fndecl_or_type) = 1;
          rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

          if (GET_MODE (funexp) != Pmode)
            funexp = convert_memory_address (Pmode, funexp);

          /* Avoid long live ranges around function calls.  */
          funexp = copy_to_mode_reg (Pmode, funexp);

          if (REG_P (chain))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

          /* Emit the runtime identification pattern.  */
          rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
          emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
                                   call_lab);

          /* Statically predict the branch to very likely taken.  */
          rtx_insn *insn = get_last_insn ();
          if (JUMP_P (insn))
            predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

          /* Load the descriptor.  */
          rtx mem = gen_rtx_MEM (ptr_mode,
                                 plus_constant (Pmode, funexp, - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (chain, mem);

          mem = gen_rtx_MEM (ptr_mode,
                             plus_constant (Pmode, funexp,
                                            POINTER_SIZE / BITS_PER_UNIT
                                            - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (funexp, mem);

          emit_label (call_lab);

          if (REG_P (chain))
            {
              use_reg (call_fusage, chain);
              STATIC_CHAIN_REG_P (chain) = 1;
            }

          /* Make sure we're not going to be overwritten below.  */
          gcc_assert (!static_chain_value);
        }

      /* If we are using registers for parameters, force the
         function address into a register now.  */
      funexp = ((reg_parm_seen
                 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
                ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
                : memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* FUNEXP could be a SYMBOL_REF that represents a function pointer
         of ptr_mode.  In this case, it should be converted into address
         mode to be a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
        funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
        {
          if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
            funexp = force_reg (Pmode, funexp);
        }
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        {
          use_reg (call_fusage, chain);
          STATIC_CHAIN_REG_P (chain) = 1;
        }
    }

  return funexp;
}
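
/* Illustrative sketch (not part of the original source) of the
   descriptor scheme handled above when the target enables
   TARGET_CUSTOM_FUNCTION_DESCRIPTORS.  A "function pointer" either
   points at code (its low bits are clear) or, with a low bit set, at
   a two-pointer descriptor.  The RTL emitted above behaves roughly
   like this pseudo-C:

     if (fp & bit_val)                   // descriptor, not code
       {
         void **desc = (void **) (fp - bit_val);
         static_chain = desc[0];         // first word: static chain
         fp = desc[1];                   // second word: code address
       }
     (*fp) (...);                        // with static_chain loaded

   where bit_val is targetm.calls.custom_function_descriptors.  */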

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
                                 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}
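
/* Example (illustrative; target-specific): on i386 a function using
   the stdcall convention pops its own arguments, so for

     int __attribute__ ((stdcall)) f (int a, int b);

   targetm.calls.return_pops_args returns the full argument size,
   N_POPPED is nonzero, and emit_call_1 above emits a call_pop pattern
   and adjusts stack_pointer_delta rather than generating an explicit
   stack adjustment after the call.  With the default cdecl convention
   N_POPPED stays zero and the caller pops (or defers popping) the
   argument block itself.  */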

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
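
/* Example (illustrative): the internal "fn spec" attribute string
   encodes the return behavior in its first character.  A builtin such
   as memcpy, which returns its first argument, carries a spec string
   starting with '1' and so yields ERF_RETURNS_ARG | 0 here, while a
   malloc-like function carries 'm' and yields ERF_NOALIAS.  */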

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when EXP contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          arg.type = TREE_TYPE (first_field (type));
          arg.mode = TYPE_MODE (arg.type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}
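
/* Example (illustrative): in C++ a type with a non-trivial copy
   constructor or destructor is TREE_ADDRESSABLE, so the first test in
   pass_by_reference above fires and the argument travels by invisible
   reference:

     struct S { S (const S &); int i; };
     void f (S s);   // effectively receives a hidden S *

   A variable-length array type likewise has a non-INTEGER_CST
   TYPE_SIZE and is caught by the second test.  */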

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
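
/* Example (illustrative) of why non-legitimate constants are forced
   into pseudos above: under the global-dynamic TLS model the address
   of a thread-local variable is resolved by a call to __tls_get_addr.
   For a call such as

     extern __thread int t;
     void g (int);
     void h (void) { g (t); }

   expanding the reference to T after the outgoing argument registers
   have been loaded could clobber them, so the value is computed into
   a pseudo first and only then copied into the hard register.  */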

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
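
/* Example (illustrative): REG_PARM_STACK_SPACE models ABIs in which
   the caller reserves stack space for arguments that are passed in
   registers.  Under the x86-64 Microsoft ABI, for instance, every
   caller allocates a 32-byte "home area" for the four register
   arguments; when a call is expanded while that area already holds
   live data, the two helpers above save it to a temporary and
   restore it afterwards.  */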

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);
            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here, but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register, later passes know that the first AND used to
               zero out the bitfield being set in the register is
               unnecessary.  The store of 0 will be deleted, as will at
               least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false);
          }
      }
}
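
/* Example (illustrative): an argument like Q->U below may live at a
   merely byte-aligned address because of the packed attribute, so if
   it is passed in registers its BLKmode value cannot be fetched with
   plain word loads:

     struct __attribute__ ((packed)) P { char c; struct T { int a, b; } u; };
     extern void g (struct T);
     void f (struct P *q) { g (q->u); }

   In that case the loop above assembles each word in a pseudo with
   bit-field extraction and stores, and the pseudos are copied into
   the hard registers just before the call.  */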

/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  HOST_WIDE_INT limit = warn_alloc_size_limit;
  if (limit == HOST_WIDE_INT_MAX)
    limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

  alloc_object_size_limit = build_int_cst (size_type_node, limit);

  return alloc_object_size_limit;
}

/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make it represent a valid size
   of an object, or a valid size argument to an allocation function
   declared with attribute alloc_size (whose argument may be signed),
   or to a string manipulation function like memset.  When ALLOW_ZERO
   is true, allow returning a range of [0, 0] for a size in an
   anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is a (nearly)
   invalid argument to allocation functions like malloc but it is a
   valid argument to functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (integral)
    range_type = determine_value_range (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
        {
          /* Use the full range of the type of the expression when
             no value range information is available.  */
          range[0] = TYPE_MIN_VALUE (exptype);
          range[1] = TYPE_MAX_VALUE (exptype);
          return true;
        }

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
        {
          if (wi::les_p (max, 0))
            {
              /* EXP is not in a strictly negative range.  That means
                 it must be in some (not necessarily strictly) positive
                 range which includes zero.  Since in signed to unsigned
                 conversions negative values end up converted to large
                 positive values, and otherwise they are not valid sizes,
                 the resulting range is in both cases [0, TYPE_MAX].  */
              min = wi::zero (expprec);
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else if (wi::les_p (min - 1, 0))
            {
              /* EXP is not in a negative-positive range.  That means EXP
                 is either negative, or greater than max.  Since negative
                 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else
            {
              max = min - 1;
              min = wi::zero (expprec);
            }
        }
      else if (wi::eq_p (0, min - 1))
        {
          /* EXP is unsigned and not in the range [1, MAX].  That means
             it's either zero or greater than MAX.  Even though 0 would
             normally be detected by -Walloc-zero, unless ALLOW_ZERO
             is true, set the range to [MAX, TYPE_MAX] so that when MAX
             is greater than the limit the whole range is diagnosed.  */
          if (allow_zero)
            min = max = wi::zero (expprec);
          else
            {
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
        }
      else
        {
          max = min - 1;
          min = wi::zero (expprec);
        }
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
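
/* Worked example (illustrative): if EXP is unsigned and known to be
   in the anti-range ~[1, 100] (i.e. either zero or greater than 100),
   MIN is 1 and MAX is 100 in the VR_ANTI_RANGE code above, so the
   function returns the range [101, TYPE_MAX]; with ALLOW_ZERO set it
   returns [0, 0] instead, since zero is a valid argument to functions
   like memset even though it is a (nearly) invalid allocation size.  */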

/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  FN is null when EXP is a call via a function pointer.
   When ARGS[0] is null the function does nothing.  ARGS[1] may be null
   for functions like malloc, and non-null for those like calloc that
   are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
        {
          argrange[i][0] = args[i];
          argrange[i][1] = args[i];

          if (tree_int_cst_lt (args[i], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE is negative",
                                   exp, idx[i] + 1, args[i]);
            }
          else if (integer_zerop (args[i]))
            {
              /* Avoid issuing -Walloc-zero for allocation functions other
                 than __builtin_alloca that are declared with attribute
                 returns_nonnull because there's no portability risk.  This
                 avoids warning for such calls to libiberty's xmalloc and
                 friends.
                 Also avoid issuing the warning for calls to function named
                 "alloca".  */
              if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
                  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
                  : !lookup_attribute ("returns_nonnull",
                                       TYPE_ATTRIBUTES (fntype)))
                warned = warning_at (loc, OPT_Walloc_zero,
                                     "%Kargument %i value is zero",
                                     exp, idx[i] + 1);
            }
          else if (tree_int_cst_lt (maxobjsize, args[i]))
            {
              /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
                 mode and with -fno-exceptions as a way to indicate array
                 size overflow.  There's no good way to detect C++98 here
                 so avoid diagnosing these calls for all C++ modes.  */
              if (i == 0
                  && fn
                  && !args[1]
                  && lang_GNU_CXX ()
                  && DECL_IS_OPERATOR_NEW_P (fn)
                  && integer_all_onesp (args[i]))
                continue;

              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i value %qE exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1, args[i], maxobjsize);
            }
        }
      else if (TREE_CODE (args[i]) == SSA_NAME
               && get_size_range (args[i], argrange[i]))
        {
          /* Verify that the argument's range is not negative (including
             upper bound of zero).  */
          if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
              && tree_int_cst_le (argrange[i][1], integer_zero_node))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] is negative",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1]);
            }
          else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
            {
              warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                                   "%Kargument %i range [%E, %E] exceeds "
                                   "maximum object size %E",
                                   exp, idx[i] + 1,
                                   argrange[i][0], argrange[i][1],
                                   maxobjsize);
            }
        }
    }

  if (!argrange[0][0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
         attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds %<SIZE_MAX%>",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
        warned = warning_at (loc, OPT_Walloc_size_larger_than_,
                             "%Kproduct %<%E * %E%> of arguments %i and %i "
                             "exceeds maximum object size %E",
                             exp, argrange[0][0], argrange[1][0],
                             idx[0] + 1, idx[1] + 1,
                             maxobjsize);

      if (warned)
        {
          /* Print the full range of each of the two arguments to make
             it clear when it is, in fact, in a range and not constant.  */
          if (argrange[0][0] != argrange [0][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[0] + 1, argrange[0][0], argrange[0][1]);
          if (argrange[1][0] != argrange [1][1])
            inform (loc, "argument %i in the range [%E, %E]",
                    idx[1] + 1, argrange[1][0], argrange[1][1]);
        }
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_BUILTIN (fn))
        inform (loc,
                "in a call to built-in allocation function %qD", fn);
      else
        inform (fnloc,
                "in a call to allocation function %qD declared here", fn);
    }
}
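
/* Examples (illustrative) of calls diagnosed by the function above,
   for a user-declared allocator:

     void *my_alloc (int) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));

     my_alloc (-1);                   // argument value is negative
     my_alloc (0);                    // -Walloc-zero
     my_calloc (SIZE_MAX / 2, 4);     // product exceeds SIZE_MAX

   The built-in malloc, calloc, and alloca are checked the same way
   through their implicit alloc_size attributes.  */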

/* If EXPR refers to a character array or pointer declared with
   attribute nonstring, return a decl for that array or pointer and
   set *REF to the referenced enclosing object or pointer.  Otherwise
   return null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == ADDR_EXPR
              || code == COMPONENT_REF
              || code == VAR_DECL)
            decl = gimple_assign_rhs1 (def);
        }
      else
        var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
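
/* Examples (illustrative) of declarations the function above
   recognizes:

     char name[8] __attribute__ ((nonstring));   // array
     char *p __attribute__ ((nonstring));        // pointer

   For an access such as name[i] or *p it drills down through the
   ARRAY_REF, COMPONENT_REF, or MEM_REF to the underlying decl and
   looks up "nonstring" in its DECL_ATTRIBUTES.  */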

/* Warn about passing a non-string array/pointer to a function that
   expects a nul-terminated string argument.  */

void
maybe_warn_nonstring_arg (tree fndecl, tree exp)
{
  if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return;

  if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
    return;

  /* Avoid clearly invalid calls (more checking done below).  */
  unsigned nargs = call_expr_nargs (exp);
  if (!nargs)
    return;

  /* The bound argument to a bounded string function like strncpy.  */
  tree bound = NULL_TREE;

  /* The longest known or possible string argument to one of the comparison
     functions.  If the length is less than the bound it is used instead.
     Since the length is only used for warning and not for code generation
     disable strict mode in the calls to get_range_strlen below.  */
  tree maxlen = NULL_TREE;

  /* It's safe to call "bounded" string functions with a non-string
     argument since the functions provide an explicit bound for this
     purpose.  The exception is strncat where the bound may refer to
     either the destination or the source.  */
  int fncode = DECL_FUNCTION_CODE (fndecl);
  switch (fncode)
    {
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      {
        /* For these, if one argument refers to one or more of a set
           of string constants or arrays of known size, determine
           the range of their known or possible lengths and use it
           conservatively as the bound for the unbounded function,
           and to adjust the range of the bound of the bounded ones.  */
        for (unsigned argno = 0;
             argno < MIN (nargs, 2)
               && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
          {
            tree arg = CALL_EXPR_ARG (exp, argno);
            if (!get_attr_nonstring_decl (arg))
              {
                c_strlen_data lendata = { };
                get_range_strlen (arg, &lendata, /* eltsize = */ 1);
                maxlen = lendata.maxbound;
              }
          }
      }
      /* Fall through.  */

    case BUILT_IN_STRNCAT:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRNCPY:
      if (nargs > 2)
        bound = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNDUP:
      if (nargs > 1)
        bound = CALL_EXPR_ARG (exp, 1);
      break;

    case BUILT_IN_STRNLEN:
      {
        tree arg = CALL_EXPR_ARG (exp, 0);
        if (!get_attr_nonstring_decl (arg))
          {
            c_strlen_data lendata = { };
            get_range_strlen (arg, &lendata, /* eltsize = */ 1);
            maxlen = lendata.maxbound;
          }
        if (nargs > 1)
          bound = CALL_EXPR_ARG (exp, 1);
        break;
      }

    default:
      break;
    }

  /* Determine the range of the bound argument (if specified).  */
  tree bndrng[2] = { NULL_TREE, NULL_TREE };
  if (bound)
    {
      STRIP_NOPS (bound);
      get_size_range (bound, bndrng);
    }

  location_t loc = EXPR_LOCATION (exp);

  if (bndrng[0])
    {
      /* Diagnose an excessive bound prior to the adjustment below and
         regardless of attribute nonstring.  */
      tree maxobjsize = max_object_size ();
      if (tree_int_cst_lt (maxobjsize, bndrng[0]))
        {
          if (tree_int_cst_equal (bndrng[0], bndrng[1]))
            warning_at (loc, OPT_Wstringop_overflow_,
                        "%K%qD specified bound %E "
                        "exceeds maximum object size %E",
                        exp, fndecl, bndrng[0], maxobjsize);
          else
            warning_at (loc, OPT_Wstringop_overflow_,
                        "%K%qD specified bound [%E, %E] "
                        "exceeds maximum object size %E",
                        exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
          return;
        }
    }

  if (maxlen && !integer_all_onesp (maxlen))
    {
      /* Add one for the nul.  */
      maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
                            size_one_node);

      if (!bndrng[0])
        {
          /* Conservatively use the upper bound of the lengths for
             both the lower and the upper bound of the operation.  */
          bndrng[0] = maxlen;
          bndrng[1] = maxlen;
          bound = void_type_node;
        }
      else if (maxlen)
        {
          /* Replace the bound on the operation with the upper bound
             of the length of the string if the latter is smaller.  */
          if (tree_int_cst_lt (maxlen, bndrng[0]))
            bndrng[0] = maxlen;
          else if (tree_int_cst_lt (maxlen, bndrng[1]))
            bndrng[1] = maxlen;
        }
    }

  /* Iterate over the built-in function's formal arguments and check
     each const char* against the actual argument.  If the actual
     argument is declared attribute non-string issue a warning unless
     the argument's maximum length is bounded.  */
  function_args_iterator it;
  function_args_iter_init (&it, TREE_TYPE (fndecl));

  for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
    {
      /* Avoid iterating past the declared argument in a call
         to a function declared without a prototype.  */
1704 if (argno >= nargs)
1705 break;
1706
1707 tree argtype = function_args_iter_cond (&it);
1708 if (!argtype)
1709 break;
1710
1711 if (TREE_CODE (argtype) != POINTER_TYPE)
1712 continue;
1713
1714 argtype = TREE_TYPE (argtype);
1715
1716 if (TREE_CODE (argtype) != INTEGER_TYPE
1717 || !TYPE_READONLY (argtype))
1718 continue;
1719
1720 argtype = TYPE_MAIN_VARIANT (argtype);
1721 if (argtype != char_type_node)
1722 continue;
1723
1724 tree callarg = CALL_EXPR_ARG (exp, argno);
1725 if (TREE_CODE (callarg) == ADDR_EXPR)
1726 callarg = TREE_OPERAND (callarg, 0);
1727
1728 /* See if the destination is declared with attribute "nonstring". */
1729 tree decl = get_attr_nonstring_decl (callarg);
1730 if (!decl)
1731 continue;
1732
1733 /* The maximum number of array elements accessed. */
1734 offset_int wibnd = 0;
1735
1736 if (argno && fncode == BUILT_IN_STRNCAT)
1737 {
1738 /* See if the bound in strncat is derived from the length
1739 (i.e., the strlen) of the destination (as it's expected to be).
1740 If so, reset BOUND and FNCODE to trigger a warning. */
1741 tree dstarg = CALL_EXPR_ARG (exp, 0);
1742 if (is_strlen_related_p (dstarg, bound))
1743 {
1744 /* The bound applies to the destination, not to the source,
1745 so reset these to trigger a warning without mentioning
1746 the bound. */
1747 bound = NULL;
1748 fncode = 0;
1749 }
1750 else if (bndrng[1])
1751 /* Use the upper bound of the range for strncat. */
1752 wibnd = wi::to_offset (bndrng[1]);
1753 }
1754 else if (bndrng[0])
1755 /* Use the lower bound of the range for functions other than
1756 strncat. */
1757 wibnd = wi::to_offset (bndrng[0]);
1758
1759 /* Determine the size of the argument array if it is one. */
1760 offset_int asize = wibnd;
1761 bool known_size = false;
1762 tree type = TREE_TYPE (decl);
1763
1764 /* Determine the array size. For arrays of unknown bound and
1765 pointers reset BOUND to trigger the appropriate warning. */
1766 if (TREE_CODE (type) == ARRAY_TYPE)
1767 {
1768 if (tree arrbnd = TYPE_DOMAIN (type))
1769 {
1770 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1771 {
1772 asize = wi::to_offset (arrbnd) + 1;
1773 known_size = true;
1774 }
1775 }
1776 else if (bound == void_type_node)
1777 bound = NULL_TREE;
1778 }
1779 else if (bound == void_type_node)
1780 bound = NULL_TREE;
1781
1782 /* In a call to strncat with a bound in a range whose lower but
1783 not upper bound is less than the array size, reset ASIZE to
1784 be the same as the bound and the other variable to trigger
1785 the appropriate warning below. */
1786 if (fncode == BUILT_IN_STRNCAT
1787 && bndrng[0] != bndrng[1]
1788 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1789 && (!known_size
1790 || wi::ltu_p (asize, wibnd)))
1791 {
1792 asize = wibnd;
1793 bound = NULL_TREE;
1794 fncode = 0;
1795 }
1796
1797 bool warned = false;
1798
1799 auto_diagnostic_group d;
1800 if (wi::ltu_p (asize, wibnd))
1801 {
1802 if (bndrng[0] == bndrng[1])
1803 warned = warning_at (loc, OPT_Wstringop_overflow_,
1804 "%qD argument %i declared attribute "
1805 "%<nonstring%> is smaller than the specified "
1806 "bound %wu",
1807 fndecl, argno + 1, wibnd.to_uhwi ());
1808 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1809 warned = warning_at (loc, OPT_Wstringop_overflow_,
1810 "%qD argument %i declared attribute "
1811 "%<nonstring%> is smaller than "
1812 "the specified bound [%E, %E]",
1813 fndecl, argno + 1, bndrng[0], bndrng[1]);
1814 else
1815 warned = warning_at (loc, OPT_Wstringop_overflow_,
1816 "%qD argument %i declared attribute "
1817 "%<nonstring%> may be smaller than "
1818 "the specified bound [%E, %E]",
1819 fndecl, argno + 1, bndrng[0], bndrng[1]);
1820 }
1821 else if (fncode == BUILT_IN_STRNCAT)
1822 ; /* Avoid warning for calls to strncat() when the bound
1823 is equal to the size of the non-string argument. */
1824 else if (!bound)
1825 warned = warning_at (loc, OPT_Wstringop_overflow_,
1826 "%qD argument %i declared attribute %<nonstring%>",
1827 fndecl, argno + 1);
1828
1829 if (warned)
1830 inform (DECL_SOURCE_LOCATION (decl),
1831 "argument %qD declared here", decl);
1832 }
1833 }
1834
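/* As a user-level illustration (example code with made-up sizes, not
   part of this file, assuming <string.h> is included), given

     __attribute__ ((nonstring)) char buf[4];

   a call like strnlen (buf, 8) is diagnosed by the logic above as
   "argument 1 declared attribute nonstring is smaller than the
   specified bound 8", while a call with no bound at all (strlen
   style) gets the plain "declared attribute nonstring" warning
   instead.  */
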
1835 /* Issue an error if CALL_EXPR was flagged as requiring
1836 tail-call optimization. */
1837
1838 static void
1839 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1840 {
1841 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1842 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1843 return;
1844
1845 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1846 }
1847
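/* Usage sketch, mirroring the callers later in this file; the
   condition name is a placeholder, not a real predicate:

     if (some_condition_prevents_tail_call)
       {
         *may_tailcall = false;
         maybe_complain_about_tail_call (exp, "reason for the failure");
       }

   For calls not flagged CALL_EXPR_MUST_TAIL_CALL this is a no-op and
   the caller silently falls back to a normal call.  */
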
1848 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1849 CALL_EXPR EXP.
1850
1851 NUM_ACTUALS is the total number of parameters.
1852
1853 N_NAMED_ARGS is the total number of named arguments.
1854
1855 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1856 value, or null.
1857
1858 FNDECL is the tree node for the target of this call (if known).
1859
1860 ARGS_SO_FAR holds state needed by the target to know where to place
1861 the next argument.
1862
1863 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1864 for arguments which are passed in registers.
1865
1866 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1867 and may be modified by this routine.
1868
1869 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1870 flags which may be modified by this routine.
1871
1872 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1873 that requires allocation of stack space.
1874
1875 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1876 the thunked-to function. */
1877
1878 static void
1879 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1880 struct arg_data *args,
1881 struct args_size *args_size,
1882 int n_named_args ATTRIBUTE_UNUSED,
1883 tree exp, tree struct_value_addr_value,
1884 tree fndecl, tree fntype,
1885 cumulative_args_t args_so_far,
1886 int reg_parm_stack_space,
1887 rtx *old_stack_level,
1888 poly_int64_pod *old_pending_adj,
1889 int *must_preallocate, int *ecf_flags,
1890 bool *may_tailcall, bool call_from_thunk_p)
1891 {
1892 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1893 location_t loc = EXPR_LOCATION (exp);
1894
1895 /* Count arg position in the order args appear. */
1896 int argpos;
1897
1898 int i;
1899
1900 args_size->constant = 0;
1901 args_size->var = 0;
1902
1903 bitmap_obstack_initialize (NULL);
1904
1905 /* In this loop, we consider args in the order they are written.
1906 We fill up ARGS from the back. */
1907
1908 i = num_actuals - 1;
1909 {
1910 int j = i;
1911 call_expr_arg_iterator iter;
1912 tree arg;
1913 bitmap slots = NULL;
1914
1915 if (struct_value_addr_value)
1916 {
1917 args[j].tree_value = struct_value_addr_value;
1918 j--;
1919 }
1920 argpos = 0;
1921 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1922 {
1923 tree argtype = TREE_TYPE (arg);
1924
1925 if (targetm.calls.split_complex_arg
1926 && argtype
1927 && TREE_CODE (argtype) == COMPLEX_TYPE
1928 && targetm.calls.split_complex_arg (argtype))
1929 {
1930 tree subtype = TREE_TYPE (argtype);
1931 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1932 j--;
1933 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1934 }
1935 else
1936 args[j].tree_value = arg;
1937 j--;
1938 argpos++;
1939 }
1940
1941 if (slots)
1942 BITMAP_FREE (slots);
1943 }
1944
1945 bitmap_obstack_release (NULL);
1946
1947 /* Extract attribute alloc_size from the type of the called expression
1948 (which could be a function or a function pointer) and if set, store
1949 the indices of the corresponding arguments in ALLOC_IDX, and then
1950 the actual argument(s) at those indices in ALLOC_ARGS. */
1951 int alloc_idx[2] = { -1, -1 };
1952 if (tree alloc_size = lookup_attribute ("alloc_size",
1953 TYPE_ATTRIBUTES (fntype)))
1954 {
1955 tree args = TREE_VALUE (alloc_size);
1956 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1957 if (TREE_CHAIN (args))
1958 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1959 }
1960
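/* For illustration, the user-level declarations whose attribute is
   walked above look like this (hypothetical function names):

     void *my_alloc (size_t n) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   For my_calloc, ALLOC_IDX becomes { 0, 1 }: the attribute operands
   are 1-based, hence the "- 1" adjustments above.  */
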
1961 /* Array for up to the two attribute alloc_size arguments. */
1962 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1963
1964 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1965 for (argpos = 0; argpos < num_actuals; i--, argpos++)
1966 {
1967 tree type = TREE_TYPE (args[i].tree_value);
1968 int unsignedp;
1969 machine_mode mode;
1970
1971 /* Replace erroneous argument with constant zero. */
1972 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1973 args[i].tree_value = integer_zero_node, type = integer_type_node;
1974
1975 /* If TYPE is a transparent union or record, pass things the way
1976 we would pass the first field of the union or record. We have
1977 already verified that the modes are the same. */
1978 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1979 && TYPE_TRANSPARENT_AGGR (type))
1980 type = TREE_TYPE (first_field (type));
1981
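/* A sketch of the transparent aggregate case handled above (user-level
   code, not part of this file):

     typedef union
     {
       int *ip;
       const int *cip;
     } int_ptr_u __attribute__ ((transparent_union));

   An int_ptr_u argument is passed exactly as its first member
   (int *) would be; all members must share the same machine mode.  */
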
1982 /* Decide where to pass this arg.
1983
1984 args[i].reg is nonzero if all or part is passed in registers.
1985
1986 args[i].partial is nonzero if part but not all is passed in registers,
1987 and the exact value says how many bytes are passed in registers.
1988
1989 args[i].pass_on_stack is nonzero if the argument must at least be
1990 computed on the stack. It may then be loaded back into registers
1991 if args[i].reg is nonzero.
1992
1993 These decisions are driven by the FUNCTION_... macros and must agree
1994 with those made by function.c. */
1995
1996 /* See if this argument should be passed by invisible reference. */
1997 function_arg_info orig_arg (type, argpos < n_named_args);
1998 if (pass_by_reference (args_so_far_pnt, orig_arg))
1999 {
2000 bool callee_copies;
2001 tree base = NULL_TREE;
2002
2003 callee_copies = reference_callee_copied (args_so_far_pnt, orig_arg);
2004
2005 /* If we're compiling a thunk, pass through invisible references
2006 instead of making a copy. */
2007 if (call_from_thunk_p
2008 || (callee_copies
2009 && !TREE_ADDRESSABLE (type)
2010 && (base = get_base_address (args[i].tree_value))
2011 && TREE_CODE (base) != SSA_NAME
2012 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
2013 {
2014 /* We may have turned the parameter value into an SSA name.
2015 Go back to the original parameter so we can take the
2016 address. */
2017 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2018 {
2019 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2020 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2021 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2022 }
2023 /* Argument setup code may have copied the value to a register. We
2024 revert that optimization now because the tail call code must
2025 use the original location. */
2026 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2027 && !MEM_P (DECL_RTL (args[i].tree_value))
2028 && DECL_INCOMING_RTL (args[i].tree_value)
2029 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2030 set_decl_rtl (args[i].tree_value,
2031 DECL_INCOMING_RTL (args[i].tree_value));
2032
2033 mark_addressable (args[i].tree_value);
2034
2035 /* We can't use sibcalls if a callee-copied argument is
2036 stored in the current function's frame. */
2037 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2038 {
2039 *may_tailcall = false;
2040 maybe_complain_about_tail_call (exp,
2041 "a callee-copied argument is"
2042 " stored in the current"
2043 " function's frame");
2044 }
2045
2046 args[i].tree_value = build_fold_addr_expr_loc (loc,
2047 args[i].tree_value);
2048 type = TREE_TYPE (args[i].tree_value);
2049
2050 if (*ecf_flags & ECF_CONST)
2051 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2052 }
2053 else
2054 {
2055 /* We make a copy of the object and pass the address to the
2056 function being called. */
2057 rtx copy;
2058
2059 if (!COMPLETE_TYPE_P (type)
2060 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2061 || (flag_stack_check == GENERIC_STACK_CHECK
2062 && compare_tree_int (TYPE_SIZE_UNIT (type),
2063 STACK_CHECK_MAX_VAR_SIZE) > 0))
2064 {
2065 /* This is a variable-sized object. Make space on the stack
2066 for it. */
2067 rtx size_rtx = expr_size (args[i].tree_value);
2068
2069 if (*old_stack_level == 0)
2070 {
2071 emit_stack_save (SAVE_BLOCK, old_stack_level);
2072 *old_pending_adj = pending_stack_adjust;
2073 pending_stack_adjust = 0;
2074 }
2075
2076 /* We can pass TRUE as the 4th argument because we just
2077 saved the stack pointer and will restore it right after
2078 the call. */
2079 copy = allocate_dynamic_stack_space (size_rtx,
2080 TYPE_ALIGN (type),
2081 TYPE_ALIGN (type),
2082 max_int_size_in_bytes
2083 (type),
2084 true);
2085 copy = gen_rtx_MEM (BLKmode, copy);
2086 set_mem_attributes (copy, type, 1);
2087 }
2088 else
2089 copy = assign_temp (type, 1, 0);
2090
2091 store_expr (args[i].tree_value, copy, 0, false, false);
2092
2093 /* Just change the const function to pure and then let
2094 the next test clear the pure based on
2095 callee_copies. */
2096 if (*ecf_flags & ECF_CONST)
2097 {
2098 *ecf_flags &= ~ECF_CONST;
2099 *ecf_flags |= ECF_PURE;
2100 }
2101
2102 if (!callee_copies && *ecf_flags & ECF_PURE)
2103 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2104
2105 args[i].tree_value
2106 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2107 type = TREE_TYPE (args[i].tree_value);
2108 *may_tailcall = false;
2109 maybe_complain_about_tail_call (exp,
2110 "argument must be passed"
2111 " by copying");
2112 }
2113 }
2114
2115 unsignedp = TYPE_UNSIGNED (type);
2116 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2117 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2118
2119 args[i].unsignedp = unsignedp;
2120 args[i].mode = mode;
2121
2122 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2123
2124 function_arg_info arg (type, mode, argpos < n_named_args);
2125 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
2126
2127 if (args[i].reg && CONST_INT_P (args[i].reg))
2128 args[i].reg = NULL;
2129
2130 /* If this is a sibling call and the machine has register windows, the
2131 register window has to be unwound before calling the routine, so
2132 arguments have to go into the incoming registers. */
2133 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2134 args[i].tail_call_reg
2135 = targetm.calls.function_incoming_arg (args_so_far, arg);
2136 else
2137 args[i].tail_call_reg = args[i].reg;
2138
2139 if (args[i].reg)
2140 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
2141
2142 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
2143
2144 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2145 it means that we are to pass this arg in the register(s) designated
2146 by the PARALLEL, but also to pass it in the stack. */
2147 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2148 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2149 args[i].pass_on_stack = 1;
2150
2151 /* If this is an addressable type, we must preallocate the stack
2152 since we must evaluate the object into its final location.
2153
2154 If this is to be passed in both registers and the stack, it is simpler
2155 to preallocate. */
2156 if (TREE_ADDRESSABLE (type)
2157 || (args[i].pass_on_stack && args[i].reg != 0))
2158 *must_preallocate = 1;
2159
2160 /* Compute the stack-size of this argument. */
2161 if (args[i].reg == 0 || args[i].partial != 0
2162 || reg_parm_stack_space > 0
2163 || args[i].pass_on_stack)
2164 locate_and_pad_parm (mode, type,
2165 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2166 1,
2167 #else
2168 args[i].reg != 0,
2169 #endif
2170 reg_parm_stack_space,
2171 args[i].pass_on_stack ? 0 : args[i].partial,
2172 fndecl, args_size, &args[i].locate);
2173 #ifdef BLOCK_REG_PADDING
2174 else
2175 /* The argument is passed entirely in registers. See at which
2176 end it should be padded. */
2177 args[i].locate.where_pad =
2178 BLOCK_REG_PADDING (mode, type,
2179 int_size_in_bytes (type) <= UNITS_PER_WORD);
2180 #endif
2181
2182 /* Update ARGS_SIZE, the total stack space for args so far. */
2183
2184 args_size->constant += args[i].locate.size.constant;
2185 if (args[i].locate.size.var)
2186 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2187
2188 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2189 have been used, etc. */
2190
2191 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2192 promoted_mode used for function_arg above. However, the
2193 corresponding handling of incoming arguments in function.c
2194 does pass the promoted mode. */
2195 function_arg_info arg_to_skip (type, TYPE_MODE (type),
2196 argpos < n_named_args);
2197 targetm.calls.function_arg_advance (args_so_far, arg_to_skip);
2198
2199 /* Store argument values for functions decorated with attribute
2200 alloc_size. */
2201 if (argpos == alloc_idx[0])
2202 alloc_args[0] = args[i].tree_value;
2203 else if (argpos == alloc_idx[1])
2204 alloc_args[1] = args[i].tree_value;
2205 }
2206
2207 if (alloc_args[0])
2208 {
2209 /* Check the arguments of functions decorated with attribute
2210 alloc_size. */
2211 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2212 }
2213
2214 /* Detect passing non-string arguments to functions expecting
2215 nul-terminated strings. */
2216 maybe_warn_nonstring_arg (fndecl, exp);
2217 }
2218
2219 /* Update ARGS_SIZE to contain the total size for the argument block.
2220 Return the original constant component of the argument block's size.
2221
2222 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2223 for arguments passed in registers. */
2224
2225 static poly_int64
2226 compute_argument_block_size (int reg_parm_stack_space,
2227 struct args_size *args_size,
2228 tree fndecl ATTRIBUTE_UNUSED,
2229 tree fntype ATTRIBUTE_UNUSED,
2230 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2231 {
2232 poly_int64 unadjusted_args_size = args_size->constant;
2233
2234 /* For accumulate outgoing args mode we don't need to align, since the frame
2235 will already be aligned. Align to STACK_BOUNDARY in order to prevent
2236 backends from generating misaligned frame sizes. */
2237 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2238 preferred_stack_boundary = STACK_BOUNDARY;
2239
2240 /* Compute the actual size of the argument block required. The variable
2241 and constant sizes must be combined, the size may have to be rounded,
2242 and there may be a minimum required size. */
2243
2244 if (args_size->var)
2245 {
2246 args_size->var = ARGS_SIZE_TREE (*args_size);
2247 args_size->constant = 0;
2248
2249 preferred_stack_boundary /= BITS_PER_UNIT;
2250 if (preferred_stack_boundary > 1)
2251 {
2252 /* We don't handle this case yet. To handle it correctly we have
2253 to add the delta, round and subtract the delta.
2254 Currently no machine description requires this support. */
2255 gcc_assert (multiple_p (stack_pointer_delta,
2256 preferred_stack_boundary));
2257 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2258 }
2259
2260 if (reg_parm_stack_space > 0)
2261 {
2262 args_size->var
2263 = size_binop (MAX_EXPR, args_size->var,
2264 ssize_int (reg_parm_stack_space));
2265
2266 /* The area corresponding to register parameters is not to be counted
2267 in the size of the block we need, so make the adjustment. */
2268 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2269 args_size->var
2270 = size_binop (MINUS_EXPR, args_size->var,
2271 ssize_int (reg_parm_stack_space));
2272 }
2273 }
2274 else
2275 {
2276 preferred_stack_boundary /= BITS_PER_UNIT;
2277 if (preferred_stack_boundary < 1)
2278 preferred_stack_boundary = 1;
2279 args_size->constant = (aligned_upper_bound (args_size->constant
2280 + stack_pointer_delta,
2281 preferred_stack_boundary)
2282 - stack_pointer_delta);
2283
2284 args_size->constant = upper_bound (args_size->constant,
2285 reg_parm_stack_space);
2286
2287 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2288 args_size->constant -= reg_parm_stack_space;
2289 }
2290 return unadjusted_args_size;
2291 }
2292
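/* A worked example of the constant path above, with illustrative
   values: ARGS_SIZE->CONSTANT == 20, STACK_POINTER_DELTA == 0,
   REG_PARM_STACK_SPACE == 0 and a preferred boundary of 16 bytes
   round the block up to 32 bytes, while the returned unadjusted
   size stays 20.  */
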
2293 /* Precompute parameters as needed for a function call.
2294
2295 FLAGS is mask of ECF_* constants.
2296
2297 NUM_ACTUALS is the number of arguments.
2298
2299 ARGS is an array containing information for each argument; this
2300 routine fills in the INITIAL_VALUE and VALUE fields for each
2301 precomputed argument. */
2302
2303 static void
2304 precompute_arguments (int num_actuals, struct arg_data *args)
2305 {
2306 int i;
2307
2308 /* If this is a libcall, then precompute all arguments so that we do not
2309 get extraneous instructions emitted as part of the libcall sequence. */
2310
2311 /* If we preallocated the stack space, and some arguments must be passed
2312 on the stack, then we must precompute any parameter which contains a
2313 function call which will store arguments on the stack.
2314 Otherwise, evaluating the parameter may clobber previous parameters
2315 which have already been stored into the stack. (We have code to avoid
2316 such a case by saving the outgoing stack arguments, but it results in
2317 worse code.) */
2318 if (!ACCUMULATE_OUTGOING_ARGS)
2319 return;
2320
2321 for (i = 0; i < num_actuals; i++)
2322 {
2323 tree type;
2324 machine_mode mode;
2325
2326 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2327 continue;
2328
2329 /* If this is an addressable type, we cannot pre-evaluate it. */
2330 type = TREE_TYPE (args[i].tree_value);
2331 gcc_assert (!TREE_ADDRESSABLE (type));
2332
2333 args[i].initial_value = args[i].value
2334 = expand_normal (args[i].tree_value);
2335
2336 mode = TYPE_MODE (type);
2337 if (mode != args[i].mode)
2338 {
2339 int unsignedp = args[i].unsignedp;
2340 args[i].value
2341 = convert_modes (args[i].mode, mode,
2342 args[i].value, args[i].unsignedp);
2343
2344 /* CSE will replace this only if it contains args[i].value
2345 pseudo, so convert it down to the declared mode using
2346 a SUBREG. */
2347 if (REG_P (args[i].value)
2348 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2349 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2350 {
2351 args[i].initial_value
2352 = gen_lowpart_SUBREG (mode, args[i].value);
2353 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2354 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2355 }
2356 }
2357 }
2358 }
2359
2360 /* Given the current state of MUST_PREALLOCATE and information about
2361 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2362 compute and return the final value for MUST_PREALLOCATE. */
2363
2364 static int
2365 finalize_must_preallocate (int must_preallocate, int num_actuals,
2366 struct arg_data *args, struct args_size *args_size)
2367 {
2368 /* See if we have or want to preallocate stack space.
2369
2370 If we would have to push a partially-in-regs parm
2371 before other stack parms, preallocate stack space instead.
2372
2373 If the size of some parm is not a multiple of the required stack
2374 alignment, we must preallocate.
2375
2376 If the total size of arguments that would otherwise create a copy in
2377 a temporary (such as a CALL) is more than half the total argument list
2378 size, preallocation is faster.
2379
2380 Another reason to preallocate is if we have a machine (like the m88k)
2381 where stack alignment is required to be maintained between every
2382 pair of insns, not just when the call is made. However, we assume here
2383 that such machines either do not have push insns (and hence preallocation
2384 would occur anyway) or the problem is taken care of with
2385 PUSH_ROUNDING. */
2386
2387 if (! must_preallocate)
2388 {
2389 int partial_seen = 0;
2390 poly_int64 copy_to_evaluate_size = 0;
2391 int i;
2392
2393 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2394 {
2395 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2396 partial_seen = 1;
2397 else if (partial_seen && args[i].reg == 0)
2398 must_preallocate = 1;
2399
2400 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2401 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2402 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2403 || TREE_CODE (args[i].tree_value) == COND_EXPR
2404 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2405 copy_to_evaluate_size
2406 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2407 }
2408
2409 if (maybe_ne (args_size->constant, 0)
2410 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2411 must_preallocate = 1;
2412 }
2413 return must_preallocate;
2414 }
2415
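/* A worked example of the size heuristic above, with illustrative
   values: for ARGS_SIZE->CONSTANT == 64 and a single BLKmode
   CALL_EXPR argument of 40 bytes, COPY_TO_EVALUATE_SIZE * 2 == 80
   is at least 64, so preallocation is chosen as the faster
   strategy.  */
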
2416 /* If we preallocated stack space, compute the address of each argument
2417 and store it into the ARGS array.
2418
2419 We need not ensure it is a valid memory address here; it will be
2420 validized when it is used.
2421
2422 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2423
2424 static void
2425 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2426 {
2427 if (argblock)
2428 {
2429 rtx arg_reg = argblock;
2430 int i;
2431 poly_int64 arg_offset = 0;
2432
2433 if (GET_CODE (argblock) == PLUS)
2434 {
2435 arg_reg = XEXP (argblock, 0);
2436 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2437 }
2438
2439 for (i = 0; i < num_actuals; i++)
2440 {
2441 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2442 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2443 rtx addr;
2444 unsigned int align, boundary;
2445 poly_uint64 units_on_stack = 0;
2446 machine_mode partial_mode = VOIDmode;
2447
2448 /* Skip this parm if it will not be passed on the stack. */
2449 if (! args[i].pass_on_stack
2450 && args[i].reg != 0
2451 && args[i].partial == 0)
2452 continue;
2453
2454 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2455 continue;
2456
2457 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2458 addr = plus_constant (Pmode, addr, arg_offset);
2459
2460 if (args[i].partial != 0)
2461 {
2462 /* Only part of the parameter is being passed on the stack.
2463 Generate a simple memory reference of the correct size. */
2464 units_on_stack = args[i].locate.size.constant;
2465 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2466 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2467 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2468 set_mem_size (args[i].stack, units_on_stack);
2469 }
2470 else
2471 {
2472 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2473 set_mem_attributes (args[i].stack,
2474 TREE_TYPE (args[i].tree_value), 1);
2475 }
2476 align = BITS_PER_UNIT;
2477 boundary = args[i].locate.boundary;
2478 poly_int64 offset_val;
2479 if (args[i].locate.where_pad != PAD_DOWNWARD)
2480 align = boundary;
2481 else if (poly_int_rtx_p (offset, &offset_val))
2482 {
2483 align = least_bit_hwi (boundary);
2484 unsigned int offset_align
2485 = known_alignment (offset_val) * BITS_PER_UNIT;
2486 if (offset_align != 0)
2487 align = MIN (align, offset_align);
2488 }
2489 set_mem_align (args[i].stack, align);
2490
2491 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2492 addr = plus_constant (Pmode, addr, arg_offset);
2493
2494 if (args[i].partial != 0)
2495 {
2496 /* Only part of the parameter is being passed on the stack.
2497 Generate a simple memory reference of the correct
2498 size. */
2499 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2500 set_mem_size (args[i].stack_slot, units_on_stack);
2501 }
2502 else
2503 {
2504 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2505 set_mem_attributes (args[i].stack_slot,
2506 TREE_TYPE (args[i].tree_value), 1);
2507 }
2508 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2509
2510 /* Function incoming arguments may overlap with sibling call
2511 outgoing arguments and we cannot allow reordering of reads
2512 from function arguments with stores to outgoing arguments
2513 of sibling calls. */
2514 set_mem_alias_set (args[i].stack, 0);
2515 set_mem_alias_set (args[i].stack_slot, 0);
2516 }
2517 }
2518 }
2519
2520 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2521 in a call instruction.
2522
2523 FNDECL is the tree node for the target function. For an indirect call
2524 FNDECL will be NULL_TREE.
2525
2526 ADDR is the operand 0 of CALL_EXPR for this call. */
2527
2528 static rtx
2529 rtx_for_function_call (tree fndecl, tree addr)
2530 {
2531 rtx funexp;
2532
2533 /* Get the function to call, in the form of RTL. */
2534 if (fndecl)
2535 {
2536 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2537 TREE_USED (fndecl) = 1;
2538
2539 /* Get a SYMBOL_REF rtx for the function address. */
2540 funexp = XEXP (DECL_RTL (fndecl), 0);
2541 }
2542 else
2543 /* Generate an rtx (probably a pseudo-register) for the address. */
2544 {
2545 push_temp_slots ();
2546 funexp = expand_normal (addr);
2547 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2548 }
2549 return funexp;
2550 }
2551
2552 /* Return the static chain for this function, if any. */
2553
2554 rtx
2555 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2556 {
2557 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2558 return NULL;
2559
2560 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2561 }
2562
2563 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2564 static struct
2565 {
2566 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2567 or NULL_RTX if none has been scanned yet. */
2568 rtx_insn *scan_start;
2569 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2570 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2571 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2572 with fixed offset, or PC if this is with variable or unknown offset. */
2573 vec<rtx> cache;
2574 } internal_arg_pointer_exp_state;
2575
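/* An illustrative cache state (the pseudo number is made up): after
   scanning

     (set (reg 80) (plus (reg internal_arg_pointer) (const_int 16)))

   the entry for pseudo 80 is (const_int 16); if a later insn sets
   pseudo 80 again, the entry is downgraded to PC (unknown offset).  */
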
2576 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2577
2578 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2579 the tail call sequence, starting with first insn that hasn't been
2580 scanned yet, and note for each pseudo on the LHS whether it is based
2581 on crtl->args.internal_arg_pointer or not, and what offset from
2582 that pointer it has. */
2583
2584 static void
2585 internal_arg_pointer_based_exp_scan (void)
2586 {
2587 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2588
2589 if (scan_start == NULL_RTX)
2590 insn = get_insns ();
2591 else
2592 insn = NEXT_INSN (scan_start);
2593
2594 while (insn)
2595 {
2596 rtx set = single_set (insn);
2597 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2598 {
2599 rtx val = NULL_RTX;
2600 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2601 /* Punt on pseudos set multiple times. */
2602 if (idx < internal_arg_pointer_exp_state.cache.length ()
2603 && (internal_arg_pointer_exp_state.cache[idx]
2604 != NULL_RTX))
2605 val = pc_rtx;
2606 else
2607 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2608 if (val != NULL_RTX)
2609 {
2610 if (idx >= internal_arg_pointer_exp_state.cache.length ())
2611 internal_arg_pointer_exp_state.cache
2612 .safe_grow_cleared (idx + 1);
2613 internal_arg_pointer_exp_state.cache[idx] = val;
2614 }
2615 }
2616 if (NEXT_INSN (insn) == NULL_RTX)
2617 scan_start = insn;
2618 insn = NEXT_INSN (insn);
2619 }
2620
2621 internal_arg_pointer_exp_state.scan_start = scan_start;
2622 }
2623
2624 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2625 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2626 it with fixed offset, or PC if this is with variable or unknown offset.
2627 TOPLEVEL is true if the function is invoked at the topmost level. */
2628
2629 static rtx
2630 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2631 {
2632 if (CONSTANT_P (rtl))
2633 return NULL_RTX;
2634
2635 if (rtl == crtl->args.internal_arg_pointer)
2636 return const0_rtx;
2637
2638 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2639 return NULL_RTX;
2640
2641 poly_int64 offset;
2642 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2643 {
2644 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2645 if (val == NULL_RTX || val == pc_rtx)
2646 return val;
2647 return plus_constant (Pmode, val, offset);
2648 }
2649
2650 /* When called at the topmost level, scan pseudo assignments in between the
2651 last scanned instruction in the tail call sequence and the latest insn
2652 in that sequence. */
2653 if (toplevel)
2654 internal_arg_pointer_based_exp_scan ();
2655
2656 if (REG_P (rtl))
2657 {
2658 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2659 if (idx < internal_arg_pointer_exp_state.cache.length ())
2660 return internal_arg_pointer_exp_state.cache[idx];
2661
2662 return NULL_RTX;
2663 }
2664
2665 subrtx_iterator::array_type array;
2666 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2667 {
2668 const_rtx x = *iter;
2669 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2670 return pc_rtx;
2671 if (MEM_P (x))
2672 iter.skip_subrtxes ();
2673 }
2674
2675 return NULL_RTX;
2676 }
2677
2678 /* Return true if SIZE bytes starting from address ADDR might overlap an
2679 already-clobbered argument area. This function is used to determine
2680 if we should give up a sibcall. */
2681
2682 static bool
2683 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
2684 {
2685 poly_int64 i;
2686 unsigned HOST_WIDE_INT start, end;
2687 rtx val;
2688
2689 if (bitmap_empty_p (stored_args_map)
2690 && stored_args_watermark == HOST_WIDE_INT_M1U)
2691 return false;
2692 val = internal_arg_pointer_based_exp (addr, true);
2693 if (val == NULL_RTX)
2694 return false;
2695 else if (!poly_int_rtx_p (val, &i))
2696 return true;
2697
2698 if (known_eq (size, 0U))
2699 return false;
2700
2701 if (STACK_GROWS_DOWNWARD)
2702 i -= crtl->args.pretend_args_size;
2703 else
2704 i += crtl->args.pretend_args_size;
2705
2706 if (ARGS_GROW_DOWNWARD)
2707 i = -i - size;
2708
2709 /* We can ignore any references to the function's pretend args,
2710 which at this point would manifest as negative values of I. */
2711 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2712 return false;
2713
2714 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2715 if (!(i + size).is_constant (&end))
2716 end = HOST_WIDE_INT_M1U;
2717
2718 if (end > stored_args_watermark)
2719 return true;
2720
2721 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2722 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2723 if (bitmap_bit_p (stored_args_map, k))
2724 return true;
2725
2726 return false;
2727 }
2728
2729 /* Do the register loads required for any wholly-register parms or any
2730 parms which are passed both on the stack and in a register. Their
2731 expressions were already evaluated.
2732
2733 Mark all register-parms as living through the call, putting these USE
2734 insns in the CALL_INSN_FUNCTION_USAGE field.
2735
2736 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2737 checking, setting *SIBCALL_FAILURE if appropriate. */
2738
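/* Illustrative values for the NREGS bookkeeping below: on a target
   with UNITS_PER_WORD == 4, an argument with PARTIAL == 8 is moved a
   word at a time with NREGS == 2, while a simple register argument
   uses NREGS == -1 and a single move insn.  */
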
2739 static void
2740 load_register_parameters (struct arg_data *args, int num_actuals,
2741 rtx *call_fusage, int flags, int is_sibcall,
2742 int *sibcall_failure)
2743 {
2744 int i, j;
2745
2746 for (i = 0; i < num_actuals; i++)
2747 {
2748 rtx reg = ((flags & ECF_SIBCALL)
2749 ? args[i].tail_call_reg : args[i].reg);
2750 if (reg)
2751 {
2752 int partial = args[i].partial;
2753 int nregs;
2754 poly_int64 size = 0;
2755 HOST_WIDE_INT const_size = 0;
2756 rtx_insn *before_arg = get_last_insn ();
2757 /* Set non-negative if we must move a word at a time, even if
2758 just one word (e.g., partial == 4 && mode == DFmode). Set
2759 to -1 if we just use a normal move insn. This value can be
2760 zero if the argument is a zero-size structure. */
2761 nregs = -1;
2762 if (GET_CODE (reg) == PARALLEL)
2763 ;
2764 else if (partial)
2765 {
2766 gcc_assert (partial % UNITS_PER_WORD == 0);
2767 nregs = partial / UNITS_PER_WORD;
2768 }
2769 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2770 {
2771 /* Variable-sized parameters should be described by a
2772 PARALLEL instead. */
2773 const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2774 gcc_assert (const_size >= 0);
2775 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2776 size = const_size;
2777 }
2778 else
2779 size = GET_MODE_SIZE (args[i].mode);
2780
2781 /* Handle calls that pass values in multiple non-contiguous
2782 locations. The Irix 6 ABI has examples of this. */
2783
2784 if (GET_CODE (reg) == PARALLEL)
2785 emit_group_move (reg, args[i].parallel_value);
2786
2787 /* If simple case, just do move. If normal partial, store_one_arg
2788 has already loaded the register for us. In all other cases,
2789 load the register(s) from memory. */
2790
2791 else if (nregs == -1)
2792 {
2793 emit_move_insn (reg, args[i].value);
2794 #ifdef BLOCK_REG_PADDING
2795 /* Handle the case where we have a value that needs shifting
2796 up to the msb, e.g. a QImode value that we're padding
2797 upward on a BYTES_BIG_ENDIAN machine. */
2798 if (args[i].locate.where_pad
2799 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
2800 {
2801 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2802 if (maybe_lt (size, UNITS_PER_WORD))
2803 {
2804 rtx x;
2805 poly_int64 shift
2806 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2807
2808 /* Assigning REG here rather than a temp makes
2809 CALL_FUSAGE report the whole reg as used.
2810 Strictly speaking, the call only uses SIZE
2811 bytes at the msb end, but it doesn't seem worth
2812 generating rtl to say that. */
2813 reg = gen_rtx_REG (word_mode, REGNO (reg));
2814 x = expand_shift (LSHIFT_EXPR, word_mode,
2815 reg, shift, reg, 1);
2816 if (x != reg)
2817 emit_move_insn (reg, x);
2818 }
2819 }
2820 #endif
2821 }
2822
2823 /* If we have pre-computed the values to put in the registers in
2824 the case of non-aligned structures, copy them in now. */
2825
2826 else if (args[i].n_aligned_regs != 0)
2827 for (j = 0; j < args[i].n_aligned_regs; j++)
2828 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2829 args[i].aligned_regs[j]);
2830
2831 else if (partial == 0 || args[i].pass_on_stack)
2832 {
2833 /* SIZE and CONST_SIZE are 0 for partial arguments and
2834 the size of a BLKmode type otherwise. */
2835 gcc_checking_assert (known_eq (size, const_size));
2836 rtx mem = validize_mem (copy_rtx (args[i].value));
2837
2838 /* Check for overlap with already clobbered argument area,
2839 providing that this has non-zero size. */
2840 if (is_sibcall
2841 && const_size != 0
2842 && (mem_might_overlap_already_clobbered_arg_p
2843 (XEXP (args[i].value, 0), const_size)))
2844 *sibcall_failure = 1;
2845
2846 if (const_size % UNITS_PER_WORD == 0
2847 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2848 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2849 else
2850 {
2851 if (nregs > 1)
2852 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2853 args[i].mode);
2854 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2855 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2856 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
2857 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2858 word_mode, word_mode, false,
2859 NULL);
2860 if (BYTES_BIG_ENDIAN)
2861 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2862 BITS_PER_WORD - bitsize, dest, 1);
2863 if (x != dest)
2864 emit_move_insn (dest, x);
2865 }
2866
2867 /* Handle a BLKmode that needs shifting. */
2868 if (nregs == 1 && const_size < UNITS_PER_WORD
2869 #ifdef BLOCK_REG_PADDING
2870 && args[i].locate.where_pad == PAD_DOWNWARD
2871 #else
2872 && BYTES_BIG_ENDIAN
2873 #endif
2874 )
2875 {
2876 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2877 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
2878 enum tree_code dir = (BYTES_BIG_ENDIAN
2879 ? RSHIFT_EXPR : LSHIFT_EXPR);
2880 rtx x;
2881
2882 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2883 if (x != dest)
2884 emit_move_insn (dest, x);
2885 }
2886 }
2887
2888 /* When a parameter is a block, and perhaps in other cases, it is
2889 possible that it did a load from an argument slot that was
2890 already clobbered. */
2891 if (is_sibcall
2892 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2893 *sibcall_failure = 1;
2894
2895 /* Handle calls that pass values in multiple non-contiguous
2896 locations. The Irix 6 ABI has examples of this. */
2897 if (GET_CODE (reg) == PARALLEL)
2898 use_group_regs (call_fusage, reg);
2899 else if (nregs == -1)
2900 use_reg_mode (call_fusage, reg,
2901 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2902 else if (nregs > 0)
2903 use_regs (call_fusage, REGNO (reg), nregs);
2904 }
2905 }
2906 }
2907
2908 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2909 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2910 bytes, then we would need to push some additional bytes to pad the
2911 arguments. So, we try to compute an adjustment to the stack pointer for an
2912 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2913 bytes. Then, when the arguments are pushed the stack will be perfectly
2914 aligned.
2915
2916 Return true if this optimization is possible, storing the adjustment
2917 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2918 bytes that should be popped after the call. */
2919
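/* A worked example with illustrative values: for
   PENDING_STACK_ADJUST == 64, UNADJUSTED_ARGS_SIZE == 4,
   STACK_POINTER_DELTA == 0 and a 16-byte boundary, popping
   ADJUSTMENT == 64 - 12 == 52 bytes leaves the stack under-aligned
   by 4 bytes; pushing the 4 bytes of arguments then realigns it,
   and ARGS_SIZE->CONSTANT becomes 64 - 52 + 4 == 16 bytes to pop
   after the call.  */
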
2920 static bool
2921 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2922 poly_int64 unadjusted_args_size,
2923 struct args_size *args_size,
2924 unsigned int preferred_unit_stack_boundary)
2925 {
2926 /* The number of bytes to pop so that the stack will be
2927 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2928 poly_int64 adjustment;
2929 /* The alignment of the stack after the arguments are pushed, if we
2930 just pushed the arguments without adjusting the stack here. */
2931 unsigned HOST_WIDE_INT unadjusted_alignment;
2932
2933 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2934 preferred_unit_stack_boundary,
2935 &unadjusted_alignment))
2936 return false;
2937
2938 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2939 as possible -- leaving just enough to cancel out the
2940 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2941 PENDING_STACK_ADJUST is non-negative, and congruent to
2942 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2943
2944 /* Begin by trying to pop all the bytes. */
2945 unsigned HOST_WIDE_INT tmp_misalignment;
2946 if (!known_misalignment (pending_stack_adjust,
2947 preferred_unit_stack_boundary,
2948 &tmp_misalignment))
2949 return false;
2950 unadjusted_alignment -= tmp_misalignment;
2951 adjustment = pending_stack_adjust;
2952 /* Push enough additional bytes that the stack will be aligned
2953 after the arguments are pushed. */
2954 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2955 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2956
2957 /* We need to know whether the adjusted argument size
2958 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2959 or a deallocation. */
2960 if (!ordered_p (adjustment, unadjusted_args_size))
2961 return false;
2962
2963 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2964 bytes after the call. The right number is the entire
2965 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2966 by the arguments in the first place. */
2967 args_size->constant
2968 = pending_stack_adjust - adjustment + unadjusted_args_size;
2969
2970 *adjustment_out = adjustment;
2971 return true;
2972 }
2973
2974 /* Scan expression X to see whether it dereferences any argument slots
2975 we have already clobbered with tail call arguments (as noted in the
2976 stored_args_map bitmap).
2977 Return nonzero if X dereferences such an argument slot,
2978 zero otherwise. */
2979
2980 static int
2981 check_sibcall_argument_overlap_1 (rtx x)
2982 {
2983 RTX_CODE code;
2984 int i, j;
2985 const char *fmt;
2986
2987 if (x == NULL_RTX)
2988 return 0;
2989
2990 code = GET_CODE (x);
2991
2992 /* We need not check the operands of the CALL expression itself. */
2993 if (code == CALL)
2994 return 0;
2995
2996 if (code == MEM)
2997 return (mem_might_overlap_already_clobbered_arg_p
2998 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
2999
3000 /* Scan all subexpressions. */
3001 fmt = GET_RTX_FORMAT (code);
3002 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3003 {
3004 if (*fmt == 'e')
3005 {
3006 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3007 return 1;
3008 }
3009 else if (*fmt == 'E')
3010 {
3011 for (j = 0; j < XVECLEN (x, i); j++)
3012 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3013 return 1;
3014 }
3015 }
3016 return 0;
3017 }
3018
3019 /* Scan the sequence after INSN to see whether it dereferences any
3020 argument slots we have already clobbered with tail call arguments
3021 (as noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP,
3022 add the stack slots for ARG to the stored_args_map bitmap afterwards
3023 (when ARG is a register, MARK_STORED_ARGS_MAP should be 0). Return
3024 nonzero if the sequence after INSN dereferences such slots, zero otherwise. */
3025
3026 static int
3027 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3028 int mark_stored_args_map)
3029 {
3030 poly_uint64 low, high;
3031 unsigned HOST_WIDE_INT const_low, const_high;
3032
3033 if (insn == NULL_RTX)
3034 insn = get_insns ();
3035 else
3036 insn = NEXT_INSN (insn);
3037
3038 for (; insn; insn = NEXT_INSN (insn))
3039 if (INSN_P (insn)
3040 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3041 break;
3042
3043 if (mark_stored_args_map)
3044 {
3045 if (ARGS_GROW_DOWNWARD)
3046 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3047 else
3048 low = arg->locate.slot_offset.constant;
3049 high = low + arg->locate.size.constant;
3050
3051 const_low = constant_lower_bound (low);
3052 if (high.is_constant (&const_high))
3053 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3054 bitmap_set_bit (stored_args_map, i);
3055 else
3056 stored_args_watermark = MIN (stored_args_watermark, const_low);
3057 }
3058 return insn != NULL_RTX;
3059 }
3060
3061 /* Given that a function returns a value of mode MODE at the most
3062 significant end of hard register VALUE, shift VALUE left or right
3063 as specified by LEFT_P. Return true if some action was needed. */
3064
3065 bool
3066 shift_return_value (machine_mode mode, bool left_p, rtx value)
3067 {
3068 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3069 machine_mode value_mode = GET_MODE (value);
3070 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3071
3072 if (known_eq (shift, 0))
3073 return false;
3074
3075 /* Use ashr rather than lshr for right shifts. This is for the benefit
3076 of the MIPS port, which requires SImode values to be sign-extended
3077 when stored in 64-bit registers. */
3078 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3079 value, gen_int_shift_amount (value_mode, shift),
3080 value, 1, OPTAB_WIDEN))
3081 gcc_unreachable ();
3082 return true;
3083 }
3084
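/* For example (hypothetical target values): a 32-bit SImode value
   returned at the most significant end of a 64-bit DImode register
   gives SHIFT == 32; with LEFT_P false, an arithmetic right shift is
   used, matching the sign-extension requirement noted above for
   MIPS.  */
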
3085 /* If X is a likely-spilled register value, copy it to a pseudo
3086 register and return that register. Return X otherwise. */
3087
3088 static rtx
3089 avoid_likely_spilled_reg (rtx x)
3090 {
3091 rtx new_rtx;
3092
3093 if (REG_P (x)
3094 && HARD_REGISTER_P (x)
3095 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3096 {
3097 /* Make sure that we generate a REG rather than a CONCAT.
3098 Moves into CONCATs can need nontrivial instructions,
3099 and the whole point of this function is to avoid
3100 using the hard register directly in such a situation. */
3101 generating_concat_p = 0;
3102 new_rtx = gen_reg_rtx (GET_MODE (x));
3103 generating_concat_p = 1;
3104 emit_move_insn (new_rtx, x);
3105 return new_rtx;
3106 }
3107 return x;
3108 }
3109
3110 /* Helper function for expand_call.
3111 Return false if EXP is not implementable as a sibling call. */
3112
3113 static bool
3114 can_implement_as_sibling_call_p (tree exp,
3115 rtx structure_value_addr,
3116 tree funtype,
3117 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3118 tree fndecl,
3119 int flags,
3120 tree addr,
3121 const args_size &args_size)
3122 {
3123 if (!targetm.have_sibcall_epilogue ())
3124 {
3125 maybe_complain_about_tail_call
3126 (exp,
3127 "machine description does not have"
3128 " a sibcall_epilogue instruction pattern");
3129 return false;
3130 }
3131
3132 /* Doing sibling call optimization needs some work, since
3133 structure_value_addr can be allocated on the stack.
3134 It does not seem worth the effort since few optimizable
3135 sibling calls will return a structure. */
3136 if (structure_value_addr != NULL_RTX)
3137 {
3138 maybe_complain_about_tail_call (exp, "callee returns a structure");
3139 return false;
3140 }
3141
3142 #ifdef REG_PARM_STACK_SPACE
3143 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
3144 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3145 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3146 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3147 {
3148 maybe_complain_about_tail_call (exp,
3149 "inconsistent size of stack space"
3150 " allocated for arguments which are"
3151 " passed in registers");
3152 return false;
3153 }
3154 #endif
3155
3156 /* Check whether the target is able to optimize the call
3157 into a sibcall. */
3158 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3159 {
3160 maybe_complain_about_tail_call (exp,
3161 "target is not able to optimize the"
3162 " call into a sibling call");
3163 return false;
3164 }
3165
3166 /* Functions that do not return exactly once may not be sibcall
3167 optimized. */
3168 if (flags & ECF_RETURNS_TWICE)
3169 {
3170 maybe_complain_about_tail_call (exp, "callee returns twice");
3171 return false;
3172 }
3173 if (flags & ECF_NORETURN)
3174 {
3175 maybe_complain_about_tail_call (exp, "callee does not return");
3176 return false;
3177 }
3178
3179 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3180 {
3181 maybe_complain_about_tail_call (exp, "volatile function type");
3182 return false;
3183 }
3184
3185 /* If the called function is nested in the current one, it might access
3186 some of the caller's arguments, but could clobber them beforehand if
3187 the argument areas are shared. */
3188 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3189 {
3190 maybe_complain_about_tail_call (exp, "nested function");
3191 return false;
3192 }
3193
3194 /* If this function requires more stack slots than the current
3195 function, we cannot change it into a sibling call.
3196 crtl->args.pretend_args_size is not part of the
3197 stack allocated by our caller. */
3198 if (maybe_gt (args_size.constant,
3199 crtl->args.size - crtl->args.pretend_args_size))
3200 {
3201 maybe_complain_about_tail_call (exp,
3202 "callee required more stack slots"
3203 " than the caller");
3204 return false;
3205 }
3206
3207 /* If the callee pops its own arguments, then it must pop exactly
3208 the same number of arguments as the current function. */
3209 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3210 args_size.constant),
3211 targetm.calls.return_pops_args (current_function_decl,
3212 TREE_TYPE
3213 (current_function_decl),
3214 crtl->args.size)))
3215 {
3216 maybe_complain_about_tail_call (exp,
3217 "inconsistent number of"
3218 " popped arguments");
3219 return false;
3220 }
3221
3222 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3223 {
3224 maybe_complain_about_tail_call (exp, "frontend does not support"
3225 " sibling call");
3226 return false;
3227 }
3228
3229 /* All checks passed. */
3230 return true;
3231 }
3232
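/* As a user-level illustration (example code, not part of this
   file), a call in tail position such as

     int callee (int);
     int caller (int x) { return callee (x + 1); }

   is a sibcall candidate exactly when every check above passes; any
   single failure falls back to a normal call, or to an error for
   calls that must be tail calls.  */
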
3233 /* Update stack alignment when the parameter is passed on the stack,
3234 since the outgoing parameter requires extra alignment on the calling
3235 function side. */
3236
3237 static void
3238 update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3239 {
3240 if (crtl->stack_alignment_needed < locate->boundary)
3241 crtl->stack_alignment_needed = locate->boundary;
3242 if (crtl->preferred_stack_boundary < locate->boundary)
3243 crtl->preferred_stack_boundary = locate->boundary;
3244 }
3245
3246 /* Generate all the code for a CALL_EXPR exp
3247 and return an rtx for its value.
3248 Store the value in TARGET (specified as an rtx) if convenient.
3249 If the value is stored in TARGET then TARGET is returned.
3250 If IGNORE is nonzero, then we ignore the value of the function call. */
3251
3252 rtx
3253 expand_call (tree exp, rtx target, int ignore)
3254 {
3255 /* Nonzero if we are currently expanding a call. */
3256 static int currently_expanding_call = 0;
3257
3258 /* RTX for the function to be called. */
3259 rtx funexp;
3260 /* Sequence of insns to perform a normal "call". */
3261 rtx_insn *normal_call_insns = NULL;
3262 /* Sequence of insns to perform a tail "call". */
3263 rtx_insn *tail_call_insns = NULL;
3264 /* Data type of the function. */
3265 tree funtype;
3266 tree type_arg_types;
3267 tree rettype;
3268 /* Declaration of the function being called,
3269 or 0 if the function is computed (not known by name). */
3270 tree fndecl = 0;
3271 /* The type of the function being called. */
3272 tree fntype;
3273 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3274 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3275 int pass;
3276
3277 /* Register in which non-BLKmode value will be returned,
3278 or 0 if no value or if value is BLKmode. */
3279 rtx valreg;
3280 /* Address where we should return a BLKmode value;
3281 0 if value not BLKmode. */
3282 rtx structure_value_addr = 0;
3283 /* Nonzero if that address is being passed by treating it as
3284 an extra, implicit first parameter. Otherwise,
3285 it is passed by being copied directly into struct_value_rtx. */
3286 int structure_value_addr_parm = 0;
3287 /* Holds the value of implicit argument for the struct value. */
3288 tree structure_value_addr_value = NULL_TREE;
3289 /* Size of aggregate value wanted, or zero if none wanted
3290 or if we are using the non-reentrant PCC calling convention
3291 or expecting the value in registers. */
3292 poly_int64 struct_value_size = 0;
3293 /* Nonzero if called function returns an aggregate in memory PCC style,
3294 by returning the address of where to find it. */
3295 int pcc_struct_value = 0;
3296 rtx struct_value = 0;
3297
3298 /* Number of actual parameters in this call, including struct value addr. */
3299 int num_actuals;
3300 /* Number of named args. Args after this are anonymous ones
3301 and they must all go on the stack. */
3302 int n_named_args;
3303 /* Number of complex actual arguments that need to be split. */
3304 int num_complex_actuals = 0;
3305
3306 /* Vector of information about each argument.
3307 Arguments are numbered in the order they will be pushed,
3308 not the order they are written. */
3309 struct arg_data *args;
3310
3311 /* Total size in bytes of all the stack-parms scanned so far. */
3312 struct args_size args_size;
3313 struct args_size adjusted_args_size;
3314 /* Size of arguments before any adjustments (such as rounding). */
3315 poly_int64 unadjusted_args_size;
3316 /* Data on reg parms scanned so far. */
3317 CUMULATIVE_ARGS args_so_far_v;
3318 cumulative_args_t args_so_far;
3319 /* Nonzero if a reg parm has been scanned. */
3320 int reg_parm_seen;
3321 /* Nonzero if this is an indirect function call. */
3322
3323 /* Nonzero if we must avoid push-insns in the args for this call.
3324 If stack space is allocated for register parameters, but not by the
3325 caller, then it is preallocated in the fixed part of the stack frame.
3326 So the entire argument block must then be preallocated (i.e., we
3327 ignore PUSH_ROUNDING in that case). */
3328
3329 int must_preallocate = !PUSH_ARGS;
3330
3331 /* Size of the stack reserved for parameter registers. */
3332 int reg_parm_stack_space = 0;
3333
3334 /* Address of space preallocated for stack parms
3335 (on machines that lack push insns), or 0 if space not preallocated. */
3336 rtx argblock = 0;
3337
3338 /* Mask of ECF_ and ERF_ flags. */
3339 int flags = 0;
3340 int return_flags = 0;
3341 #ifdef REG_PARM_STACK_SPACE
3342 /* Define the boundary of the register parm stack space that needs to be
3343 saved, if any. */
3344 int low_to_save, high_to_save;
3345 rtx save_area = 0; /* Place that it is saved */
3346 #endif
3347
3348 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3349 char *initial_stack_usage_map = stack_usage_map;
3350 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3351 char *stack_usage_map_buf = NULL;
3352
3353 poly_int64 old_stack_allocated;
3354
3355 /* State variables to track stack modifications. */
3356 rtx old_stack_level = 0;
3357 int old_stack_arg_under_construction = 0;
3358 poly_int64 old_pending_adj = 0;
3359 int old_inhibit_defer_pop = inhibit_defer_pop;
3360
3361 /* Some stack pointer alterations we make are performed via
3362 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3363 which we then also need to save/restore along the way. */
3364 poly_int64 old_stack_pointer_delta = 0;
3365
3366 rtx call_fusage;
3367 tree addr = CALL_EXPR_FN (exp);
3368 int i;
3369 /* The alignment of the stack, in bits. */
3370 unsigned HOST_WIDE_INT preferred_stack_boundary;
3371 /* The alignment of the stack, in bytes. */
3372 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3373 /* The static chain value to use for this call. */
3374 rtx static_chain_value;
3375 /* See if this is a "nothrow" function call. */
3376 if (TREE_NOTHROW (exp))
3377 flags |= ECF_NOTHROW;
3378
3379 /* See if we can find a DECL-node for the actual function, and get the
3380 function attributes (flags) from the function decl or type node. */
3381 fndecl = get_callee_fndecl (exp);
3382 if (fndecl)
3383 {
3384 fntype = TREE_TYPE (fndecl);
3385 flags |= flags_from_decl_or_type (fndecl);
3386 return_flags |= decl_return_flags (fndecl);
3387 }
3388 else
3389 {
3390 fntype = TREE_TYPE (TREE_TYPE (addr));
3391 flags |= flags_from_decl_or_type (fntype);
3392 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3393 flags |= ECF_BY_DESCRIPTOR;
3394 }
3395 rettype = TREE_TYPE (exp);
3396
3397 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3398
3399 /* Warn if this value is an aggregate type,
3400 regardless of which calling convention we are using for it. */
3401 if (AGGREGATE_TYPE_P (rettype))
3402 warning (OPT_Waggregate_return, "function call has aggregate value");
3403
3404 /* If the result of a non-looping pure or const function call is
3405 ignored (or void), and none of its arguments are volatile, we can
3406 avoid expanding the call and just evaluate the arguments for
3407 side-effects. */
3408 if ((flags & (ECF_CONST | ECF_PURE))
3409 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3410 && (ignore || target == const0_rtx
3411 || TYPE_MODE (rettype) == VOIDmode))
3412 {
3413 bool volatilep = false;
3414 tree arg;
3415 call_expr_arg_iterator iter;
3416
3417 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3418 if (TREE_THIS_VOLATILE (arg))
3419 {
3420 volatilep = true;
3421 break;
3422 }
3423
3424 if (! volatilep)
3425 {
3426 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3427 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3428 return const0_rtx;
3429 }
3430 }
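/* For example, given

     extern int f (int) __attribute__ ((const));

   a call `f (x);' whose result is unused reaches this point with
   IGNORE set, so only `x' is expanded for its side effects and no
   call insn is emitted at all.  (Illustrative sketch; any non-looping
   const or pure function with an ignored result is handled the same
   way, provided no argument is volatile.)  */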
3431
3432 #ifdef REG_PARM_STACK_SPACE
3433 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3434 #endif
3435
3436 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3437 && reg_parm_stack_space > 0 && PUSH_ARGS)
3438 must_preallocate = 1;
3439
3440 /* Set up a place to return a structure. */
3441
3442 /* Cater to broken compilers. */
3443 if (aggregate_value_p (exp, fntype))
3444 {
3445 /* This call returns a big structure. */
3446 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3447
3448 #ifdef PCC_STATIC_STRUCT_RETURN
3449 {
3450 pcc_struct_value = 1;
3451 }
3452 #else /* not PCC_STATIC_STRUCT_RETURN */
3453 {
3454 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3455 struct_value_size = -1;
3456
3457 /* Even if it is semantically safe to use the target as the return
3458 slot, it may not be sufficiently aligned for the return type. */
3459 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3460 && target
3461 && MEM_P (target)
3462 /* If rettype is addressable, we may not create a temporary.
3463 If target is properly aligned at runtime and the compiler
3464 just doesn't know about it, it will work fine, otherwise it
3465 will be UB. */
3466 && (TREE_ADDRESSABLE (rettype)
3467 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3468 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3469 MEM_ALIGN (target)))))
3470 structure_value_addr = XEXP (target, 0);
3471 else
3472 {
3473 /* For variable-sized objects, we must be called with a target
3474 specified. If we were to allocate space on the stack here,
3475 we would have no way of knowing when to free it. */
3476 rtx d = assign_temp (rettype, 1, 1);
3477 structure_value_addr = XEXP (d, 0);
3478 target = 0;
3479 }
3480 }
3481 #endif /* not PCC_STATIC_STRUCT_RETURN */
3482 }
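/* Illustrative sketch of the two conventions handled above: for

     struct big { int a[8]; };
     struct big g (void);

   a call `v = g ();' normally passes the address of V (or of a fresh
   temporary) as a return slot, while under PCC_STATIC_STRUCT_RETURN
   the callee returns the address of a static buffer that the caller
   then copies from -- which is also why such calls can no longer be
   treated as const or pure.  */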
3483
3484 /* Figure out the amount to which the stack should be aligned. */
3485 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3486 if (fndecl)
3487 {
3488 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3489 /* Without automatic stack alignment, we can't increase preferred
3490 stack boundary. With automatic stack alignment, it is
3491 unnecessary since unless we can guarantee that all callers will
3492 align the outgoing stack properly, callee has to align its
3493 stack anyway. */
3494 if (i
3495 && i->preferred_incoming_stack_boundary
3496 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3497 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3498 }
3499
3500 /* Operand 0 is a pointer-to-function; get the type of the function. */
3501 funtype = TREE_TYPE (addr);
3502 gcc_assert (POINTER_TYPE_P (funtype));
3503 funtype = TREE_TYPE (funtype);
3504
3505 /* Count whether there are actual complex arguments that need to be split
3506 into their real and imaginary parts. Munge the type_arg_types
3507 appropriately here as well. */
3508 if (targetm.calls.split_complex_arg)
3509 {
3510 call_expr_arg_iterator iter;
3511 tree arg;
3512 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3513 {
3514 tree type = TREE_TYPE (arg);
3515 if (type && TREE_CODE (type) == COMPLEX_TYPE
3516 && targetm.calls.split_complex_arg (type))
3517 num_complex_actuals++;
3518 }
3519 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3520 }
3521 else
3522 type_arg_types = TYPE_ARG_TYPES (funtype);
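/* For instance, on a target whose split_complex_arg hook accepts
   _Complex double, a prototype such as

     void f (int, _Complex double);

   is treated for argument-passing purposes as if it were

     void f (int, double, double);

   here num_complex_actuals counts one entry for the complex argument
   and type_arg_types receives the rewritten list.  */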
3523
3524 if (flags & ECF_MAY_BE_ALLOCA)
3525 cfun->calls_alloca = 1;
3526
3527 /* If struct_value_rtx is 0, it means pass the address
3528 as if it were an extra parameter. Put the argument expression
3529 in structure_value_addr_value. */
3530 if (structure_value_addr && struct_value == 0)
3531 {
3532 /* If structure_value_addr is a REG other than
3533 virtual_outgoing_args_rtx, we can always use it. If it
3534 is not a REG, we must always copy it into a register.
3535 If it is virtual_outgoing_args_rtx, we must copy it to another
3536 register in some cases. */
3537 rtx temp = (!REG_P (structure_value_addr)
3538 || (ACCUMULATE_OUTGOING_ARGS
3539 && stack_arg_under_construction
3540 && structure_value_addr == virtual_outgoing_args_rtx)
3541 ? copy_addr_to_reg (convert_memory_address
3542 (Pmode, structure_value_addr))
3543 : structure_value_addr);
3544
3545 structure_value_addr_value =
3546 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3547 structure_value_addr_parm = 1;
3548 }
3549
3550 /* Count the arguments and set NUM_ACTUALS. */
3551 num_actuals =
3552 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3553
3554 /* Compute number of named args.
3555 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3556
3557 if (type_arg_types != 0)
3558 n_named_args
3559 = (list_length (type_arg_types)
3560 /* Count the struct value address, if it is passed as a parm. */
3561 + structure_value_addr_parm);
3562 else
3563 /* If we know nothing, treat all args as named. */
3564 n_named_args = num_actuals;
3565
3566 /* Start updating where the next arg would go.
3567
3568 On some machines (such as the PA) indirect calls have a different
3569 calling convention than normal calls. The fourth argument in
3570 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3571 or not. */
3572 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3573 args_so_far = pack_cumulative_args (&args_so_far_v);
3574
3575 /* Now possibly adjust the number of named args.
3576 Normally, don't include the last named arg if anonymous args follow.
3577 We do include the last named arg if
3578 targetm.calls.strict_argument_naming() returns nonzero.
3579 (If no anonymous args follow, the result of list_length is actually
3580 one too large. This is harmless.)
3581
3582 If targetm.calls.pretend_outgoing_varargs_named() returns
3583 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3584 this machine will be able to place unnamed args that were passed
3585 in registers into the stack. So treat all args as named. This
3586 allows the insns emitted for a specific argument list to be
3587 independent of the function declaration.
3588
3589 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3590 we do not have any reliable way to pass unnamed args in
3591 registers, so we must force them into memory. */
3592
3593 if (type_arg_types != 0
3594 && targetm.calls.strict_argument_naming (args_so_far))
3595 ;
3596 else if (type_arg_types != 0
3597 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3598 /* Don't include the last named arg. */
3599 --n_named_args;
3600 else
3601 /* Treat all args as named. */
3602 n_named_args = num_actuals;
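/* Worked example of the counts above: for `int f (int, double)' the
   type list is int -> double -> void, so list_length yields 3 (one
   too large, harmlessly, since no anonymous args follow); for
   `int printf (const char *, ...)' the list holds only the pointer
   type, so n_named_args is 1 and every later actual is anonymous.  */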
3603
3604 /* Make a vector to hold all the information about each arg. */
3605 args = XCNEWVEC (struct arg_data, num_actuals);
3606
3607 /* Build up entries in the ARGS array, compute the size of the
3608 arguments into ARGS_SIZE, etc. */
3609 initialize_argument_information (num_actuals, args, &args_size,
3610 n_named_args, exp,
3611 structure_value_addr_value, fndecl, fntype,
3612 args_so_far, reg_parm_stack_space,
3613 &old_stack_level, &old_pending_adj,
3614 &must_preallocate, &flags,
3615 &try_tail_call, CALL_FROM_THUNK_P (exp));
3616
3617 if (args_size.var)
3618 must_preallocate = 1;
3619
3620 /* Now make final decision about preallocating stack space. */
3621 must_preallocate = finalize_must_preallocate (must_preallocate,
3622 num_actuals, args,
3623 &args_size);
3624
3625 /* If the structure value address will reference the stack pointer, we
3626 must stabilize it. We don't need to do this if we know that we are
3627 not going to adjust the stack pointer in processing this call. */
3628
3629 if (structure_value_addr
3630 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3631 || reg_mentioned_p (virtual_outgoing_args_rtx,
3632 structure_value_addr))
3633 && (args_size.var
3634 || (!ACCUMULATE_OUTGOING_ARGS
3635 && maybe_ne (args_size.constant, 0))))
3636 structure_value_addr = copy_to_reg (structure_value_addr);
3637
3638 /* Tail calls can make things harder to debug, and we've traditionally
3639 pushed these optimizations into -O2. Don't try if we're already
3640 expanding a call, as that means we're an argument. Don't try if
3641 there's cleanups, as we know there's code to follow the call. */
3642 if (currently_expanding_call++ != 0
3643 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
3644 || args_size.var
3645 || dbg_cnt (tail_call) == false)
3646 try_tail_call = 0;
3647
3648 /* Work around buggy C/C++ wrappers around Fortran routines with
3649 character(len=constant) arguments if the hidden string length arguments
3650 are passed on the stack; if the callers forget to pass those arguments,
3651 attempting to tail call in such routines leads to stack corruption.
3652 Avoid tail calls in functions where at least one such hidden string
3653 length argument is passed (partially or fully) on the stack in the
3654 caller and the callee needs to pass any arguments on the stack.
3655 See PR90329. */
3656 if (try_tail_call && maybe_ne (args_size.constant, 0))
3657 for (tree arg = DECL_ARGUMENTS (current_function_decl);
3658 arg; arg = DECL_CHAIN (arg))
3659 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
3660 {
3661 subrtx_iterator::array_type array;
3662 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
3663 if (MEM_P (*iter))
3664 {
3665 try_tail_call = 0;
3666 break;
3667 }
3668 }
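/* The problematic pattern looks roughly like this (hypothetical
   names, for illustration): a Fortran routine

     subroutine sub (s)
     character(len=16) :: s

   receives a hidden integer length argument, so a conforming C
   caller needs a prototype along the lines of
   `void sub_ (char *, size_t);'.  A buggy wrapper declaring only
   `void sub_ (char *);' never pushes the length, and a sibcall
   issued from SUB_ would then reuse an argument slot the caller
   never allocated.  */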
3669
3670 /* If the user has marked the function as requiring tail-call
3671 optimization, attempt it. */
3672 if (must_tail_call)
3673 try_tail_call = 1;
3674
3675 /* Check the remaining reasons for a tail call optimization to fail. */
3676 if (try_tail_call)
3677 try_tail_call = can_implement_as_sibling_call_p (exp,
3678 structure_value_addr,
3679 funtype,
3680 reg_parm_stack_space,
3681 fndecl,
3682 flags, addr, args_size);
3683
3684 /* Check if caller and callee disagree in promotion of function
3685 return value. */
3686 if (try_tail_call)
3687 {
3688 machine_mode caller_mode, caller_promoted_mode;
3689 machine_mode callee_mode, callee_promoted_mode;
3690 int caller_unsignedp, callee_unsignedp;
3691 tree caller_res = DECL_RESULT (current_function_decl);
3692
3693 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3694 caller_mode = DECL_MODE (caller_res);
3695 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3696 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3697 caller_promoted_mode
3698 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3699 &caller_unsignedp,
3700 TREE_TYPE (current_function_decl), 1);
3701 callee_promoted_mode
3702 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3703 &callee_unsignedp,
3704 funtype, 1);
3705 if (caller_mode != VOIDmode
3706 && (caller_promoted_mode != callee_promoted_mode
3707 || ((caller_mode != caller_promoted_mode
3708 || callee_mode != callee_promoted_mode)
3709 && (caller_unsignedp != callee_unsignedp
3710 || partial_subreg_p (caller_mode, callee_mode)))))
3711 {
3712 try_tail_call = 0;
3713 maybe_complain_about_tail_call (exp,
3714 "caller and callee disagree in"
3715 " promotion of function"
3716 " return value");
3717 }
3718 }
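/* Example of the mismatch being detected: if the caller returns
   `short', promoted by the target to SImode with sign extension,
   while the callee returns `unsigned short', promoted to SImode with
   zero extension, the signedness test above fires and the sibcall is
   abandoned, since the caller's caller would receive a value
   extended the wrong way.  */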
3719
3720 /* Ensure current function's preferred stack boundary is at least
3721 what we need. Stack alignment may also increase preferred stack
3722 boundary. */
3723 for (i = 0; i < num_actuals; i++)
3724 if (reg_parm_stack_space > 0
3725 || args[i].reg == 0
3726 || args[i].partial != 0
3727 || args[i].pass_on_stack)
3728 update_stack_alignment_for_call (&args[i].locate);
3729 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
3730 crtl->preferred_stack_boundary = preferred_stack_boundary;
3731 else
3732 preferred_stack_boundary = crtl->preferred_stack_boundary;
3733
3734 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
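/* E.g. a PREFERRED_STACK_BOUNDARY of 128 bits yields a
   preferred_unit_stack_boundary of 16 bytes.  */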
3735
3736 /* We want to make two insn chains; one for a sibling call, the other
3737 for a normal call. We will select one of the two chains after
3738 initial RTL generation is complete. */
3739 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
3740 {
3741 int sibcall_failure = 0;
3742 /* We want to emit any pending stack adjustments before the tail
3743 recursion "call". That way we know any adjustment after the tail
3744 recursion call can be ignored if we indeed use the tail
3745 call expansion. */
3746 saved_pending_stack_adjust save;
3747 rtx_insn *insns, *before_call, *after_args;
3748 rtx next_arg_reg;
3749
3750 if (pass == 0)
3751 {
3752 /* State variables we need to save and restore between
3753 iterations. */
3754 save_pending_stack_adjust (&save);
3755 }
3756 if (pass)
3757 flags &= ~ECF_SIBCALL;
3758 else
3759 flags |= ECF_SIBCALL;
3760
3761 /* Other state variables that we must reinitialize each time
3762 through the loop (that are not initialized by the loop itself). */
3763 argblock = 0;
3764 call_fusage = 0;
3765
3766 /* Start a new sequence for the normal call case.
3767
3768 From this point on, if the sibling call fails, we want to set
3769 sibcall_failure instead of continuing the loop. */
3770 start_sequence ();
3771
3772 /* Don't let pending stack adjusts add up to too much.
3773 Also, do all pending adjustments now if there is any chance
3774 this might be a call to alloca or if we are expanding a sibling
3775 call sequence.
3776 Also do the adjustments before a throwing call, otherwise
3777 exception handling can fail; PR 19225. */
3778 if (maybe_ge (pending_stack_adjust, 32)
3779 || (maybe_ne (pending_stack_adjust, 0)
3780 && (flags & ECF_MAY_BE_ALLOCA))
3781 || (maybe_ne (pending_stack_adjust, 0)
3782 && flag_exceptions && !(flags & ECF_NOTHROW))
3783 || pass == 0)
3784 do_pending_stack_adjust ();
3785
3786 /* Precompute any arguments as needed. */
3787 if (pass)
3788 precompute_arguments (num_actuals, args);
3789
3790 /* Now we are about to start emitting insns that can be deleted
3791 if a libcall is deleted. */
3792 if (pass && (flags & ECF_MALLOC))
3793 start_sequence ();
3794
3795 if (pass == 0
3796 && crtl->stack_protect_guard
3797 && targetm.stack_protect_runtime_enabled_p ())
3798 stack_protect_epilogue ();
3799
3800 adjusted_args_size = args_size;
3801 /* Compute the actual size of the argument block required. The variable
3802 and constant sizes must be combined, the size may have to be rounded,
3803 and there may be a minimum required size. When generating a sibcall
3804 pattern, do not round up, since we'll be re-using whatever space our
3805 caller provided. */
3806 unadjusted_args_size
3807 = compute_argument_block_size (reg_parm_stack_space,
3808 &adjusted_args_size,
3809 fndecl, fntype,
3810 (pass == 0 ? 0
3811 : preferred_stack_boundary));
3812
3813 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3814
3815 /* The argument block when performing a sibling call is the
3816 incoming argument block. */
3817 if (pass == 0)
3818 {
3819 argblock = crtl->args.internal_arg_pointer;
3820 if (STACK_GROWS_DOWNWARD)
3821 argblock
3822 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3823 else
3824 argblock
3825 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3826
3827 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3828 stored_args_map = sbitmap_alloc (map_size);
3829 bitmap_clear (stored_args_map);
3830 stored_args_watermark = HOST_WIDE_INT_M1U;
3831 }
3832
3833 /* If we have no actual push instructions, or shouldn't use them,
3834 make space for all args right now. */
3835 else if (adjusted_args_size.var != 0)
3836 {
3837 if (old_stack_level == 0)
3838 {
3839 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3840 old_stack_pointer_delta = stack_pointer_delta;
3841 old_pending_adj = pending_stack_adjust;
3842 pending_stack_adjust = 0;
3843 /* stack_arg_under_construction says whether a stack arg is
3844 being constructed at the old stack level. Pushing the stack
3845 gets a clean outgoing argument block. */
3846 old_stack_arg_under_construction = stack_arg_under_construction;
3847 stack_arg_under_construction = 0;
3848 }
3849 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3850 if (flag_stack_usage_info)
3851 current_function_has_unbounded_dynamic_stack_size = 1;
3852 }
3853 else
3854 {
3855 /* Note that we must go through the motions of allocating an argument
3856 block even if the size is zero because we may be storing args
3857 in the area reserved for register arguments, which may be part of
3858 the stack frame. */
3859
3860 poly_int64 needed = adjusted_args_size.constant;
3861
3862 /* Store the maximum argument space used. It will be pushed by
3863 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3864 checking). */
3865
3866 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3867 needed);
3868
3869 if (must_preallocate)
3870 {
3871 if (ACCUMULATE_OUTGOING_ARGS)
3872 {
3873 /* Since the stack pointer will never be pushed, it is
3874 possible for the evaluation of a parm to clobber
3875 something we have already written to the stack.
3876 Since most function calls on RISC machines do not use
3877 the stack, this is uncommon, but must work correctly.
3878
3879 Therefore, we save any area of the stack that was already
3880 written and that we are using. Here we set up to do this
3881 by making a new stack usage map from the old one. The
3882 actual save will be done by store_one_arg.
3883
3884 Another approach might be to try to reorder the argument
3885 evaluations to avoid this conflicting stack usage. */
3886
3887 /* Since we will be writing into the entire argument area,
3888 the map must be allocated for its entire size, not just
3889 the part that is the responsibility of the caller. */
3890 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3891 needed += reg_parm_stack_space;
3892
3893 poly_int64 limit = needed;
3894 if (ARGS_GROW_DOWNWARD)
3895 limit += 1;
3896
3897 /* For polynomial sizes, this is the maximum possible
3898 size needed for arguments with a constant size
3899 and offset. */
3900 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3901 highest_outgoing_arg_in_use
3902 = MAX (initial_highest_arg_in_use, const_limit);
3903
3904 free (stack_usage_map_buf);
3905 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3906 stack_usage_map = stack_usage_map_buf;
3907
3908 if (initial_highest_arg_in_use)
3909 memcpy (stack_usage_map, initial_stack_usage_map,
3910 initial_highest_arg_in_use);
3911
3912 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3913 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3914 (highest_outgoing_arg_in_use
3915 - initial_highest_arg_in_use));
3916 needed = 0;
3917
3918 /* The address of the outgoing argument list must not be
3919 copied to a register here, because argblock would be left
3920 pointing to the wrong place after the call to
3921 allocate_dynamic_stack_space below. */
3922
3923 argblock = virtual_outgoing_args_rtx;
3924 }
3925 else
3926 {
3927 /* Try to reuse some or all of the pending_stack_adjust
3928 to get this space. */
3929 if (inhibit_defer_pop == 0
3930 && (combine_pending_stack_adjustment_and_call
3931 (&needed,
3932 unadjusted_args_size,
3933 &adjusted_args_size,
3934 preferred_unit_stack_boundary)))
3935 {
3936 /* combine_pending_stack_adjustment_and_call computes
3937 an adjustment before the arguments are allocated.
3938 Account for them and see whether or not the stack
3939 needs to go up or down. */
3940 needed = unadjusted_args_size - needed;
3941
3942 /* Checked by
3943 combine_pending_stack_adjustment_and_call. */
3944 gcc_checking_assert (ordered_p (needed, 0));
3945 if (maybe_lt (needed, 0))
3946 {
3947 /* We're releasing stack space. */
3948 /* ??? We can avoid any adjustment at all if we're
3949 already aligned. FIXME. */
3950 pending_stack_adjust = -needed;
3951 do_pending_stack_adjust ();
3952 needed = 0;
3953 }
3954 else
3955 /* We need to allocate space. We'll do that in
3956 push_block below. */
3957 pending_stack_adjust = 0;
3958 }
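/* Rough numeric sketch of the combination above: if 32 bytes of
   pops are pending and the arguments need 16 bytes, the pop can be
   shrunk so the argument block reuses part of the space about to be
   released, rather than popping 32 bytes and immediately pushing 16
   back.  */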
3959
3960 /* Special-case NEEDED == 0, because the overhead of
3961 `push_block' here is non-trivial. */
3962 if (known_eq (needed, 0))
3963 argblock = virtual_outgoing_args_rtx;
3964 else
3965 {
3966 rtx needed_rtx = gen_int_mode (needed, Pmode);
3967 argblock = push_block (needed_rtx, 0, 0);
3968 if (ARGS_GROW_DOWNWARD)
3969 argblock = plus_constant (Pmode, argblock, needed);
3970 }
3971
3972 /* We only really need to call `copy_to_reg' in the case
3973 where push insns are going to be used to pass ARGBLOCK
3974 to a function call in ARGS. In that case, the stack
3975 pointer changes value from the allocation point to the
3976 call point, and hence the value of
3977 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3978 as well always do it. */
3979 argblock = copy_to_reg (argblock);
3980 }
3981 }
3982 }
3983
3984 if (ACCUMULATE_OUTGOING_ARGS)
3985 {
3986 /* The save/restore code in store_one_arg handles all
3987 cases except one: a constructor call (including a C
3988 function returning a BLKmode struct) to initialize
3989 an argument. */
3990 if (stack_arg_under_construction)
3991 {
3992 rtx push_size
3993 = (gen_int_mode
3994 (adjusted_args_size.constant
3995 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3996 : TREE_TYPE (fndecl))
3997 ? 0 : reg_parm_stack_space), Pmode));
3998 if (old_stack_level == 0)
3999 {
4000 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4001 old_stack_pointer_delta = stack_pointer_delta;
4002 old_pending_adj = pending_stack_adjust;
4003 pending_stack_adjust = 0;
4004 /* stack_arg_under_construction says whether a stack
4005 arg is being constructed at the old stack level.
4006 Pushing the stack gets a clean outgoing argument
4007 block. */
4008 old_stack_arg_under_construction
4009 = stack_arg_under_construction;
4010 stack_arg_under_construction = 0;
4011 /* Make a new map for the new argument list. */
4012 free (stack_usage_map_buf);
4013 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
4014 stack_usage_map = stack_usage_map_buf;
4015 highest_outgoing_arg_in_use = 0;
4016 stack_usage_watermark = HOST_WIDE_INT_M1U;
4017 }
4018 /* We can pass TRUE as the CANNOT_ACCUMULATE (last) argument
4019 because we just saved the stack pointer and will restore it
4020 right after the call. */
4021 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4022 -1, true);
4023 }
4024
4025 /* If argument evaluation might modify the stack pointer,
4026 copy the address of the argument list to a register. */
4027 for (i = 0; i < num_actuals; i++)
4028 if (args[i].pass_on_stack)
4029 {
4030 argblock = copy_addr_to_reg (argblock);
4031 break;
4032 }
4033 }
4034
4035 compute_argument_addresses (args, argblock, num_actuals);
4036
4037 /* Stack is properly aligned, pops can't safely be deferred during
4038 the evaluation of the arguments. */
4039 NO_DEFER_POP;
4040
4041 /* Precompute all register parameters. It isn't safe to compute
4042 anything once we have started filling any specific hard regs.
4043 TLS symbols sometimes need a call to resolve. Precompute
4044 register parameters before any stack pointer manipulation
4045 to avoid unaligned stack in the called function. */
4046 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4047
4048 OK_DEFER_POP;
4049
4050 /* Perform stack alignment before the first push (the last arg). */
4051 if (argblock == 0
4052 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4053 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4054 {
4055 /* When the stack adjustment is pending, we get better code
4056 by combining the adjustments. */
4057 if (maybe_ne (pending_stack_adjust, 0)
4058 && ! inhibit_defer_pop
4059 && (combine_pending_stack_adjustment_and_call
4060 (&pending_stack_adjust,
4061 unadjusted_args_size,
4062 &adjusted_args_size,
4063 preferred_unit_stack_boundary)))
4064 do_pending_stack_adjust ();
4065 else if (argblock == 0)
4066 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4067 - unadjusted_args_size,
4068 Pmode));
4069 }
4070 /* Now that the stack is properly aligned, pops can't safely
4071 be deferred during the evaluation of the arguments. */
4072 NO_DEFER_POP;
4073
4074 /* Record the maximum pushed stack space size. We must delay this
4075 until now to take into account the optimization done by
4076 combine_pending_stack_adjustment_and_call. */
4077 if (flag_stack_usage_info
4078 && !ACCUMULATE_OUTGOING_ARGS
4079 && pass
4080 && adjusted_args_size.var == 0)
4081 {
4082 poly_int64 pushed = (adjusted_args_size.constant
4083 + pending_stack_adjust);
4084 current_function_pushed_stack_size
4085 = upper_bound (current_function_pushed_stack_size, pushed);
4086 }
4087
4088 funexp = rtx_for_function_call (fndecl, addr);
4089
4090 if (CALL_EXPR_STATIC_CHAIN (exp))
4091 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4092 else
4093 static_chain_value = 0;
4094
4095 #ifdef REG_PARM_STACK_SPACE
4096 /* Save the fixed argument area if it's part of the caller's frame and
4097 is clobbered by argument setup for this call. */
4098 if (ACCUMULATE_OUTGOING_ARGS && pass)
4099 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4100 &low_to_save, &high_to_save);
4101 #endif
4102
4103 /* Now store (and compute if necessary) all non-register parms.
4104 These come before register parms, since they can require block-moves,
4105 which could clobber the registers used for register parms.
4106 Parms which have partial registers are not stored here,
4107 but we do preallocate space here if they want that. */
4108
4109 for (i = 0; i < num_actuals; i++)
4110 {
4111 if (args[i].reg == 0 || args[i].pass_on_stack)
4112 {
4113 rtx_insn *before_arg = get_last_insn ();
4114
4115 /* We don't allow passing huge (> 2^30 B) arguments
4116 by value. It would cause an overflow later on. */
4117 if (constant_lower_bound (adjusted_args_size.constant)
4118 >= (1 << (HOST_BITS_PER_INT - 2)))
4119 {
4120 sorry ("passing too large argument on stack");
4121 continue;
4122 }
4123
4124 if (store_one_arg (&args[i], argblock, flags,
4125 adjusted_args_size.var != 0,
4126 reg_parm_stack_space)
4127 || (pass == 0
4128 && check_sibcall_argument_overlap (before_arg,
4129 &args[i], 1)))
4130 sibcall_failure = 1;
4131 }
4132
4133 if (args[i].stack)
4134 call_fusage
4135 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4136 gen_rtx_USE (VOIDmode, args[i].stack),
4137 call_fusage);
4138 }
4139
4140 /* If we have a parm that is passed in registers but not in memory
4141 and whose alignment does not permit a direct copy into registers,
4142 make a group of pseudos that correspond to each register that we
4143 will later fill. */
4144 if (STRICT_ALIGNMENT)
4145 store_unaligned_arguments_into_pseudos (args, num_actuals);
4146
4147 /* Now store any partially-in-registers parm.
4148 This is the last place a block-move can happen. */
4149 if (reg_parm_seen)
4150 for (i = 0; i < num_actuals; i++)
4151 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4152 {
4153 rtx_insn *before_arg = get_last_insn ();
4154
4155 /* On targets with weird calling conventions (e.g. PA) it's
4156 hard to ensure that all cases of argument overlap between
4157 stack and registers work. Play it safe and bail out. */
4158 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4159 {
4160 sibcall_failure = 1;
4161 break;
4162 }
4163
4164 if (store_one_arg (&args[i], argblock, flags,
4165 adjusted_args_size.var != 0,
4166 reg_parm_stack_space)
4167 || (pass == 0
4168 && check_sibcall_argument_overlap (before_arg,
4169 &args[i], 1)))
4170 sibcall_failure = 1;
4171 }
4172
4173 bool any_regs = false;
4174 for (i = 0; i < num_actuals; i++)
4175 if (args[i].reg != NULL_RTX)
4176 {
4177 any_regs = true;
4178 targetm.calls.call_args (args[i].reg, funtype);
4179 }
4180 if (!any_regs)
4181 targetm.calls.call_args (pc_rtx, funtype);
4182
4183 /* Figure out the register where the value, if any, will come back. */
4184 valreg = 0;
4185 if (TYPE_MODE (rettype) != VOIDmode
4186 && ! structure_value_addr)
4187 {
4188 if (pcc_struct_value)
4189 valreg = hard_function_value (build_pointer_type (rettype),
4190 fndecl, NULL, (pass == 0));
4191 else
4192 valreg = hard_function_value (rettype, fndecl, fntype,
4193 (pass == 0));
4194
4195 /* If VALREG is a PARALLEL whose first member has a zero
4196 offset, use that. This is for targets such as m68k that
4197 return the same value in multiple places. */
4198 if (GET_CODE (valreg) == PARALLEL)
4199 {
4200 rtx elem = XVECEXP (valreg, 0, 0);
4201 rtx where = XEXP (elem, 0);
4202 rtx offset = XEXP (elem, 1);
4203 if (offset == const0_rtx
4204 && GET_MODE (where) == GET_MODE (valreg))
4205 valreg = where;
4206 }
4207 }
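/* E.g. m68k returns pointers in both %a0 and %d0, so VALREG
   arrives as something roughly of the form

     (parallel:SI [(expr_list (reg:SI %d0) (const_int 0))
                   (expr_list (reg:SI %a0) (const_int 0))])

   and the zero-offset first member is used directly.  */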
4208
4209 /* If register arguments require space on the stack and stack space
4210 was not preallocated, allocate stack space here for arguments
4211 passed in registers. */
4212 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4213 && !ACCUMULATE_OUTGOING_ARGS
4214 && must_preallocate == 0 && reg_parm_stack_space > 0)
4215 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4216
4217 /* Pass the function the address in which to return a
4218 structure value. */
4219 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4220 {
4221 structure_value_addr
4222 = convert_memory_address (Pmode, structure_value_addr);
4223 emit_move_insn (struct_value,
4224 force_reg (Pmode,
4225 force_operand (structure_value_addr,
4226 NULL_RTX)));
4227
4228 if (REG_P (struct_value))
4229 use_reg (&call_fusage, struct_value);
4230 }
4231
4232 after_args = get_last_insn ();
4233 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4234 static_chain_value, &call_fusage,
4235 reg_parm_seen, flags);
4236
4237 load_register_parameters (args, num_actuals, &call_fusage, flags,
4238 pass == 0, &sibcall_failure);
4239
4240 /* Save a pointer to the last insn before the call, so that we can
4241 later safely search backwards to find the CALL_INSN. */
4242 before_call = get_last_insn ();
4243
4244 /* Set up next argument register. For sibling calls on machines
4245 with register windows this should be the incoming register. */
4246 if (pass == 0)
4247 next_arg_reg = targetm.calls.function_incoming_arg
4248 (args_so_far, function_arg_info::end_marker ());
4249 else
4250 next_arg_reg = targetm.calls.function_arg
4251 (args_so_far, function_arg_info::end_marker ());
4252
4253 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4254 {
4255 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4256 arg_nr = num_actuals - arg_nr - 1;
4257 if (arg_nr >= 0
4258 && arg_nr < num_actuals
4259 && args[arg_nr].reg
4260 && valreg
4261 && REG_P (valreg)
4262 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4263 call_fusage
4264 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4265 gen_rtx_SET (valreg, args[arg_nr].reg),
4266 call_fusage);
4267 }
4268 /* All arguments and registers used for the call must be set up by
4269 now! */
4270
4271 /* Stack must be properly aligned now. */
4272 gcc_assert (!pass
4273 || multiple_p (stack_pointer_delta,
4274 preferred_unit_stack_boundary));
4275
4276 /* Generate the actual call instruction. */
4277 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4278 adjusted_args_size.constant, struct_value_size,
4279 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4280 flags, args_so_far);
4281
4282 if (flag_ipa_ra)
4283 {
4284 rtx_call_insn *last;
4285 rtx datum = NULL_RTX;
4286 if (fndecl != NULL_TREE)
4287 {
4288 datum = XEXP (DECL_RTL (fndecl), 0);
4289 gcc_assert (datum != NULL_RTX
4290 && GET_CODE (datum) == SYMBOL_REF);
4291 }
4292 last = last_call_insn ();
4293 add_reg_note (last, REG_CALL_DECL, datum);
4294 }
4295
4296 /* If the call setup or the call itself overlaps with anything
4297 of the argument setup we probably clobbered our call address.
4298 In that case we can't do sibcalls. */
4299 if (pass == 0
4300 && check_sibcall_argument_overlap (after_args, 0, 0))
4301 sibcall_failure = 1;
4302
4303 /* If a non-BLKmode value is returned at the most significant end
4304 of a register, shift the register right by the appropriate amount
4305 and update VALREG accordingly. BLKmode values are handled by the
4306 group load/store machinery below. */
4307 if (!structure_value_addr
4308 && !pcc_struct_value
4309 && TYPE_MODE (rettype) != VOIDmode
4310 && TYPE_MODE (rettype) != BLKmode
4311 && REG_P (valreg)
4312 && targetm.calls.return_in_msb (rettype))
4313 {
4314 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4315 sibcall_failure = 1;
4316 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4317 }
4318
4319 if (pass && (flags & ECF_MALLOC))
4320 {
4321 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4322 rtx_insn *last, *insns;
4323
4324 /* The return value from a malloc-like function is a pointer. */
4325 if (TREE_CODE (rettype) == POINTER_TYPE)
4326 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4327
4328 emit_move_insn (temp, valreg);
4329
4330 /* The return value from a malloc-like function cannot alias
4331 anything else. */
4332 last = get_last_insn ();
4333 add_reg_note (last, REG_NOALIAS, temp);
4334
4335 /* Write out the sequence. */
4336 insns = get_insns ();
4337 end_sequence ();
4338 emit_insn (insns);
4339 valreg = temp;
4340 }
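/* E.g. for `p = malloc (n)', TEMP carries REG_NOALIAS, telling
   alias analysis that *p cannot overlap any previously visible
   object; copying out of the hard return register also keeps the
   register's live range short.  */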
4341
4342 /* For calls to `setjmp', etc., inform
4343 function.c:setjmp_warnings that it should complain if
4344 nonvolatile values are live. For functions that cannot
4345 return, inform flow that control does not fall through. */
4346
4347 if ((flags & ECF_NORETURN) || pass == 0)
4348 {
4349 /* The barrier must be emitted
4350 immediately after the CALL_INSN. Some ports emit more
4351 than just a CALL_INSN above, so we must search for it here. */
4352
4353 rtx_insn *last = get_last_insn ();
4354 while (!CALL_P (last))
4355 {
4356 last = PREV_INSN (last);
4357 /* There was no CALL_INSN? */
4358 gcc_assert (last != before_call);
4359 }
4360
4361 emit_barrier_after (last);
4362
4363 /* Stack adjustments after a noreturn call are dead code.
4364 However when NO_DEFER_POP is in effect, we must preserve
4365 stack_pointer_delta. */
4366 if (inhibit_defer_pop == 0)
4367 {
4368 stack_pointer_delta = old_stack_allocated;
4369 pending_stack_adjust = 0;
4370 }
4371 }
4372
4373 /* If value type not void, return an rtx for the value. */
4374
4375 if (TYPE_MODE (rettype) == VOIDmode
4376 || ignore)
4377 target = const0_rtx;
4378 else if (structure_value_addr)
4379 {
4380 if (target == 0 || !MEM_P (target))
4381 {
4382 target
4383 = gen_rtx_MEM (TYPE_MODE (rettype),
4384 memory_address (TYPE_MODE (rettype),
4385 structure_value_addr));
4386 set_mem_attributes (target, rettype, 1);
4387 }
4388 }
4389 else if (pcc_struct_value)
4390 {
4391 /* This is the special C++ case where we need to
4392 know what the true target was. We take care to
4393 never use this value more than once in one expression. */
4394 target = gen_rtx_MEM (TYPE_MODE (rettype),
4395 copy_to_reg (valreg));
4396 set_mem_attributes (target, rettype, 1);
4397 }
4398 /* Handle calls that return values in multiple non-contiguous locations.
4399 The Irix 6 ABI has examples of this. */
4400 else if (GET_CODE (valreg) == PARALLEL)
4401 {
4402 if (target == 0)
4403 target = emit_group_move_into_temps (valreg);
4404 else if (rtx_equal_p (target, valreg))
4405 ;
4406 else if (GET_CODE (target) == PARALLEL)
4407 /* Handle the result of an emit_group_move_into_temps
4408 call in the previous pass. */
4409 emit_group_move (target, valreg);
4410 else
4411 emit_group_store (target, valreg, rettype,
4412 int_size_in_bytes (rettype));
4413 }
4414 else if (target
4415 && GET_MODE (target) == TYPE_MODE (rettype)
4416 && GET_MODE (target) == GET_MODE (valreg))
4417 {
4418 bool may_overlap = false;
4419
4420 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4421 reg to a plain register. */
4422 if (!REG_P (target) || HARD_REGISTER_P (target))
4423 valreg = avoid_likely_spilled_reg (valreg);
4424
4425 /* If TARGET is a MEM in the argument area, and we have
4426 saved part of the argument area, then we can't store
4427 directly into TARGET as it may get overwritten when we
4428 restore the argument save area below. Don't work too
4429 hard though and simply force TARGET to a register if it
4430 is a MEM; the optimizer is quite likely to sort it out. */
4431 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4432 for (i = 0; i < num_actuals; i++)
4433 if (args[i].save_area)
4434 {
4435 may_overlap = true;
4436 break;
4437 }
4438
4439 if (may_overlap)
4440 target = copy_to_reg (valreg);
4441 else
4442 {
4443 /* TARGET and VALREG cannot be equal at this point
4444 because the former would not have
4445 REG_FUNCTION_VALUE_P true, while the latter would if
4446 it were referring to the same register.
4447
4448 If they refer to the same register, this move will be
4449 a no-op, except when function inlining is being
4450 done. */
4451 emit_move_insn (target, valreg);
4452
4453 /* If we are setting a MEM, this code must be executed.
4454 Since it is emitted after the call insn, sibcall
4455 optimization cannot be performed in that case. */
4456 if (MEM_P (target))
4457 sibcall_failure = 1;
4458 }
4459 }
4460 else
4461 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4462
4463 /* If we promoted this return value, make the proper SUBREG.
4464 TARGET might be const0_rtx here, so be careful. */
4465 if (REG_P (target)
4466 && TYPE_MODE (rettype) != BLKmode
4467 && GET_MODE (target) != TYPE_MODE (rettype))
4468 {
4469 tree type = rettype;
4470 int unsignedp = TYPE_UNSIGNED (type);
4471 machine_mode pmode;
4472
4473 /* Ensure we promote as expected, and get the new unsignedness. */
4474 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4475 funtype, 1);
4476 gcc_assert (GET_MODE (target) == pmode);
4477
4478 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4479 GET_MODE (target));
4480 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4481 SUBREG_PROMOTED_VAR_P (target) = 1;
4482 SUBREG_PROMOTED_SET (target, unsignedp);
4483 }
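/* E.g. if the callee returns `signed char' but the ABI promotes
   return values to SImode, TARGET is an SImode reg at this point
   and becomes (subreg:QI (reg:SI ...) ...) with
   SUBREG_PROMOTED_VAR_P set, so later expansion knows the upper
   bits already hold the sign extension.  */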
4484
4485 /* If size of args is variable or this was a constructor call for a stack
4486 argument, restore saved stack-pointer value. */
4487
4488 if (old_stack_level)
4489 {
4490 rtx_insn *prev = get_last_insn ();
4491
4492 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4493 stack_pointer_delta = old_stack_pointer_delta;
4494
4495 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4496
4497 pending_stack_adjust = old_pending_adj;
4498 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4499 stack_arg_under_construction = old_stack_arg_under_construction;
4500 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4501 stack_usage_map = initial_stack_usage_map;
4502 stack_usage_watermark = initial_stack_usage_watermark;
4503 sibcall_failure = 1;
4504 }
4505 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4506 {
4507 #ifdef REG_PARM_STACK_SPACE
4508 if (save_area)
4509 restore_fixed_argument_area (save_area, argblock,
4510 high_to_save, low_to_save);
4511 #endif
4512
4513 /* If we saved any argument areas, restore them. */
4514 for (i = 0; i < num_actuals; i++)
4515 if (args[i].save_area)
4516 {
4517 machine_mode save_mode = GET_MODE (args[i].save_area);
4518 rtx stack_area
4519 = gen_rtx_MEM (save_mode,
4520 memory_address (save_mode,
4521 XEXP (args[i].stack_slot, 0)));
4522
4523 if (save_mode != BLKmode)
4524 emit_move_insn (stack_area, args[i].save_area);
4525 else
4526 emit_block_move (stack_area, args[i].save_area,
4527 (gen_int_mode
4528 (args[i].locate.size.constant, Pmode)),
4529 BLOCK_OP_CALL_PARM);
4530 }
4531
4532 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4533 stack_usage_map = initial_stack_usage_map;
4534 stack_usage_watermark = initial_stack_usage_watermark;
4535 }
4536
4537 /* If this was alloca, record the new stack level. */
4538 if (flags & ECF_MAY_BE_ALLOCA)
4539 record_new_stack_level ();
4540
4541 /* Free up storage we no longer need. */
4542 for (i = 0; i < num_actuals; ++i)
4543 free (args[i].aligned_regs);
4544
4545 targetm.calls.end_call_args ();
4546
4547 insns = get_insns ();
4548 end_sequence ();
4549
4550 if (pass == 0)
4551 {
4552 tail_call_insns = insns;
4553
4554 /* Restore the pending stack adjustment now that we have
4555 finished generating the sibling call sequence. */
4556
4557 restore_pending_stack_adjust (&save);
4558
4559 /* Prepare arg structure for next iteration. */
4560 for (i = 0; i < num_actuals; i++)
4561 {
4562 args[i].value = 0;
4563 args[i].aligned_regs = 0;
4564 args[i].stack = 0;
4565 }
4566
4567 sbitmap_free (stored_args_map);
4568 internal_arg_pointer_exp_state.scan_start = NULL;
4569 internal_arg_pointer_exp_state.cache.release ();
4570 }
4571 else
4572 {
4573 normal_call_insns = insns;
4574
4575 /* Verify that we've deallocated all the stack we used. */
4576 gcc_assert ((flags & ECF_NORETURN)
4577 || known_eq (old_stack_allocated,
4578 stack_pointer_delta
4579 - pending_stack_adjust));
4580 }
4581
4582 /* If something prevents making this a sibling call,
4583 zero out the sequence. */
4584 if (sibcall_failure)
4585 tail_call_insns = NULL;
4586 else
4587 break;
4588 }
4589
4590 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4591 arguments too, as argument area is now clobbered by the call. */
4592 if (tail_call_insns)
4593 {
4594 emit_insn (tail_call_insns);
4595 crtl->tail_call_emit = true;
4596 }
4597 else
4598 {
4599 emit_insn (normal_call_insns);
4600 if (try_tail_call)
4601 /* Ideally we'd emit a message for all of the ways that it could
4602 have failed. */
4603 maybe_complain_about_tail_call (exp, "tail call production failed");
4604 }
4605
4606 currently_expanding_call--;
4607
4608 free (stack_usage_map_buf);
4609 free (args);
4610 return target;
4611 }
4612
4613 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4614 this function's incoming arguments.
4615
4616 At the start of RTL generation we know the only REG_EQUIV notes
4617 in the rtl chain are those for incoming arguments, so we can look
4618 for REG_EQUIV notes between the start of the function and the
4619 NOTE_INSN_FUNCTION_BEG.
4620
4621 This is (slight) overkill. We could keep track of the highest
4622 argument we clobber and be more selective in removing notes, but it
4623 does not seem to be worth the effort. */
4624
4625 void
4626 fixup_tail_calls (void)
4627 {
4628 rtx_insn *insn;
4629
4630 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4631 {
4632 rtx note;
4633
4634 /* There are never REG_EQUIV notes for the incoming arguments
4635 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4636 if (NOTE_P (insn)
4637 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4638 break;
4639
4640 note = find_reg_note (insn, REG_EQUIV, 0);
4641 if (note)
4642 remove_note (insn, note);
4643 note = find_reg_note (insn, REG_EQUIV, 0);
4644 gcc_assert (!note);
4645 }
4646 }
4647
4648 /* Traverse a list of TYPES and expand all complex types into their
4649 components. */
4650 static tree
4651 split_complex_types (tree types)
4652 {
4653 tree p;
4654
4655 /* Before allocating memory, check for the common case of no complex. */
4656 for (p = types; p; p = TREE_CHAIN (p))
4657 {
4658 tree type = TREE_VALUE (p);
4659 if (TREE_CODE (type) == COMPLEX_TYPE
4660 && targetm.calls.split_complex_arg (type))
4661 goto found;
4662 }
4663 return types;
4664
4665 found:
4666 types = copy_list (types);
4667
4668 for (p = types; p; p = TREE_CHAIN (p))
4669 {
4670 tree complex_type = TREE_VALUE (p);
4671
4672 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4673 && targetm.calls.split_complex_arg (complex_type))
4674 {
4675 tree next, imag;
4676
4677 /* Rewrite complex type with component type. */
4678 TREE_VALUE (p) = TREE_TYPE (complex_type);
4679 next = TREE_CHAIN (p);
4680
4681 /* Add another component type for the imaginary part. */
4682 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4683 TREE_CHAIN (p) = imag;
4684 TREE_CHAIN (imag) = next;
4685
4686 /* Skip the newly created node. */
4687 p = TREE_CHAIN (p);
4688 }
4689 }
4690
4691 return types;
4692 }
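/* Example: the list (int, _Complex float, void) comes back as
   (int, float, float, void); each split node contributes its component
   type twice, once for the real and once for the imaginary part.  */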
4693 \f
4694 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4695 for a value of mode OUTMODE,
4696 with NARGS different arguments, passed as ARGS.
4697 Store the return value if RETVAL is nonzero: store it in VALUE if
4698 VALUE is nonnull, otherwise pick a convenient location. In either
4699 case return the location of the stored value.
4700
4701 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4702 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4703 other types of library calls. */
4704
4705 rtx
4706 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4707 enum libcall_type fn_type,
4708 machine_mode outmode, int nargs, rtx_mode_t *args)
4709 {
4710 /* Total size in bytes of all the stack-parms scanned so far. */
4711 struct args_size args_size;
4712 /* Size of arguments before any adjustments (such as rounding). */
4713 struct args_size original_args_size;
4714 int argnum;
4715 rtx fun;
4716 /* TODO: choose the correct decl type of orgfun. Sadly this information
4717 isn't present here, so we default to the native calling ABI. */
4718 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4719 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4720 int count;
4721 rtx argblock = 0;
4722 CUMULATIVE_ARGS args_so_far_v;
4723 cumulative_args_t args_so_far;
4724 struct arg
4725 {
4726 rtx value;
4727 machine_mode mode;
4728 rtx reg;
4729 int partial;
4730 struct locate_and_pad_arg_data locate;
4731 rtx save_area;
4732 };
4733 struct arg *argvec;
4734 int old_inhibit_defer_pop = inhibit_defer_pop;
4735 rtx call_fusage = 0;
4736 rtx mem_value = 0;
4737 rtx valreg;
4738 int pcc_struct_value = 0;
4739 poly_int64 struct_value_size = 0;
4740 int flags;
4741 int reg_parm_stack_space = 0;
4742 poly_int64 needed;
4743 rtx_insn *before_call;
4744 bool have_push_fusage;
4745 tree tfom; /* type_for_mode (outmode, 0) */
4746
4747 #ifdef REG_PARM_STACK_SPACE
4748 /* Define the boundary of the register parm stack space that needs to be
4749 saved, if any. */
4750 int low_to_save = 0, high_to_save = 0;
4751 rtx save_area = 0; /* Place that it is saved. */
4752 #endif
4753
4754 /* Initial stack-usage bookkeeping, saved so it can be restored on exit. */
4755 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4756 char *initial_stack_usage_map = stack_usage_map;
4757 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
4758 char *stack_usage_map_buf = NULL;
4759
4760 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4761
4762 #ifdef REG_PARM_STACK_SPACE
4763 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4764 #endif
4765
4766 /* By default, library functions cannot throw. */
4767 flags = ECF_NOTHROW;
4768
4769 switch (fn_type)
4770 {
4771 case LCT_NORMAL:
4772 break;
4773 case LCT_CONST:
4774 flags |= ECF_CONST;
4775 break;
4776 case LCT_PURE:
4777 flags |= ECF_PURE;
4778 break;
4779 case LCT_NORETURN:
4780 flags |= ECF_NORETURN;
4781 break;
4782 case LCT_THROW:
4783 flags &= ~ECF_NOTHROW;
4784 break;
4785 case LCT_RETURNS_TWICE:
4786 flags = ECF_RETURNS_TWICE;
4787 break;
4788 }
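/* For instance, arithmetic support routines such as __divdi3 are
   typically emitted with LCT_CONST, since they depend only on their
   operands, while LCT_THROW clears the default ECF_NOTHROW for
   helpers that may raise exceptions.  Note that LCT_RETURNS_TWICE
   replaces FLAGS outright, so ECF_NOTHROW is dropped there too.  */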
4789 fun = orgfun;
4790
4791 /* Ensure current function's preferred stack boundary is at least
4792 what we need. */
4793 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4794 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4795
4796 /* If this kind of value comes back in memory,
4797 decide where in memory it should come back. */
4798 if (outmode != VOIDmode)
4799 {
4800 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4801 if (aggregate_value_p (tfom, 0))
4802 {
4803 #ifdef PCC_STATIC_STRUCT_RETURN
4804 rtx pointer_reg
4805 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4806 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4807 pcc_struct_value = 1;
4808 if (value == 0)
4809 value = gen_reg_rtx (outmode);
4810 #else /* not PCC_STATIC_STRUCT_RETURN */
4811 struct_value_size = GET_MODE_SIZE (outmode);
4812 if (value != 0 && MEM_P (value))
4813 mem_value = value;
4814 else
4815 mem_value = assign_temp (tfom, 1, 1);
4816 #endif
4817 /* This call returns a big structure. */
4818 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4819 }
4820 }
4821 else
4822 tfom = void_type_node;
4823
4824 /* ??? Unfinished: must pass the memory address as an argument. */
4825
4826 /* Copy all the libcall-arguments out of the varargs data
4827 and into a vector ARGVEC.
4828
4829 Compute how to pass each argument. We only support a very small subset
4830 of the full argument passing conventions to limit complexity here since
4831 library functions shouldn't have many args. */
4832
4833 argvec = XALLOCAVEC (struct arg, nargs + 1);
4834 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4835
4836 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4837 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4838 #else
4839 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4840 #endif
4841 args_so_far = pack_cumulative_args (&args_so_far_v);
4842
4843 args_size.constant = 0;
4844 args_size.var = 0;
4845
4846 count = 0;
4847
4848 push_temp_slots ();
4849
4850 /* If there's a structure value address to be passed,
4851 either pass it in the special place, or pass it as an extra argument. */
4852 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4853 {
4854 rtx addr = XEXP (mem_value, 0);
4855
4856 nargs++;
4857
4858 /* Make sure it is a reasonable operand for a move or push insn. */
4859 if (!REG_P (addr) && !MEM_P (addr)
4860 && !(CONSTANT_P (addr)
4861 && targetm.legitimate_constant_p (Pmode, addr)))
4862 addr = force_operand (addr, NULL_RTX);
4863
4864 argvec[count].value = addr;
4865 argvec[count].mode = Pmode;
4866 argvec[count].partial = 0;
4867
4868 function_arg_info ptr_arg (Pmode, /*named=*/true);
4869 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
4870 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
4871
4872 locate_and_pad_parm (Pmode, NULL_TREE,
4873 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4874 1,
4875 #else
4876 argvec[count].reg != 0,
4877 #endif
4878 reg_parm_stack_space, 0,
4879 NULL_TREE, &args_size, &argvec[count].locate);
4880
4881 if (argvec[count].reg == 0 || argvec[count].partial != 0
4882 || reg_parm_stack_space > 0)
4883 args_size.constant += argvec[count].locate.size.constant;
4884
4885 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
4886
4887 count++;
4888 }
4889
4890 for (unsigned int i = 0; count < nargs; i++, count++)
4891 {
4892 rtx val = args[i].first;
4893 machine_mode mode = args[i].second;
4894 int unsigned_p = 0;
4895
4896 /* We cannot convert the arg value to the mode the library wants here;
4897 must do it earlier where we know the signedness of the arg. */
4898 gcc_assert (mode != BLKmode
4899 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
4900
4901 /* Make sure it is a reasonable operand for a move or push insn. */
4902 if (!REG_P (val) && !MEM_P (val)
4903 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
4904 val = force_operand (val, NULL_RTX);
4905
4906 function_arg_info orig_arg (mode, /*named=*/true);
4907 if (pass_by_reference (&args_so_far_v, orig_arg))
4908 {
4909 rtx slot;
4910 int must_copy = !reference_callee_copied (&args_so_far_v, orig_arg);
4911
4912 /* If this was a CONST function, it is now PURE since it now
4913 reads memory. */
4914 if (flags & ECF_CONST)
4915 {
4916 flags &= ~ECF_CONST;
4917 flags |= ECF_PURE;
4918 }
4919
4920 if (MEM_P (val) && !must_copy)
4921 {
4922 tree val_expr = MEM_EXPR (val);
4923 if (val_expr)
4924 mark_addressable (val_expr);
4925 slot = val;
4926 }
4927 else
4928 {
4929 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4930 1, 1);
4931 emit_move_insn (slot, val);
4932 }
4933
4934 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4935 gen_rtx_USE (VOIDmode, slot),
4936 call_fusage);
4937 if (must_copy)
4938 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4939 gen_rtx_CLOBBER (VOIDmode,
4940 slot),
4941 call_fusage);
4942
4943 mode = Pmode;
4944 val = force_operand (XEXP (slot, 0), NULL_RTX);
4945 }
4946
4947 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4948 function_arg_info arg (mode, /*named=*/true);
4949 argvec[count].mode = mode;
4950 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4951 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
4952
4953 argvec[count].partial
4954 = targetm.calls.arg_partial_bytes (args_so_far, arg);
4955
4956 if (argvec[count].reg == 0
4957 || argvec[count].partial != 0
4958 || reg_parm_stack_space > 0)
4959 {
4960 locate_and_pad_parm (mode, NULL_TREE,
4961 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4962 1,
4963 #else
4964 argvec[count].reg != 0,
4965 #endif
4966 reg_parm_stack_space, argvec[count].partial,
4967 NULL_TREE, &args_size, &argvec[count].locate);
4968 args_size.constant += argvec[count].locate.size.constant;
4969 gcc_assert (!argvec[count].locate.size.var);
4970 }
4971 #ifdef BLOCK_REG_PADDING
4972 else
4973 /* The argument is passed entirely in registers. See at which
4974 end it should be padded. */
4975 argvec[count].locate.where_pad =
4976 BLOCK_REG_PADDING (mode, NULL_TREE,
4977 known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4978 #endif
4979
4980 targetm.calls.function_arg_advance (args_so_far, arg);
4981 }
4982
4983 for (int i = 0; i < nargs; i++)
4984 if (reg_parm_stack_space > 0
4985 || argvec[i].reg == 0
4986 || argvec[i].partial != 0)
4987 update_stack_alignment_for_call (&argvec[i].locate);
4988
4989 /* If this machine requires an external definition for library
4990 functions, write one out. */
4991 assemble_external_libcall (fun);
4992
4993 original_args_size = args_size;
4994 args_size.constant = (aligned_upper_bound (args_size.constant
4995 + stack_pointer_delta,
4996 STACK_BYTES)
4997 - stack_pointer_delta);
4998
4999 args_size.constant = upper_bound (args_size.constant,
5000 reg_parm_stack_space);
5001
5002 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5003 args_size.constant -= reg_parm_stack_space;
5004
5005 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5006 args_size.constant);
5007
5008 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
5009 {
5010 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5011 current_function_pushed_stack_size
5012 = upper_bound (current_function_pushed_stack_size, pushed);
5013 }
5014
5015 if (ACCUMULATE_OUTGOING_ARGS)
5016 {
5017 /* Since the stack pointer will never be pushed, it is possible for
5018 the evaluation of a parm to clobber something we have already
5019 written to the stack. Since most function calls on RISC machines
5020 do not use the stack, this is uncommon, but must work correctly.
5021
5022 Therefore, we save any area of the stack that was already written
5023 and that we are using. Here we set up to do this by making a new
5024 stack usage map from the old one.
5025
5026 Another approach might be to try to reorder the argument
5027 evaluations to avoid this conflicting stack usage. */
5028
5029 needed = args_size.constant;
5030
5031 /* Since we will be writing into the entire argument area, the
5032 map must be allocated for its entire size, not just the part that
5033 is the responsibility of the caller. */
5034 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5035 needed += reg_parm_stack_space;
5036
5037 poly_int64 limit = needed;
5038 if (ARGS_GROW_DOWNWARD)
5039 limit += 1;
5040
5041 /* For polynomial sizes, this is the maximum possible size needed
5042 for arguments with a constant size and offset. */
5043 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5044 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5045 const_limit);
5046
5047 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5048 stack_usage_map = stack_usage_map_buf;
5049
5050 if (initial_highest_arg_in_use)
5051 memcpy (stack_usage_map, initial_stack_usage_map,
5052 initial_highest_arg_in_use);
5053
5054 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5055 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5056 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5057 needed = 0;
5058
5059 /* We must be careful to use virtual regs before they're instantiated,
5060 and real regs afterwards. Loop optimization, for example, can create
5061 new libcalls after we've instantiated the virtual regs, and if we
5062 use virtuals anyway, they won't match the rtl patterns. */
5063
5064 if (virtuals_instantiated)
5065 argblock = plus_constant (Pmode, stack_pointer_rtx,
5066 STACK_POINTER_OFFSET);
5067 else
5068 argblock = virtual_outgoing_args_rtx;
5069 }
5070 else
5071 {
5072 if (!PUSH_ARGS)
5073 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5074 }
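/* Illustrative sketch (not part of GCC): stack_usage_map is conceptually a
   byte-per-byte occupancy map of the outgoing argument area.  Regions are
   checked before being overwritten and marked once stored, as the push
   loop below does via stack_region_maybe_used_p and mark_stack_region_used.
   The map size here is invented.  */
#if 0
static char toy_map[256];

static void
toy_mark_region_used (int lo, int hi)
{
  int i;
  for (i = lo; i < hi; i++)
    toy_map[i] = 1;
}

static int
toy_region_maybe_used_p (int lo, int hi)
{
  int i;
  for (i = lo; i < hi; i++)
    if (toy_map[i])
      return 1;		/* Caller must save this region first.  */
  return 0;
}
#endif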
5075
5076 /* Since we push the args individually in reverse order, perform stack
5077 alignment before the first push (which pushes the last arg). */
5078 if (argblock == 0)
5079 anti_adjust_stack (gen_int_mode (args_size.constant
5080 - original_args_size.constant,
5081 Pmode));
5082
5083 argnum = nargs - 1;
5084
5085 #ifdef REG_PARM_STACK_SPACE
5086 if (ACCUMULATE_OUTGOING_ARGS)
5087 {
5088 /* The argument list is the property of the called routine and it
5089 may clobber it. If the fixed area has been used for previous
5090 parameters, we must save and restore it. */
5091 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5092 &low_to_save, &high_to_save);
5093 }
5094 #endif
5095
5096 /* When expanding a normal call, args are stored in push order,
5097 which is the reverse of what we have here. */
5098 bool any_regs = false;
5099 for (int i = nargs; i-- > 0; )
5100 if (argvec[i].reg != NULL_RTX)
5101 {
5102 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5103 any_regs = true;
5104 }
5105 if (!any_regs)
5106 targetm.calls.call_args (pc_rtx, NULL_TREE);
5107
5108 /* Push the args that need to be pushed. */
5109
5110 have_push_fusage = false;
5111
5112 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5113 are to be pushed. */
5114 for (count = 0; count < nargs; count++, argnum--)
5115 {
5116 machine_mode mode = argvec[argnum].mode;
5117 rtx val = argvec[argnum].value;
5118 rtx reg = argvec[argnum].reg;
5119 int partial = argvec[argnum].partial;
5120 unsigned int parm_align = argvec[argnum].locate.boundary;
5121 poly_int64 lower_bound = 0, upper_bound = 0;
5122
5123 if (! (reg != 0 && partial == 0))
5124 {
5125 rtx use;
5126
5127 if (ACCUMULATE_OUTGOING_ARGS)
5128 {
5129 /* If this is being stored into a pre-allocated, fixed-size,
5130 stack area, save any previous data at that location. */
5131
5132 if (ARGS_GROW_DOWNWARD)
5133 {
5134 /* The slot offset is negative, but we want to index
5135 stack_usage_map with positive values. */
5136 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5137 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5138 }
5139 else
5140 {
5141 lower_bound = argvec[argnum].locate.slot_offset.constant;
5142 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5143 }
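/* Illustrative sketch (not part of GCC): the [LOWER_BOUND, UPPER_BOUND)
   interval computed above, for both stack growth directions.  When args
   grow downward the slot offset is negative, so it is negated (plus one)
   to index the usage map with nonnegative values.  */
#if 0
static void
toy_arg_bounds (long slot_offset, long size, int args_grow_downward,
		long *lo, long *hi)
{
  if (args_grow_downward)
    {
      *hi = -slot_offset + 1;
      *lo = *hi - size;
    }
  else
    {
      *lo = slot_offset;
      *hi = *lo + size;
    }
}
#endif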
5144
5145 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5146 reg_parm_stack_space))
5147 {
5148 /* We need to make a save area. */
5149 poly_uint64 size
5150 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5151 machine_mode save_mode
5152 = int_mode_for_size (size, 1).else_blk ();
5153 rtx adr
5154 = plus_constant (Pmode, argblock,
5155 argvec[argnum].locate.offset.constant);
5156 rtx stack_area
5157 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5158
5159 if (save_mode == BLKmode)
5160 {
5161 argvec[argnum].save_area
5162 = assign_stack_temp (BLKmode,
5163 argvec[argnum].locate.size.constant);
5165
5166 emit_block_move (validize_mem
5167 (copy_rtx (argvec[argnum].save_area)),
5168 stack_area,
5169 (gen_int_mode
5170 (argvec[argnum].locate.size.constant,
5171 Pmode)),
5172 BLOCK_OP_CALL_PARM);
5173 }
5174 else
5175 {
5176 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5177
5178 emit_move_insn (argvec[argnum].save_area, stack_area);
5179 }
5180 }
5181 }
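/* Illustrative sketch (not part of GCC): the mode chosen by
   int_mode_for_size above.  A region that fits in an integer mode is saved
   with a single register move; anything larger falls back to BLKmode and a
   block copy.  The mode table below is invented.  */
#if 0
enum toy_mode { TOY_QI = 1, TOY_HI = 2, TOY_SI = 4, TOY_DI = 8, TOY_BLK = 0 };

static enum toy_mode
toy_save_mode (long size_in_bytes)
{
  if (size_in_bytes <= 1) return TOY_QI;
  if (size_in_bytes <= 2) return TOY_HI;
  if (size_in_bytes <= 4) return TOY_SI;
  if (size_in_bytes <= 8) return TOY_DI;
  return TOY_BLK;	/* Too wide for one register: block-copy it.  */
}
#endif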
5182
5183 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5184 partial, reg, 0, argblock,
5185 (gen_int_mode
5186 (argvec[argnum].locate.offset.constant, Pmode)),
5187 reg_parm_stack_space,
5188 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5189
5190 /* Now mark the segment we just used. */
5191 if (ACCUMULATE_OUTGOING_ARGS)
5192 mark_stack_region_used (lower_bound, upper_bound);
5193
5194 NO_DEFER_POP;
5195
5196 /* Indicate argument access so that alias.c knows that these
5197 values are live. */
5198 if (argblock)
5199 use = plus_constant (Pmode, argblock,
5200 argvec[argnum].locate.offset.constant);
5201 else if (have_push_fusage)
5202 continue;
5203 else
5204 {
5205 /* When arguments are pushed, trying to tell alias.c where
5206 exactly this argument is won't work, because the
5207 auto-increment causes confusion. So we merely indicate
5208 that we access something with a known mode somewhere on
5209 the stack. */
5210 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5211 gen_rtx_SCRATCH (Pmode));
5212 have_push_fusage = true;
5213 }
5214 use = gen_rtx_MEM (argvec[argnum].mode, use);
5215 use = gen_rtx_USE (VOIDmode, use);
5216 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5217 }
5218 }
5219
5220 argnum = nargs - 1;
5221
5222 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5223
5224 /* Now load any reg parms into their regs. */
5225
5226 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5227 are to be pushed. */
5228 for (count = 0; count < nargs; count++, argnum--)
5229 {
5230 machine_mode mode = argvec[argnum].mode;
5231 rtx val = argvec[argnum].value;
5232 rtx reg = argvec[argnum].reg;
5233 int partial = argvec[argnum].partial;
5234
5235 /* Handle calls that pass values in multiple non-contiguous
5236 locations. The PA64 has examples of this for library calls. */
5237 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5238 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5239 else if (reg != 0 && partial == 0)
5240 {
5241 emit_move_insn (reg, val);
5242 #ifdef BLOCK_REG_PADDING
5243 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5244
5245 /* Copied from load_register_parameters. */
5246
5247 /* Handle the case where we have a value that needs shifting
5248 up to the msb, e.g. a QImode value that we're padding
5249 upward on a BYTES_BIG_ENDIAN machine. */
5250 if (known_lt (size, UNITS_PER_WORD)
5251 && (argvec[argnum].locate.where_pad
5252 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5253 {
5254 rtx x;
5255 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5256
5257 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5258 report the whole reg as used. Strictly speaking, the
5259 call only uses SIZE bytes at the msb end, but it doesn't
5260 seem worth generating rtl to say that. */
5261 reg = gen_rtx_REG (word_mode, REGNO (reg));
5262 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5263 if (x != reg)
5264 emit_move_insn (reg, x);
5265 }
5266 #endif
5267 }
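/* Illustrative sketch (not part of GCC): the BLOCK_REG_PADDING shift
   above.  A value narrower than a word that must sit at the
   most-significant end of its register (e.g. pad-upward on big-endian) is
   shifted left by the unused bits.  Assumes a 64-bit word and 64-bit
   unsigned long.  */
#if 0
static unsigned long
toy_shift_to_msb (unsigned long val, int size_in_bytes)
{
  int shift = (8 - size_in_bytes) * 8;	/* (UNITS_PER_WORD - size) bits.  */
  return val << shift;
}
/* toy_shift_to_msb (0xAB, 1) == 0xAB00000000000000.  */
#endif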
5268
5269 NO_DEFER_POP;
5270 }
5271
5272 /* Any regs containing parms remain in use through the call. */
5273 for (count = 0; count < nargs; count++)
5274 {
5275 rtx reg = argvec[count].reg;
5276 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5277 use_group_regs (&call_fusage, reg);
5278 else if (reg != 0)
5279 {
5280 int partial = argvec[count].partial;
5281 if (partial)
5282 {
5283 int nregs;
5284 gcc_assert (partial % UNITS_PER_WORD == 0);
5285 nregs = partial / UNITS_PER_WORD;
5286 use_regs (&call_fusage, REGNO (reg), nregs);
5287 }
5288 else
5289 use_reg (&call_fusage, reg);
5290 }
5291 }
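/* Illustrative sketch (not part of GCC): PARTIAL above is a byte count,
   asserted to be an exact multiple of the word size, so the number of
   registers to mark as used by the call is a plain division.  */
#if 0
static int
toy_partial_nregs (int partial_bytes, int word_bytes)
{
  /* Mirrors the gcc_assert above: partial_bytes % word_bytes == 0.  */
  return partial_bytes / word_bytes;
}
#endif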
5292
5293 /* Pass the function the address in which to return a structure value. */
5294 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5295 {
5296 emit_move_insn (struct_value,
5297 force_reg (Pmode,
5298 force_operand (XEXP (mem_value, 0),
5299 NULL_RTX)));
5300 if (REG_P (struct_value))
5301 use_reg (&call_fusage, struct_value);
5302 }
5303
5304 /* Don't allow popping to be deferred, since then
5305 cse'ing of library calls could delete a call and leave the pop. */
5306 NO_DEFER_POP;
5307 valreg = (mem_value == 0 && outmode != VOIDmode
5308 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5309
5310 /* Stack must be properly aligned now. */
5311 gcc_assert (multiple_p (stack_pointer_delta,
5312 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5313
5314 before_call = get_last_insn ();
5315
5316 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5317 will set inhibit_defer_pop to that value. */
5318 /* The return type is needed to decide how many bytes the function pops.
5319 Signedness plays no role in that, so for simplicity, we pretend it's
5320 always signed. We also assume that the list of arguments passed has
5321 no impact, so we pretend it is unknown. */
5322
5323 emit_call_1 (fun, NULL,
5324 get_identifier (XSTR (orgfun, 0)),
5325 build_function_type (tfom, NULL_TREE),
5326 original_args_size.constant, args_size.constant,
5327 struct_value_size,
5328 targetm.calls.function_arg (args_so_far,
5329 function_arg_info::end_marker ()),
5330 valreg,
5331 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5332
5333 if (flag_ipa_ra)
5334 {
5335 rtx datum = orgfun;
5336 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5337 rtx_call_insn *last = last_call_insn ();
5338 add_reg_note (last, REG_CALL_DECL, datum);
5339 }
5340
5341 /* Right-shift returned value if necessary. */
5342 if (!pcc_struct_value
5343 && TYPE_MODE (tfom) != BLKmode
5344 && targetm.calls.return_in_msb (tfom))
5345 {
5346 shift_return_value (TYPE_MODE (tfom), false, valreg);
5347 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5348 }
5349
5350 targetm.calls.end_call_args ();
5351
5352 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5353 that it should complain if nonvolatile values are live. For
5354 functions that cannot return, inform flow that control does not
5355 fall through. */
5356 if (flags & ECF_NORETURN)
5357 {
5358 /* The barrier note must be emitted
5359 immediately after the CALL_INSN. Some ports emit more than
5360 just a CALL_INSN above, so we must search for it here. */
5361 rtx_insn *last = get_last_insn ();
5362 while (!CALL_P (last))
5363 {
5364 last = PREV_INSN (last);
5365 /* There was no CALL_INSN? */
5366 gcc_assert (last != before_call);
5367 }
5368
5369 emit_barrier_after (last);
5370 }
5371
5372 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5373 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5374 if (flags & ECF_NOTHROW)
5375 {
5376 rtx_insn *last = get_last_insn ();
5377 while (!CALL_P (last))
5378 {
5379 last = PREV_INSN (last);
5380 /* There was no CALL_INSN? */
5381 gcc_assert (last != before_call);
5382 }
5383
5384 make_reg_eh_region_note_nothrow_nononlocal (last);
5385 }
5386
5387 /* Now restore inhibit_defer_pop to its actual original value. */
5388 OK_DEFER_POP;
5389
5390 pop_temp_slots ();
5391
5392 /* Copy the value to the right place. */
5393 if (outmode != VOIDmode && retval)
5394 {
5395 if (mem_value)
5396 {
5397 if (value == 0)
5398 value = mem_value;
5399 if (value != mem_value)
5400 emit_move_insn (value, mem_value);
5401 }
5402 else if (GET_CODE (valreg) == PARALLEL)
5403 {
5404 if (value == 0)
5405 value = gen_reg_rtx (outmode);
5406 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5407 }
5408 else
5409 {
5410 /* Convert to the proper mode if a promotion has been active. */
5411 if (GET_MODE (valreg) != outmode)
5412 {
5413 int unsignedp = TYPE_UNSIGNED (tfom);
5414
5415 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5416 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5417 == GET_MODE (valreg));
5418 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5419 }
5420
5421 if (value != 0)
5422 emit_move_insn (value, valreg);
5423 else
5424 value = valreg;
5425 }
5426 }
5427
5428 if (ACCUMULATE_OUTGOING_ARGS)
5429 {
5430 #ifdef REG_PARM_STACK_SPACE
5431 if (save_area)
5432 restore_fixed_argument_area (save_area, argblock,
5433 high_to_save, low_to_save);
5434 #endif
5435
5436 /* If we saved any argument areas, restore them. */
5437 for (count = 0; count < nargs; count++)
5438 if (argvec[count].save_area)
5439 {
5440 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5441 rtx adr = plus_constant (Pmode, argblock,
5442 argvec[count].locate.offset.constant);
5443 rtx stack_area = gen_rtx_MEM (save_mode,
5444 memory_address (save_mode, adr));
5445
5446 if (save_mode == BLKmode)
5447 emit_block_move (stack_area,
5448 validize_mem
5449 (copy_rtx (argvec[count].save_area)),
5450 (gen_int_mode
5451 (argvec[count].locate.size.constant, Pmode)),
5452 BLOCK_OP_CALL_PARM);
5453 else
5454 emit_move_insn (stack_area, argvec[count].save_area);
5455 }
5456
5457 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5458 stack_usage_map = initial_stack_usage_map;
5459 stack_usage_watermark = initial_stack_usage_watermark;
5460 }
5461
5462 free (stack_usage_map_buf);
5463
5464 return value;
5466 }
5467 \f
5468
5469 /* Store a single argument for a function call
5470 into the register or memory area where it must be passed.
5471 *ARG describes the argument value and where to pass it.
5472
5473 ARGBLOCK is the address of the stack-block for all the arguments,
5474 or 0 on a machine where arguments are pushed individually.
5475
5476 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
5477 so must be careful about how the stack is used.
5478
5479 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5480 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is set, to
5481 indicate that we need not worry about saving and restoring the stack.
5482
5483 FNDECL is the declaration of the function we are calling.
5484
5485 Return nonzero if this arg should cause sibcall failure,
5486 zero otherwise. */
5487
5488 static int
5489 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5490 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5491 {
5492 tree pval = arg->tree_value;
5493 rtx reg = 0;
5494 int partial = 0;
5495 poly_int64 used = 0;
5496 poly_int64 lower_bound = 0, upper_bound = 0;
5497 int sibcall_failure = 0;
5498
5499 if (TREE_CODE (pval) == ERROR_MARK)
5500 return 1;
5501
5502 /* Push a new temporary level for any temporaries we make for
5503 this argument. */
5504 push_temp_slots ();
5505
5506 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5507 {
5508 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5509 save any previous data at that location. */
5510 if (argblock && ! variable_size && arg->stack)
5511 {
5512 if (ARGS_GROW_DOWNWARD)
5513 {
5514 /* stack_slot is negative, but we want to index stack_usage_map
5515 with positive values. */
5516 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5517 {
5518 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5519 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5520 }
5521 else
5522 upper_bound = 0;
5523
5524 lower_bound = upper_bound - arg->locate.size.constant;
5525 }
5526 else
5527 {
5528 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5529 {
5530 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5531 lower_bound = rtx_to_poly_int64 (offset);
5532 }
5533 else
5534 lower_bound = 0;
5535
5536 upper_bound = lower_bound + arg->locate.size.constant;
5537 }
5538
5539 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5540 reg_parm_stack_space))
5541 {
5542 /* We need to make a save area. */
5543 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5544 machine_mode save_mode
5545 = int_mode_for_size (size, 1).else_blk ();
5546 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5547 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5548
5549 if (save_mode == BLKmode)
5550 {
5551 arg->save_area
5552 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5553 preserve_temp_slots (arg->save_area);
5554 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5555 stack_area,
5556 (gen_int_mode
5557 (arg->locate.size.constant, Pmode)),
5558 BLOCK_OP_CALL_PARM);
5559 }
5560 else
5561 {
5562 arg->save_area = gen_reg_rtx (save_mode);
5563 emit_move_insn (arg->save_area, stack_area);
5564 }
5565 }
5566 }
5567 }
5568
5569 /* If this isn't going to be placed on both the stack and in registers,
5570 set up the register and number of words. */
5571 if (! arg->pass_on_stack)
5572 {
5573 if (flags & ECF_SIBCALL)
5574 reg = arg->tail_call_reg;
5575 else
5576 reg = arg->reg;
5577 partial = arg->partial;
5578 }
5579
5580 /* An argument passed entirely in a register should never reach this
5581 function; the assertion below enforces that. */
5582 gcc_assert (reg == 0 || partial != 0);
5583
5584 /* If this arg needs special alignment, don't load the registers
5585 here. */
5586 if (arg->n_aligned_regs != 0)
5587 reg = 0;
5588
5589 /* If this is being passed partially in a register, we can't evaluate
5590 it directly into its stack slot. Otherwise, we can. */
5591 if (arg->value == 0)
5592 {
5593 /* stack_arg_under_construction is nonzero if a function argument is
5594 being evaluated directly into the outgoing argument list and
5595 expand_call must take special action to preserve the argument list
5596 if it is called recursively.
5597
5598 For scalar function arguments stack_usage_map is sufficient to
5599 determine which stack slots must be saved and restored. Scalar
5600 arguments in general have pass_on_stack == 0.
5601
5602 If this argument is initialized by a function which takes the
5603 address of the argument (a C++ constructor or a C function
5604 returning a BLKmode structure), then stack_usage_map is
5605 insufficient and expand_call must push the stack around the
5606 function call. Such arguments have pass_on_stack == 1.
5607
5608 Note that it is always safe to set stack_arg_under_construction,
5609 but this generates suboptimal code if set when not needed. */
5610
5611 if (arg->pass_on_stack)
5612 stack_arg_under_construction++;
5613
5614 arg->value = expand_expr (pval,
5615 (partial
5616 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5617 ? NULL_RTX : arg->stack,
5618 VOIDmode, EXPAND_STACK_PARM);
5619
5620 /* If we are promoting the object (or if for any other reason the mode
5621 doesn't agree), convert it now. */
5622
5623 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5624 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5625 arg->value, arg->unsignedp);
5626
5627 if (arg->pass_on_stack)
5628 stack_arg_under_construction--;
5629 }
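/* Illustrative sketch (not part of GCC): the convert_modes step above,
   shown as widening a 32-bit value into a 64-bit argument slot while
   honoring the argument's signedness.  Assumes 64-bit long.  */
#if 0
static long
toy_widen (int val, int unsignedp)
{
  return unsignedp ? (long) (unsigned int) val : (long) val;
}
/* toy_widen (-1, 0) == -1L; toy_widen (-1, 1) == 0xFFFFFFFFL.  */
#endif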
5630
5631 /* Check for overlap with already clobbered argument area. */
5632 if ((flags & ECF_SIBCALL)
5633 && MEM_P (arg->value)
5634 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5635 arg->locate.size.constant))
5636 sibcall_failure = 1;
5637
5638 /* Don't allow anything left on stack from computation
5639 of argument to alloca. */
5640 if (flags & ECF_MAY_BE_ALLOCA)
5641 do_pending_stack_adjust ();
5642
5643 if (arg->value == arg->stack)
5644 /* If the value is already in the stack slot, we are done. */
5645 ;
5646 else if (arg->mode != BLKmode)
5647 {
5648 unsigned int parm_align;
5649
5650 /* Argument is a scalar, not entirely passed in registers.
5651 (If part is passed in registers, arg->partial says how much
5652 and emit_push_insn will take care of putting it there.)
5653
5654 Push it, and if its size is less than the
5655 amount of space allocated to it,
5656 also bump stack pointer by the additional space.
5657 Note that in C the default argument promotions
5658 will prevent such mismatches. */
5659
5660 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5661 ? 0 : GET_MODE_SIZE (arg->mode));
5662
5663 /* Compute how much space the push instruction will push.
5664 On many machines, pushing a byte will advance the stack
5665 pointer by a halfword. */
5666 #ifdef PUSH_ROUNDING
5667 size = PUSH_ROUNDING (size);
5668 #endif
5669 used = size;
5670
5671 /* Compute how much space the argument should get:
5672 round up to a multiple of the alignment for arguments. */
5673 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5674 != PAD_NONE)
5675 /* At the moment we don't (need to) support ABIs for which the
5676 padding isn't known at compile time. In principle it should
5677 be easy to add though. */
5678 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
5679
5680 /* Compute the alignment of the pushed argument. */
5681 parm_align = arg->locate.boundary;
5682 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5683 == PAD_DOWNWARD)
5684 {
5685 poly_int64 pad = used - size;
5686 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5687 if (pad_align != 0)
5688 parm_align = MIN (parm_align, pad_align);
5689 }
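/* Illustrative sketch (not part of GCC): the USED/pad/alignment
   computations above.  USED is SIZE rounded up to the parameter boundary;
   when padding is downward, the pushed data is only guaranteed aligned to
   the alignment of the pad, which for a constant is the largest power of
   two dividing it.  Boundary and bit counts below are invented.  */
#if 0
static unsigned int
toy_known_alignment (long x)
{
  return x == 0 ? 0 : (unsigned int) (x & -x);	/* In bytes.  */
}

static unsigned int
toy_parm_align (long size, long boundary_bytes, unsigned int slot_align_bits)
{
  long used = (size + boundary_bytes - 1) & -boundary_bytes;
  long pad = used - size;
  unsigned int pad_align_bits = toy_known_alignment (pad) * 8;

  if (pad_align_bits != 0 && pad_align_bits < slot_align_bits)
    return pad_align_bits;
  return slot_align_bits;
}
/* size 5, boundary 8: used == 8, pad == 3, so only byte alignment
   (8 bits) can be assumed for the value itself.  */
#endif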
5690
5691 /* This isn't already where we want it on the stack, so put it there.
5692 This can either be done with push or copy insns. */
5693 if (maybe_ne (used, 0)
5694 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5695 NULL_RTX, parm_align, partial, reg, used - size,
5696 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5697 reg_parm_stack_space,
5698 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5699 sibcall_failure = 1;
5700
5701 /* Unless this is a partially-in-register argument, the argument is now
5702 in the stack. */
5703 if (partial == 0)
5704 arg->value = arg->stack;
5705 }
5706 else
5707 {
5708 /* BLKmode, at least partly to be pushed. */
5709
5710 unsigned int parm_align;
5711 poly_int64 excess;
5712 rtx size_rtx;
5713
5714 /* Pushing a nonscalar.
5715 If part is passed in registers, PARTIAL says how much
5716 and emit_push_insn will take care of putting it there. */
5717
5718 /* Round its size up to a multiple
5719 of the allocation unit for arguments. */
5720
5721 if (arg->locate.size.var != 0)
5722 {
5723 excess = 0;
5724 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
5725 }
5726 else
5727 {
5728 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5729 for BLKmode is careful to avoid it. */
5730 excess = (arg->locate.size.constant
5731 - arg_int_size_in_bytes (TREE_TYPE (pval))
5732 + partial);
5733 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
5734 NULL_RTX, TYPE_MODE (sizetype),
5735 EXPAND_NORMAL);
5736 }
5737
5738 parm_align = arg->locate.boundary;
5739
5740 /* When an argument is padded down, the block is aligned to
5741 PARM_BOUNDARY, but the actual argument isn't. */
5742 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5743 == PAD_DOWNWARD)
5744 {
5745 if (arg->locate.size.var)
5746 parm_align = BITS_PER_UNIT;
5747 else
5748 {
5749 unsigned int excess_align
5750 = known_alignment (excess) * BITS_PER_UNIT;
5751 if (excess_align != 0)
5752 parm_align = MIN (parm_align, excess_align);
5753 }
5754 }
5755
5756 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
5757 {
5758 /* emit_push_insn might not work properly if arg->value and
5759 argblock + arg->locate.offset areas overlap. */
5760 rtx x = arg->value;
5761 poly_int64 i = 0;
5762
5763 if (strip_offset (XEXP (x, 0), &i)
5764 == crtl->args.internal_arg_pointer)
5765 {
5766 /* arg->locate doesn't contain the pretend_args_size offset;
5767 it's part of argblock. Ensure we don't count it in I. */
5768 if (STACK_GROWS_DOWNWARD)
5769 i -= crtl->args.pretend_args_size;
5770 else
5771 i += crtl->args.pretend_args_size;
5772
5773 /* expand_call should ensure this. */
5774 gcc_assert (!arg->locate.offset.var
5775 && arg->locate.size.var == 0);
5776 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
5777
5778 if (known_eq (arg->locate.offset.constant, i))
5779 {
5780 /* Even though they appear to be at the same location,
5781 if part of the outgoing argument is in registers,
5782 they aren't really at the same location. Check for
5783 this by making sure that the incoming size is the
5784 same as the outgoing size. */
5785 if (maybe_ne (arg->locate.size.constant, size_val))
5786 sibcall_failure = 1;
5787 }
5788 else if (maybe_in_range_p (arg->locate.offset.constant,
5789 i, size_val))
5790 sibcall_failure = 1;
5791 /* Use arg->locate.size.constant instead of size_rtx
5792 because we only care about the part of the argument
5793 on the stack. */
5794 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5795 arg->locate.size.constant))
5796 sibcall_failure = 1;
5797 }
5798 }
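/* Illustrative sketch (not part of GCC): the sibcall overlap tests above
   reduce to an interval-intersection check between the incoming argument
   at [I, I+SIZE) and the outgoing slot at [OFFSET, OFFSET+SIZE), plus the
   equal-offset/unequal-size case for partially-in-register arguments.  */
#if 0
static int
toy_ranges_overlap_p (long a_lo, long a_size, long b_lo, long b_size)
{
  return a_lo < b_lo + b_size && b_lo < a_lo + a_size;
}
#endif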
5799
5800 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5801 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5802 parm_align, partial, reg, excess, argblock,
5803 ARGS_SIZE_RTX (arg->locate.offset),
5804 reg_parm_stack_space,
5805 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
5806
5807 /* Unless this is a partially-in-register argument, the argument is now
5808 in the stack.
5809
5810 ??? Unlike the case above, in which we want the actual
5811 address of the data, so that we can load it directly into a
5812 register, here we want the address of the stack slot, so that
5813 it's properly aligned for word-by-word copying or something
5814 like that. It's not clear that this is always correct. */
5815 if (partial == 0)
5816 arg->value = arg->stack_slot;
5817 }
5818
5819 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5820 {
5821 tree type = TREE_TYPE (arg->tree_value);
5822 arg->parallel_value
5823 = emit_group_load_into_temps (arg->reg, arg->value, type,
5824 int_size_in_bytes (type));
5825 }
5826
5827 /* Mark all slots this store used. */
5828 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5829 && argblock && ! variable_size && arg->stack)
5830 mark_stack_region_used (lower_bound, upper_bound);
5831
5832 /* Once we have pushed something, pops can't safely
5833 be deferred during the rest of the arguments. */
5834 NO_DEFER_POP;
5835
5836 /* Free any temporary slots made in processing this argument. */
5837 pop_temp_slots ();
5838
5839 return sibcall_failure;
5840 }
5841
5842 /* Nonzero if we do not know how to pass TYPE solely in registers. */
5843
5844 bool
5845 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
5846 const_tree type)
5847 {
5848 if (!type)
5849 return false;
5850
5851 /* If the type has variable size... */
5852 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5853 return true;
5854
5855 /* If the type is marked as addressable (it is required
5856 to be constructed on the stack)... */
5857 if (TREE_ADDRESSABLE (type))
5858 return true;
5859
5860 return false;
5861 }
5862
5863 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
5864 takes trailing padding of a structure into account. */
5865 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
5866
5867 bool
5868 must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
5869 {
5870 if (!type)
5871 return false;
5872
5873 /* If the type has variable size... */
5874 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5875 return true;
5876
5877 /* If the type is marked as addressable (it is required
5878 to be constructed on the stack)... */
5879 if (TREE_ADDRESSABLE (type))
5880 return true;
5881
5882 if (TYPE_EMPTY_P (type))
5883 return false;
5884
5885 /* If the padding and mode of the type are such that a copy into
5886 a register would put it into the wrong part of the register... */
5887 if (mode == BLKmode
5888 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5889 && (targetm.calls.function_arg_padding (mode, type)
5890 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5891 return true;
5892
5893 return false;
5894 }
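/* Illustrative sketch (not part of GCC): the final test above.  A BLKmode
   value whose size is not a whole number of parameter-boundary units would
   land at the wrong end of its register if the padding direction matches
   the endianness as shown, so it must be passed on the stack.  The
   boundary value below is invented.  */
#if 0
#define TOY_PARM_BYTES 8
enum toy_pad { TOY_PAD_UPWARD, TOY_PAD_DOWNWARD };

static int
toy_padding_forces_stack (long size, enum toy_pad pad, int big_endian)
{
  if (size % TOY_PARM_BYTES == 0)
    return 0;
  return pad == (big_endian ? TOY_PAD_UPWARD : TOY_PAD_DOWNWARD);
}
#endif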
5895
5896 /* Return true if TYPE must be passed on the stack when passed to
5897 the "..." arguments of a function. */
5898
5899 bool
5900 must_pass_va_arg_in_stack (tree type)
5901 {
5902 return targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
5903 }
5904
5905 /* Tell the garbage collector about GTY markers in this source file. */
5906 #include "gt-calls.h"