gcc/calls.c (thirdparty/gcc.git, commit "poly_int: GET_MODE_SIZE")
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "cgraph.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "varasm.h"
40 #include "internal-fn.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "calls.h"
44 #include "expr.h"
45 #include "output.h"
46 #include "langhooks.h"
47 #include "except.h"
48 #include "dbgcnt.h"
49 #include "rtl-iter.h"
50 #include "tree-chkp.h"
51 #include "tree-vrp.h"
52 #include "tree-ssanames.h"
53 #include "rtl-chkp.h"
54 #include "intl.h"
55 #include "stringpool.h"
56 #include "attribs.h"
57 #include "builtins.h"
58
59 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
60 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
61
62 /* Data structure and subroutines used within expand_call. */
63
64 struct arg_data
65 {
66 /* Tree node for this argument. */
67 tree tree_value;
68 /* Mode for value; TYPE_MODE unless promoted. */
69 machine_mode mode;
70 /* Current RTL value for argument, or 0 if it isn't precomputed. */
71 rtx value;
72 /* Initially-computed RTL value for argument; only for const functions. */
73 rtx initial_value;
74 /* Register to pass this argument in, 0 if passed on stack, or a
75 PARALLEL if the arg is to be copied into multiple non-contiguous
76 registers. */
77 rtx reg;
78 /* Register to pass this argument in when generating tail call sequence.
79 This is not the same register as for normal calls on machines with
80 register windows. */
81 rtx tail_call_reg;
82 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
83 form for emit_group_move. */
84 rtx parallel_value;
85 /* If the value is passed in neither reg nor stack, this field holds the
86 number of the special slot to be used. */
87 rtx special_slot;
88 /* For pointer bounds, holds the index of the parm these bounds are
89 bound to. -1 if there is no such pointer. */
90 int pointer_arg;
91 /* If pointer_arg refers to a structure, then pointer_offset holds the
92 offset of the pointer within this structure. */
93 int pointer_offset;
94 /* If REG was promoted from the actual mode of the argument expression,
95 indicates whether the promotion is sign- or zero-extended. */
96 int unsignedp;
97 /* Number of bytes to put in registers. 0 means put the whole arg
98 in registers. Also 0 if not passed in registers. */
99 int partial;
100 /* Nonzero if argument must be passed on stack.
101 Note that some arguments may be passed on the stack
102 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
103 pass_on_stack identifies arguments that *cannot* go in registers. */
104 int pass_on_stack;
105 /* Some fields packaged up for locate_and_pad_parm. */
106 struct locate_and_pad_arg_data locate;
107 /* Location on the stack at which parameter should be stored. The store
108 has already been done if STACK == VALUE. */
109 rtx stack;
110 /* Location on the stack of the start of this argument slot. This can
111 differ from STACK if this arg pads downward. This location is known
112 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
113 rtx stack_slot;
114 /* Place that this stack area has been saved, if needed. */
115 rtx save_area;
116 /* If an argument's alignment does not permit direct copying into registers,
117 copy in smaller-sized pieces into pseudos. These are stored in a
118 block pointed to by this field. The next field says how many
119 word-sized pseudos we made. */
120 rtx *aligned_regs;
121 int n_aligned_regs;
122 };
123
124 /* A vector of one char per byte of stack space. A byte is nonzero if
125 the corresponding stack location has been used.
126 This vector is used to prevent a function call within an argument from
127 clobbering any stack already set up. */
128 static char *stack_usage_map;
129
130 /* Size of STACK_USAGE_MAP. */
131 static unsigned int highest_outgoing_arg_in_use;
132
133 /* Assume that any stack location at or above this byte index is used,
134 without checking the contents of stack_usage_map. */
135 static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
136
137 /* A bitmap of virtual-incoming stack space. A bit is set if the
138 corresponding stack location's tail call argument has already been stored
139 into the stack. This bitmap is used to prevent sibling call optimization
140 if the function tries to use its parent's incoming argument slots when
141 they have already been overwritten with tail call arguments. */
142 static sbitmap stored_args_map;
143
144 /* Assume that any virtual-incoming location at or above this byte index
145 has been stored, without checking the contents of stored_args_map. */
146 static unsigned HOST_WIDE_INT stored_args_watermark;
147
148 /* stack_arg_under_construction is nonzero when an argument may be
149 initialized with a constructor call (including a C function that
150 returns a BLKmode struct) and expand_call must take special action
151 to make sure the object being constructed does not overlap the
152 argument list for the constructor call. */
153 static int stack_arg_under_construction;
154
155 static void precompute_register_parameters (int, struct arg_data *, int *);
156 static void store_bounds (struct arg_data *, struct arg_data *);
157 static int store_one_arg (struct arg_data *, rtx, int, int, int);
158 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
159 static int finalize_must_preallocate (int, int, struct arg_data *,
160 struct args_size *);
161 static void precompute_arguments (int, struct arg_data *);
162 static void compute_argument_addresses (struct arg_data *, rtx, int);
163 static rtx rtx_for_function_call (tree, tree);
164 static void load_register_parameters (struct arg_data *, int, rtx *, int,
165 int, int *);
166 static int special_function_p (const_tree, int);
167 static int check_sibcall_argument_overlap_1 (rtx);
168 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
169
170 static tree split_complex_types (tree);
171
172 #ifdef REG_PARM_STACK_SPACE
173 static rtx save_fixed_argument_area (int, rtx, int *, int *);
174 static void restore_fixed_argument_area (rtx, rtx, int, int);
175 #endif
176 \f
177 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
178 stack region might already be in use. */
179
180 static bool
181 stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
182 unsigned int reg_parm_stack_space)
183 {
184 unsigned HOST_WIDE_INT const_lower, const_upper;
185 const_lower = constant_lower_bound (lower_bound);
186 if (!upper_bound.is_constant (&const_upper))
187 const_upper = HOST_WIDE_INT_M1U;
188
189 if (const_upper > stack_usage_watermark)
190 return true;
191
192 /* Don't worry about things in the fixed argument area;
193 it has already been saved. */
194 const_lower = MAX (const_lower, reg_parm_stack_space);
195 const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
196 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
197 if (stack_usage_map[i])
198 return true;
199 return false;
200 }
201
202 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
203 stack region are now in use. */
204
205 static void
206 mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
207 {
208 unsigned HOST_WIDE_INT const_lower, const_upper;
209 const_lower = constant_lower_bound (lower_bound);
210 if (upper_bound.is_constant (&const_upper))
211 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
212 stack_usage_map[i] = 1;
213 else
214 stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
215 }
216
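/* Editor's note: a minimal standalone sketch (not part of calls.c) of
   the usage-map plus watermark scheme used by stack_region_maybe_used_p
   and mark_stack_region_used above.  A region with a compile-time
   constant upper bound marks individual bytes; a region whose upper
   bound is not constant only lowers the watermark, and any later query
   that reaches past the watermark conservatively reports "in use".
   All names below are illustrative, and callers keep HI <= MAP_SIZE.  */
#include <stdbool.h>
#include <string.h>

#define MAP_SIZE 64
static unsigned char usage_map[MAP_SIZE];   /* one byte per stack byte */
static unsigned int watermark = (unsigned int) -1;

/* Mark [LO, HI) as used when HI is known.  */
static void
mark_region_used (unsigned int lo, unsigned int hi)
{
  memset (usage_map + lo, 1, hi - lo);
}

/* Upper bound unknown: everything from LO upward may be used.  */
static void
mark_region_used_above (unsigned int lo)
{
  if (lo < watermark)
    watermark = lo;
}

/* Might any byte of [LO, HI) already be in use?  */
static bool
region_maybe_used_p (unsigned int lo, unsigned int hi)
{
  if (hi > watermark)
    return true;               /* overlaps the conservative tail */
  for (unsigned int i = lo; i < hi; ++i)
    if (usage_map[i])
      return true;
  return false;
}
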
217 /* Force FUNEXP into a form suitable for the address of a CALL,
218 and return that as an rtx. Also load the static chain register
219 if FNDECL is a nested function.
220
221 CALL_FUSAGE points to a variable holding the prospective
222 CALL_INSN_FUNCTION_USAGE information. */
223
224 rtx
225 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
226 rtx *call_fusage, int reg_parm_seen, int flags)
227 {
228 /* Make a valid memory address and copy constants through pseudo-regs,
229 but not for a constant address if -fno-function-cse. */
230 if (GET_CODE (funexp) != SYMBOL_REF)
231 {
232 /* If it's an indirect call by descriptor, generate code to perform
233 runtime identification of the pointer and load the descriptor. */
234 if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
235 {
236 const int bit_val = targetm.calls.custom_function_descriptors;
237 rtx call_lab = gen_label_rtx ();
238
239 gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
240 fndecl_or_type
241 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
242 fndecl_or_type);
243 DECL_STATIC_CHAIN (fndecl_or_type) = 1;
244 rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
245
246 if (GET_MODE (funexp) != Pmode)
247 funexp = convert_memory_address (Pmode, funexp);
248
249 /* Avoid long live ranges around function calls. */
250 funexp = copy_to_mode_reg (Pmode, funexp);
251
252 if (REG_P (chain))
253 emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
254
255 /* Emit the runtime identification pattern. */
256 rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
257 emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
258 call_lab);
259
260 /* Statically predict the branch as very likely taken. */
261 rtx_insn *insn = get_last_insn ();
262 if (JUMP_P (insn))
263 predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
264
265 /* Load the descriptor. */
266 rtx mem = gen_rtx_MEM (ptr_mode,
267 plus_constant (Pmode, funexp, - bit_val));
268 MEM_NOTRAP_P (mem) = 1;
269 mem = convert_memory_address (Pmode, mem);
270 emit_move_insn (chain, mem);
271
272 mem = gen_rtx_MEM (ptr_mode,
273 plus_constant (Pmode, funexp,
274 POINTER_SIZE / BITS_PER_UNIT
275 - bit_val));
276 MEM_NOTRAP_P (mem) = 1;
277 mem = convert_memory_address (Pmode, mem);
278 emit_move_insn (funexp, mem);
279
280 emit_label (call_lab);
281
282 if (REG_P (chain))
283 {
284 use_reg (call_fusage, chain);
285 STATIC_CHAIN_REG_P (chain) = 1;
286 }
287
288 /* Make sure we're not going to be overwritten below. */
289 gcc_assert (!static_chain_value);
290 }
291
292 /* If we are using registers for parameters, force the
293 function address into a register now. */
294 funexp = ((reg_parm_seen
295 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
296 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
297 : memory_address (FUNCTION_MODE, funexp));
298 }
299 else
300 {
301 /* funexp could be a SYMBOL_REF that represents a function pointer of
302 ptr_mode, in which case it must be converted into address mode to be
303 a valid address for a memory rtx pattern. See PR 64971. */
304 if (GET_MODE (funexp) != Pmode)
305 funexp = convert_memory_address (Pmode, funexp);
306
307 if (!(flags & ECF_SIBCALL))
308 {
309 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
310 funexp = force_reg (Pmode, funexp);
311 }
312 }
313
314 if (static_chain_value != 0
315 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
316 || DECL_STATIC_CHAIN (fndecl_or_type)))
317 {
318 rtx chain;
319
320 chain = targetm.calls.static_chain (fndecl_or_type, false);
321 static_chain_value = convert_memory_address (Pmode, static_chain_value);
322
323 emit_move_insn (chain, static_chain_value);
324 if (REG_P (chain))
325 {
326 use_reg (call_fusage, chain);
327 STATIC_CHAIN_REG_P (chain) = 1;
328 }
329 }
330
331 return funexp;
332 }
333
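/* Editor's note: the descriptor test emitted above, restated as a
   standalone C sketch.  With -fno-trampolines, a pointer to a nested
   function addresses a two-word descriptor {static chain, entry} and
   is tagged by setting a low bit (BIT_VAL, the value supplied by
   targetm.calls.custom_function_descriptors) that a real, aligned
   code address never has.  The layout mirrors the loads above (chain
   at FUNEXP - BIT_VAL, entry one pointer later); all names here are
   illustrative, not GCC API.  */
#include <stdint.h>

struct descriptor { void *chain; void (*entry) (void); };

static void *static_chain_reg;  /* stand-in for the static chain register */

static void
call_possibly_descriptor (uintptr_t funexp, uintptr_t bit_val)
{
  void (*target) (void);
  if (funexp & bit_val)         /* tagged: FUNEXP addresses a descriptor */
    {
      struct descriptor *d = (struct descriptor *) (funexp - bit_val);
      static_chain_reg = d->chain;  /* load the static chain */
      target = d->entry;            /* load the real entry point */
    }
  else                          /* plain function pointer */
    target = (void (*) (void)) funexp;
  target ();
}
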
334 /* Generate instructions to call function FUNEXP,
335 and optionally pop the results.
336 The CALL_INSN is the first insn generated.
337
338 FNDECL is the declaration node of the function. This is given to the
339 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
340 its own args.
341
342 FUNTYPE is the data type of the function. This is given to the hook
343 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
344 own args. We used to allow an identifier for library functions, but
345 that doesn't work when the return type is an aggregate type and the
346 calling convention says that the pointer to this aggregate is to be
347 popped by the callee.
348
349 STACK_SIZE is the number of bytes of arguments on the stack,
350 ROUNDED_STACK_SIZE is that number rounded up to
351 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
352 both to put into the call insn and to generate explicit popping
353 code if necessary.
354
355 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
356 It is zero if this call doesn't want a structure value.
357
358 NEXT_ARG_REG is the rtx that results from executing
359 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
360 just after all the args have had their registers assigned.
361 This could be whatever you like, but normally it is the first
362 arg-register beyond those used for args in this call,
363 or 0 if all the arg-registers are used in this call.
364 It is passed on to `gen_call' so you can put this info in the call insn.
365
366 VALREG is a hard register in which a value is returned,
367 or 0 if the call does not return a value.
368
369 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
370 the args to this call were processed.
371 We restore `inhibit_defer_pop' to that value.
372
373 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
374 denote registers used by the called function. */
375
376 static void
377 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
378 tree funtype ATTRIBUTE_UNUSED,
379 poly_int64 stack_size ATTRIBUTE_UNUSED,
380 poly_int64 rounded_stack_size,
381 poly_int64 struct_value_size ATTRIBUTE_UNUSED,
382 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
383 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
384 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
385 {
386 rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
387 rtx call, funmem, pat;
388 int already_popped = 0;
389 poly_int64 n_popped = 0;
390
391 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
392 patterns exist). Any popping that the callee does on return will
393 be from our caller's frame rather than ours. */
394 if (!(ecf_flags & ECF_SIBCALL))
395 {
396 n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
397
398 #ifdef CALL_POPS_ARGS
399 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
400 #endif
401 }
402
403 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
404 needs to be done, and we don't want to load it into a register as an
405 optimization, because prepare_call_address already did it if needed. */
406 if (GET_CODE (funexp) != SYMBOL_REF)
407 funexp = memory_address (FUNCTION_MODE, funexp);
408
409 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
410 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
411 {
412 tree t = fndecl;
413
414 /* Although a built-in FUNCTION_DECL and its non-__builtin
415 counterpart compare equal and get a shared mem_attrs, they
416 produce different dump output in compare-debug compilations:
417 if an entry gets garbage collected in one compilation and a
418 different (but equivalent) entry is then added, while the other
419 compilation doesn't run the garbage collector at the same spot,
420 the latter still shares the mem_attr with the equivalent entry. */
421 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
422 {
423 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
424 if (t2)
425 t = t2;
426 }
427
428 set_mem_expr (funmem, t);
429 }
430 else if (fntree)
431 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
432
433 if (ecf_flags & ECF_SIBCALL)
434 {
435 if (valreg)
436 pat = targetm.gen_sibcall_value (valreg, funmem,
437 rounded_stack_size_rtx,
438 next_arg_reg, NULL_RTX);
439 else
440 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
441 next_arg_reg,
442 gen_int_mode (struct_value_size, Pmode));
443 }
444 /* If the target has "call" or "call_value" insns, then prefer them
445 if no arguments are actually popped. If the target does not have
446 "call" or "call_value" insns, then we must use the popping versions
447 even if the call has no arguments to pop. */
448 else if (maybe_ne (n_popped, 0)
449 || !(valreg
450 ? targetm.have_call_value ()
451 : targetm.have_call ()))
452 {
453 rtx n_pop = gen_int_mode (n_popped, Pmode);
454
455 /* If this subroutine pops its own args, record that in the call insn
456 if possible, for the sake of frame pointer elimination. */
457
458 if (valreg)
459 pat = targetm.gen_call_value_pop (valreg, funmem,
460 rounded_stack_size_rtx,
461 next_arg_reg, n_pop);
462 else
463 pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
464 next_arg_reg, n_pop);
465
466 already_popped = 1;
467 }
468 else
469 {
470 if (valreg)
471 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
472 next_arg_reg, NULL_RTX);
473 else
474 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
475 gen_int_mode (struct_value_size, Pmode));
476 }
477 emit_insn (pat);
478
479 /* Find the call we just emitted. */
480 rtx_call_insn *call_insn = last_call_insn ();
481
482 /* Some targets create a fresh MEM instead of reusing the one provided
483 above. Set its MEM_EXPR. */
484 call = get_call_rtx_from (call_insn);
485 if (call
486 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
487 && MEM_EXPR (funmem) != NULL_TREE)
488 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
489
490 /* Mark instrumented calls. */
491 if (call && fntree)
492 CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);
493
494 /* Put the register usage information there. */
495 add_function_usage_to (call_insn, call_fusage);
496
497 /* If this is a const call, then set the insn's unchanging bit. */
498 if (ecf_flags & ECF_CONST)
499 RTL_CONST_CALL_P (call_insn) = 1;
500
501 /* If this is a pure call, then set the insn's unchanging bit. */
502 if (ecf_flags & ECF_PURE)
503 RTL_PURE_CALL_P (call_insn) = 1;
504
505 /* If this is a looping const or pure call, then set the corresponding bit. */
506 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
507 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
508
509 /* Create a nothrow REG_EH_REGION note, if needed. */
510 make_reg_eh_region_note (call_insn, ecf_flags, 0);
511
512 if (ecf_flags & ECF_NORETURN)
513 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
514
515 if (ecf_flags & ECF_RETURNS_TWICE)
516 {
517 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
518 cfun->calls_setjmp = 1;
519 }
520
521 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
522
523 /* Restore this now, so that we do defer pops for this call's args
524 if the context of the call as a whole permits. */
525 inhibit_defer_pop = old_inhibit_defer_pop;
526
527 if (maybe_ne (n_popped, 0))
528 {
529 if (!already_popped)
530 CALL_INSN_FUNCTION_USAGE (call_insn)
531 = gen_rtx_EXPR_LIST (VOIDmode,
532 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
533 CALL_INSN_FUNCTION_USAGE (call_insn));
534 rounded_stack_size -= n_popped;
535 rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
536 stack_pointer_delta -= n_popped;
537
538 add_args_size_note (call_insn, stack_pointer_delta);
539
540 /* If popping is needed, stack realignment must use DRAP. */
541 if (SUPPORTS_STACK_ALIGNMENT)
542 crtl->need_drap = true;
543 }
544 /* For noreturn calls when not accumulating outgoing args force
545 REG_ARGS_SIZE note to prevent crossjumping of calls with different
546 args sizes. */
547 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
548 add_args_size_note (call_insn, stack_pointer_delta);
549
550 if (!ACCUMULATE_OUTGOING_ARGS)
551 {
552 /* If returning from the subroutine does not automatically pop the args,
553 we need an instruction to pop them sooner or later.
554 Perhaps do it now; perhaps just record how much space to pop later.
555
556 If returning from the subroutine does pop the args, indicate that the
557 stack pointer will be changed. */
558
559 if (maybe_ne (rounded_stack_size, 0))
560 {
561 if (ecf_flags & ECF_NORETURN)
562 /* Just pretend we did the pop. */
563 stack_pointer_delta -= rounded_stack_size;
564 else if (flag_defer_pop && inhibit_defer_pop == 0
565 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
566 pending_stack_adjust += rounded_stack_size;
567 else
568 adjust_stack (rounded_stack_size_rtx);
569 }
570 }
571 /* When we accumulate outgoing args, we must avoid any stack manipulations.
572 Restore the stack pointer to its original value now. Usually
573 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
574 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
575 popping variants of functions exist as well.
576
577 ??? We may optimize similar to defer_pop above, but it is
578 probably not worthwhile.
579
580 ??? It will be worthwhile to enable combine_stack_adjustments even for
581 such machines. */
582 else if (maybe_ne (n_popped, 0))
583 anti_adjust_stack (gen_int_mode (n_popped, Pmode));
584 }
585
586 /* Determine if the function identified by FNDECL is one with
587 special properties we wish to know about. Modify FLAGS accordingly.
588
589 For example, if the function might return more than one time (setjmp), then
590 set ECF_RETURNS_TWICE.
591
592 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
593 space from the stack such as alloca. */
594
595 static int
596 special_function_p (const_tree fndecl, int flags)
597 {
598 tree name_decl = DECL_NAME (fndecl);
599
600 /* For instrumentation clones we want to derive flags
601 from the original name. */
602 if (cgraph_node::get (fndecl)
603 && cgraph_node::get (fndecl)->instrumentation_clone)
604 name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);
605
606 if (fndecl && name_decl
607 && IDENTIFIER_LENGTH (name_decl) <= 11
608 /* Exclude functions not at the file scope, or not `extern',
609 since they are not the magic functions we would otherwise
610 think they are.
611 FIXME: this should be handled with attributes, not with this
612 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
613 because you can declare fork() inside a function if you
614 wish. */
615 && (DECL_CONTEXT (fndecl) == NULL_TREE
616 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
617 && TREE_PUBLIC (fndecl))
618 {
619 const char *name = IDENTIFIER_POINTER (name_decl);
620 const char *tname = name;
621
622 /* We assume that alloca will always be called by name. It
623 makes no sense to pass it as a pointer-to-function to
624 anything that does not understand its behavior. */
625 if (IDENTIFIER_LENGTH (name_decl) == 6
626 && name[0] == 'a'
627 && ! strcmp (name, "alloca"))
628 flags |= ECF_MAY_BE_ALLOCA;
629
630 /* Disregard prefix _ or __. */
631 if (name[0] == '_')
632 {
633 if (name[1] == '_')
634 tname += 2;
635 else
636 tname += 1;
637 }
638
639 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
640 if (! strcmp (tname, "setjmp")
641 || ! strcmp (tname, "sigsetjmp")
642 || ! strcmp (name, "savectx")
643 || ! strcmp (name, "vfork")
644 || ! strcmp (name, "getcontext"))
645 flags |= ECF_RETURNS_TWICE;
646 }
647
648 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
649 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
650 flags |= ECF_MAY_BE_ALLOCA;
651
652 return flags;
653 }
654
655 /* Similar to special_function_p; return a set of ERF_ flags for the
656 function FNDECL. */
657 static int
658 decl_return_flags (tree fndecl)
659 {
660 tree attr;
661 tree type = TREE_TYPE (fndecl);
662 if (!type)
663 return 0;
664
665 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
666 if (!attr)
667 return 0;
668
669 attr = TREE_VALUE (TREE_VALUE (attr));
670 if (!attr || TREE_STRING_LENGTH (attr) < 1)
671 return 0;
672
673 switch (TREE_STRING_POINTER (attr)[0])
674 {
675 case '1':
676 case '2':
677 case '3':
678 case '4':
679 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
680
681 case 'm':
682 return ERF_NOALIAS;
683
684 case '.':
685 default:
686 return 0;
687 }
688 }
689
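/* Editor's note: a small sketch of how the leading character of a
   "fn spec" attribute string is decoded by decl_return_flags above.
   The SKETCH_ERF_* values are illustrative placeholders, not the
   real GCC flag definitions.  */
enum { SKETCH_ERF_RETURNS_ARG = 4, SKETCH_ERF_NOALIAS = 8 };

static int
sketch_decode_fn_spec (const char *spec)
{
  switch (spec[0])
    {
    case '1': case '2': case '3': case '4':
      /* The function returns its Nth argument ('1' encodes arg 0).  */
      return SKETCH_ERF_RETURNS_ARG | (spec[0] - '1');
    case 'm':
      return SKETCH_ERF_NOALIAS;  /* returns fresh, non-aliased memory */
    default:
      return 0;                   /* '.' or anything else: no flags */
    }
}
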
690 /* Return nonzero when FNDECL represents a call to setjmp. */
691
692 int
693 setjmp_call_p (const_tree fndecl)
694 {
695 if (DECL_IS_RETURNS_TWICE (fndecl))
696 return ECF_RETURNS_TWICE;
697 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
698 }
699
700
701 /* Return true if STMT may be an alloca call. */
702
703 bool
704 gimple_maybe_alloca_call_p (const gimple *stmt)
705 {
706 tree fndecl;
707
708 if (!is_gimple_call (stmt))
709 return false;
710
711 fndecl = gimple_call_fndecl (stmt);
712 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
713 return true;
714
715 return false;
716 }
717
718 /* Return true if STMT is a builtin alloca call. */
719
720 bool
721 gimple_alloca_call_p (const gimple *stmt)
722 {
723 tree fndecl;
724
725 if (!is_gimple_call (stmt))
726 return false;
727
728 fndecl = gimple_call_fndecl (stmt);
729 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
730 switch (DECL_FUNCTION_CODE (fndecl))
731 {
732 CASE_BUILT_IN_ALLOCA:
733 return true;
734 default:
735 break;
736 }
737
738 return false;
739 }
740
741 /* Return true when exp contains a builtin alloca call. */
742
743 bool
744 alloca_call_p (const_tree exp)
745 {
746 tree fndecl;
747 if (TREE_CODE (exp) == CALL_EXPR
748 && (fndecl = get_callee_fndecl (exp))
749 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
750 switch (DECL_FUNCTION_CODE (fndecl))
751 {
752 CASE_BUILT_IN_ALLOCA:
753 return true;
754 default:
755 break;
756 }
757
758 return false;
759 }
760
761 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
762 function. Return FALSE otherwise. */
763
764 static bool
765 is_tm_builtin (const_tree fndecl)
766 {
767 if (fndecl == NULL)
768 return false;
769
770 if (decl_is_tm_clone (fndecl))
771 return true;
772
773 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
774 {
775 switch (DECL_FUNCTION_CODE (fndecl))
776 {
777 case BUILT_IN_TM_COMMIT:
778 case BUILT_IN_TM_COMMIT_EH:
779 case BUILT_IN_TM_ABORT:
780 case BUILT_IN_TM_IRREVOCABLE:
781 case BUILT_IN_TM_GETTMCLONE_IRR:
782 case BUILT_IN_TM_MEMCPY:
783 case BUILT_IN_TM_MEMMOVE:
784 case BUILT_IN_TM_MEMSET:
785 CASE_BUILT_IN_TM_STORE (1):
786 CASE_BUILT_IN_TM_STORE (2):
787 CASE_BUILT_IN_TM_STORE (4):
788 CASE_BUILT_IN_TM_STORE (8):
789 CASE_BUILT_IN_TM_STORE (FLOAT):
790 CASE_BUILT_IN_TM_STORE (DOUBLE):
791 CASE_BUILT_IN_TM_STORE (LDOUBLE):
792 CASE_BUILT_IN_TM_STORE (M64):
793 CASE_BUILT_IN_TM_STORE (M128):
794 CASE_BUILT_IN_TM_STORE (M256):
795 CASE_BUILT_IN_TM_LOAD (1):
796 CASE_BUILT_IN_TM_LOAD (2):
797 CASE_BUILT_IN_TM_LOAD (4):
798 CASE_BUILT_IN_TM_LOAD (8):
799 CASE_BUILT_IN_TM_LOAD (FLOAT):
800 CASE_BUILT_IN_TM_LOAD (DOUBLE):
801 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
802 CASE_BUILT_IN_TM_LOAD (M64):
803 CASE_BUILT_IN_TM_LOAD (M128):
804 CASE_BUILT_IN_TM_LOAD (M256):
805 case BUILT_IN_TM_LOG:
806 case BUILT_IN_TM_LOG_1:
807 case BUILT_IN_TM_LOG_2:
808 case BUILT_IN_TM_LOG_4:
809 case BUILT_IN_TM_LOG_8:
810 case BUILT_IN_TM_LOG_FLOAT:
811 case BUILT_IN_TM_LOG_DOUBLE:
812 case BUILT_IN_TM_LOG_LDOUBLE:
813 case BUILT_IN_TM_LOG_M64:
814 case BUILT_IN_TM_LOG_M128:
815 case BUILT_IN_TM_LOG_M256:
816 return true;
817 default:
818 break;
819 }
820 }
821 return false;
822 }
823
824 /* Detect flags (function attributes) from the function decl or type node. */
825
826 int
827 flags_from_decl_or_type (const_tree exp)
828 {
829 int flags = 0;
830
831 if (DECL_P (exp))
832 {
833 /* The function exp may have the `malloc' attribute. */
834 if (DECL_IS_MALLOC (exp))
835 flags |= ECF_MALLOC;
836
837 /* The function exp may have the `returns_twice' attribute. */
838 if (DECL_IS_RETURNS_TWICE (exp))
839 flags |= ECF_RETURNS_TWICE;
840
841 /* Process the pure and const attributes. */
842 if (TREE_READONLY (exp))
843 flags |= ECF_CONST;
844 if (DECL_PURE_P (exp))
845 flags |= ECF_PURE;
846 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
847 flags |= ECF_LOOPING_CONST_OR_PURE;
848
849 if (DECL_IS_NOVOPS (exp))
850 flags |= ECF_NOVOPS;
851 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
852 flags |= ECF_LEAF;
853 if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
854 flags |= ECF_COLD;
855
856 if (TREE_NOTHROW (exp))
857 flags |= ECF_NOTHROW;
858
859 if (flag_tm)
860 {
861 if (is_tm_builtin (exp))
862 flags |= ECF_TM_BUILTIN;
863 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
864 || lookup_attribute ("transaction_pure",
865 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
866 flags |= ECF_TM_PURE;
867 }
868
869 flags = special_function_p (exp, flags);
870 }
871 else if (TYPE_P (exp))
872 {
873 if (TYPE_READONLY (exp))
874 flags |= ECF_CONST;
875
876 if (flag_tm
877 && ((flags & ECF_CONST) != 0
878 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
879 flags |= ECF_TM_PURE;
880 }
881 else
882 gcc_unreachable ();
883
884 if (TREE_THIS_VOLATILE (exp))
885 {
886 flags |= ECF_NORETURN;
887 if (flags & (ECF_CONST|ECF_PURE))
888 flags |= ECF_LOOPING_CONST_OR_PURE;
889 }
890
891 return flags;
892 }
893
894 /* Detect flags from a CALL_EXPR. */
895
896 int
897 call_expr_flags (const_tree t)
898 {
899 int flags;
900 tree decl = get_callee_fndecl (t);
901
902 if (decl)
903 flags = flags_from_decl_or_type (decl);
904 else if (CALL_EXPR_FN (t) == NULL_TREE)
905 flags = internal_fn_flags (CALL_EXPR_IFN (t));
906 else
907 {
908 tree type = TREE_TYPE (CALL_EXPR_FN (t));
909 if (type && TREE_CODE (type) == POINTER_TYPE)
910 flags = flags_from_decl_or_type (TREE_TYPE (type));
911 else
912 flags = 0;
913 if (CALL_EXPR_BY_DESCRIPTOR (t))
914 flags |= ECF_BY_DESCRIPTOR;
915 }
916
917 return flags;
918 }
919
920 /* Return true if TYPE should be passed by invisible reference. */
921
922 bool
923 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
924 tree type, bool named_arg)
925 {
926 if (type)
927 {
928 /* If this type contains non-trivial constructors, then it is
929 forbidden for the middle-end to create any new copies. */
930 if (TREE_ADDRESSABLE (type))
931 return true;
932
933 /* GCC post 3.4 passes *all* variable sized types by reference. */
934 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
935 return true;
936
937 /* If a record type should be passed the same as its first (and only)
938 member, use the type and mode of that member. */
939 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
940 {
941 type = TREE_TYPE (first_field (type));
942 mode = TYPE_MODE (type);
943 }
944 }
945
946 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
947 type, named_arg);
948 }
949
950 /* Return true if TYPE, which is passed by reference, should be callee
951 copied instead of caller copied. */
952
953 bool
954 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
955 tree type, bool named_arg)
956 {
957 if (type && TREE_ADDRESSABLE (type))
958 return false;
959 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
960 named_arg);
961 }
962
963
964 /* Precompute all register parameters as described by ARGS, storing values
965 into fields within the ARGS array.
966
967 NUM_ACTUALS indicates the total number of elements in the ARGS array.
968
969 Set REG_PARM_SEEN if we encounter a register parameter. */
970
971 static void
972 precompute_register_parameters (int num_actuals, struct arg_data *args,
973 int *reg_parm_seen)
974 {
975 int i;
976
977 *reg_parm_seen = 0;
978
979 for (i = 0; i < num_actuals; i++)
980 if (args[i].reg != 0 && ! args[i].pass_on_stack)
981 {
982 *reg_parm_seen = 1;
983
984 if (args[i].value == 0)
985 {
986 push_temp_slots ();
987 args[i].value = expand_normal (args[i].tree_value);
988 preserve_temp_slots (args[i].value);
989 pop_temp_slots ();
990 }
991
992 /* If we are to promote the function arg to a wider mode,
993 do it now. */
994
995 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
996 args[i].value
997 = convert_modes (args[i].mode,
998 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
999 args[i].value, args[i].unsignedp);
1000
1001 /* If the value is a non-legitimate constant, force it into a
1002 pseudo now. TLS symbols sometimes need a call to resolve. */
1003 if (CONSTANT_P (args[i].value)
1004 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
1005 args[i].value = force_reg (args[i].mode, args[i].value);
1006
1007 /* If we're going to have to load the value by parts, pull the
1008 parts into pseudos. The part extraction process can involve
1009 non-trivial computation. */
1010 if (GET_CODE (args[i].reg) == PARALLEL)
1011 {
1012 tree type = TREE_TYPE (args[i].tree_value);
1013 args[i].parallel_value
1014 = emit_group_load_into_temps (args[i].reg, args[i].value,
1015 type, int_size_in_bytes (type));
1016 }
1017
1018 /* If the value is expensive, and we are inside an appropriately
1019 short loop, put the value into a pseudo and then put the pseudo
1020 into the hard reg.
1021
1022 For small register classes, also do this if this call uses
1023 register parameters. This is to avoid reload conflicts while
1024 loading the parameter registers. */
1025
1026 else if ((! (REG_P (args[i].value)
1027 || (GET_CODE (args[i].value) == SUBREG
1028 && REG_P (SUBREG_REG (args[i].value)))))
1029 && args[i].mode != BLKmode
1030 && (set_src_cost (args[i].value, args[i].mode,
1031 optimize_insn_for_speed_p ())
1032 > COSTS_N_INSNS (1))
1033 && ((*reg_parm_seen
1034 && targetm.small_register_classes_for_mode_p (args[i].mode))
1035 || optimize))
1036 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1037 }
1038 }
1039
1040 #ifdef REG_PARM_STACK_SPACE
1041
1042 /* The argument list is the property of the called routine and it
1043 may clobber it. If the fixed area has been used for previous
1044 parameters, we must save and restore it. */
1045
1046 static rtx
1047 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1048 {
1049 unsigned int low;
1050 unsigned int high;
1051
1052 /* Compute the boundary of the area that needs to be saved, if any. */
1053 high = reg_parm_stack_space;
1054 if (ARGS_GROW_DOWNWARD)
1055 high += 1;
1056
1057 if (high > highest_outgoing_arg_in_use)
1058 high = highest_outgoing_arg_in_use;
1059
1060 for (low = 0; low < high; low++)
1061 if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
1062 {
1063 int num_to_save;
1064 machine_mode save_mode;
1065 int delta;
1066 rtx addr;
1067 rtx stack_area;
1068 rtx save_area;
1069
1070 while (stack_usage_map[--high] == 0)
1071 ;
1072
1073 *low_to_save = low;
1074 *high_to_save = high;
1075
1076 num_to_save = high - low + 1;
1077
1078 /* If we don't have the required alignment, must do this
1079 in BLKmode. */
1080 scalar_int_mode imode;
1081 if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1082 && (low & (MIN (GET_MODE_SIZE (imode),
1083 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1084 save_mode = imode;
1085 else
1086 save_mode = BLKmode;
1087
1088 if (ARGS_GROW_DOWNWARD)
1089 delta = -high;
1090 else
1091 delta = low;
1092
1093 addr = plus_constant (Pmode, argblock, delta);
1094 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1095
1096 set_mem_align (stack_area, PARM_BOUNDARY);
1097 if (save_mode == BLKmode)
1098 {
1099 save_area = assign_stack_temp (BLKmode, num_to_save);
1100 emit_block_move (validize_mem (save_area), stack_area,
1101 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1102 }
1103 else
1104 {
1105 save_area = gen_reg_rtx (save_mode);
1106 emit_move_insn (save_area, stack_area);
1107 }
1108
1109 return save_area;
1110 }
1111
1112 return NULL_RTX;
1113 }
1114
1115 static void
1116 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1117 {
1118 machine_mode save_mode = GET_MODE (save_area);
1119 int delta;
1120 rtx addr, stack_area;
1121
1122 if (ARGS_GROW_DOWNWARD)
1123 delta = -high_to_save;
1124 else
1125 delta = low_to_save;
1126
1127 addr = plus_constant (Pmode, argblock, delta);
1128 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1129 set_mem_align (stack_area, PARM_BOUNDARY);
1130
1131 if (save_mode != BLKmode)
1132 emit_move_insn (stack_area, save_area);
1133 else
1134 emit_block_move (stack_area, validize_mem (save_area),
1135 GEN_INT (high_to_save - low_to_save + 1),
1136 BLOCK_OP_CALL_PARM);
1137 }
1138 #endif /* REG_PARM_STACK_SPACE */
1139
1140 /* If any elements in ARGS refer to parameters that are to be passed in
1141 registers, but not in memory, and whose alignment does not permit a
1142 direct copy into registers, copy the values into a group of pseudos
1143 which we will later copy into the appropriate hard registers.
1144
1145 Pseudos for each unaligned argument will be stored into the array
1146 args[argnum].aligned_regs. The caller is responsible for deallocating
1147 the aligned_regs array if it is nonzero. */
1148
1149 static void
1150 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1151 {
1152 int i, j;
1153
1154 for (i = 0; i < num_actuals; i++)
1155 if (args[i].reg != 0 && ! args[i].pass_on_stack
1156 && GET_CODE (args[i].reg) != PARALLEL
1157 && args[i].mode == BLKmode
1158 && MEM_P (args[i].value)
1159 && (MEM_ALIGN (args[i].value)
1160 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1161 {
1162 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1163 int endian_correction = 0;
1164
1165 if (args[i].partial)
1166 {
1167 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1168 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1169 }
1170 else
1171 {
1172 args[i].n_aligned_regs
1173 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1174 }
1175
1176 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1177
1178 /* Structures smaller than a word are normally aligned to the
1179 least significant byte. On a BYTES_BIG_ENDIAN machine,
1180 this means we must skip the empty high order bytes when
1181 calculating the bit offset. */
1182 if (bytes < UNITS_PER_WORD
1183 #ifdef BLOCK_REG_PADDING
1184 && (BLOCK_REG_PADDING (args[i].mode,
1185 TREE_TYPE (args[i].tree_value), 1)
1186 == PAD_DOWNWARD)
1187 #else
1188 && BYTES_BIG_ENDIAN
1189 #endif
1190 )
1191 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1192
1193 for (j = 0; j < args[i].n_aligned_regs; j++)
1194 {
1195 rtx reg = gen_reg_rtx (word_mode);
1196 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1197 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1198
1199 args[i].aligned_regs[j] = reg;
1200 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1201 word_mode, word_mode, false, NULL);
1202
1203 /* There is no need to restrict this code to loading items
1204 in TYPE_ALIGN sized hunks. The bitfield instructions can
1205 load up entire word sized registers efficiently.
1206
1207 ??? This may not be needed anymore.
1208 We used to emit a clobber here but that doesn't let later
1209 passes optimize the instructions we emit. By storing 0 into
1210 the register later passes know the first AND to zero out the
1211 bitfield being set in the register is unnecessary. The store
1212 of 0 will be deleted as will at least the first AND. */
1213
1214 emit_move_insn (reg, const0_rtx);
1215
1216 bytes -= bitsize / BITS_PER_UNIT;
1217 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1218 word_mode, word, false);
1219 }
1220 }
1221 }
1222
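/* Editor's note: what store_unaligned_arguments_into_pseudos does,
   restated in portable C: an under-aligned argument is copied a word
   at a time into temporaries, each temporary first zeroed so a
   trailing partial word has defined upper bytes.  memcpy stands in
   for the bit-field extract/store pair; little-endian layout is
   assumed and all names are illustrative.  */
#include <stddef.h>
#include <string.h>

static void
sketch_copy_into_word_temps (const unsigned char *src, size_t bytes,
			     unsigned long *temps)
{
  const size_t word = sizeof (unsigned long);
  for (size_t i = 0; bytes != 0; ++i)
    {
      size_t chunk = bytes < word ? bytes : word;
      unsigned long w = 0;                   /* like the store of const0_rtx */
      memcpy (&w, src + i * word, chunk);    /* unaligned-safe load */
      temps[i] = w;
      bytes -= chunk;
    }
}
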
1223 /* The limit set by -Walloc-size-larger-than=. */
1224 static GTY(()) tree alloc_object_size_limit;
1225
1226 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1227 setting if the option is specified, or to the maximum object size if it
1228 is not. Return the initialized value. */
1229
1230 static tree
1231 alloc_max_size (void)
1232 {
1233 if (!alloc_object_size_limit)
1234 {
1235 alloc_object_size_limit = max_object_size ();
1236
1237 if (warn_alloc_size_limit)
1238 {
1239 char *end = NULL;
1240 errno = 0;
1241 unsigned HOST_WIDE_INT unit = 1;
1242 unsigned HOST_WIDE_INT limit
1243 = strtoull (warn_alloc_size_limit, &end, 10);
1244
1245 if (!errno)
1246 {
1247 if (end && *end)
1248 {
1249 /* Numeric option arguments are at most INT_MAX. Make it
1250 possible to specify a larger value by accepting common
1251 suffixes. */
1252 if (!strcmp (end, "kB"))
1253 unit = 1000;
1254 else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
1255 unit = 1024;
1256 else if (!strcmp (end, "MB"))
1257 unit = HOST_WIDE_INT_UC (1000) * 1000;
1258 else if (!strcasecmp (end, "MiB"))
1259 unit = HOST_WIDE_INT_UC (1024) * 1024;
1260 else if (!strcasecmp (end, "GB"))
1261 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
1262 else if (!strcasecmp (end, "GiB"))
1263 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
1264 else if (!strcasecmp (end, "TB"))
1265 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
1266 else if (!strcasecmp (end, "TiB"))
1267 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
1268 else if (!strcasecmp (end, "PB"))
1269 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
1270 else if (!strcasecmp (end, "PiB"))
1271 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
1272 else if (!strcasecmp (end, "EB"))
1273 unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
1274 * 1000;
1275 else if (!strcasecmp (end, "EiB"))
1276 unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
1277 * 1024;
1278 else
1279 unit = 0;
1280 }
1281
1282 if (unit)
1283 {
1284 widest_int w = wi::mul (limit, unit);
1285 if (w < wi::to_widest (alloc_object_size_limit))
1286 alloc_object_size_limit
1287 = wide_int_to_tree (ptrdiff_type_node, w);
1288 }
1289 }
1290 }
1291 }
1292 return alloc_object_size_limit;
1293 }
1294
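/* Editor's note: a standalone sketch of the suffix scaling performed
   by alloc_max_size above.  SI spellings ("kB", "MB", ...) scale by
   powers of 1000; IEC spellings ("KiB", "MiB", ...) are matched
   case-insensitively and scale by powers of 1024.  The original is
   looser for some SI suffixes and also accepts plain "KB" as 1024;
   this sketch matches SI spellings exactly for uniformity, omits the
   overflow check, and uses illustrative names throughout.  */
#include <stdlib.h>
#include <string.h>
#include <strings.h>

static unsigned long long
sketch_parse_size_limit (const char *arg)
{
  char *end = NULL;
  unsigned long long n = strtoull (arg, &end, 10);
  unsigned long long unit = 1;

  if (end && *end)
    {
      static const struct { const char *si, *iec; int exp; } suffixes[] = {
	{ "kB", "KiB", 1 }, { "MB", "MiB", 2 }, { "GB", "GiB", 3 },
	{ "TB", "TiB", 4 }, { "PB", "PiB", 5 }, { "EB", "EiB", 6 },
      };
      unit = 0;
      for (size_t i = 0; i < sizeof suffixes / sizeof suffixes[0]; ++i)
	if (!strcmp (end, suffixes[i].si)
	    || !strcasecmp (end, suffixes[i].iec))
	  {
	    /* Exact SI match means decimal; otherwise binary.  */
	    unsigned long long base
	      = strcmp (end, suffixes[i].si) ? 1024 : 1000;
	    unit = 1;
	    for (int j = 0; j < suffixes[i].exp; ++j)
	      unit *= base;
	    break;
	  }
    }
  return unit ? n * unit : 0;   /* 0 signals an unrecognized suffix */
}
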
1295 /* Return true when EXP's range can be determined and set RANGE[] to it
1296 after adjusting it if necessary to make EXP represent a valid size
1297 of an object, or a valid size argument to an allocation function declared
1298 with attribute alloc_size (whose argument may be signed), or to a string
1299 manipulation function like memset. When ALLOW_ZERO is true, allow
1300 returning a range of [0, 0] for a size in an anti-range [1, N] where
1301 N > PTRDIFF_MAX. A zero range is a (nearly) invalid argument to
1302 allocation functions like malloc but it is a valid argument to
1303 functions like memset. */
1304
1305 bool
1306 get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
1307 {
1308 if (tree_fits_uhwi_p (exp))
1309 {
1310 /* EXP is a constant. */
1311 range[0] = range[1] = exp;
1312 return true;
1313 }
1314
1315 tree exptype = TREE_TYPE (exp);
1316 bool integral = INTEGRAL_TYPE_P (exptype);
1317
1318 wide_int min, max;
1319 enum value_range_type range_type;
1320
1321 if (TREE_CODE (exp) == SSA_NAME && integral)
1322 range_type = get_range_info (exp, &min, &max);
1323 else
1324 range_type = VR_VARYING;
1325
1326 if (range_type == VR_VARYING)
1327 {
1328 if (integral)
1329 {
1330 /* Use the full range of the type of the expression when
1331 no value range information is available. */
1332 range[0] = TYPE_MIN_VALUE (exptype);
1333 range[1] = TYPE_MAX_VALUE (exptype);
1334 return true;
1335 }
1336
1337 range[0] = NULL_TREE;
1338 range[1] = NULL_TREE;
1339 return false;
1340 }
1341
1342 unsigned expprec = TYPE_PRECISION (exptype);
1343
1344 bool signed_p = !TYPE_UNSIGNED (exptype);
1345
1346 if (range_type == VR_ANTI_RANGE)
1347 {
1348 if (signed_p)
1349 {
1350 if (wi::les_p (max, 0))
1351 {
1352 /* EXP is not in a strictly negative range. That means
1353 it must be in some (not necessarily strictly) positive
1354 range which includes zero. Since in signed to unsigned
1355 conversions negative values end up converted to large
1356 positive values, and otherwise they are not valid sizes,
1357 the resulting range is in both cases [0, TYPE_MAX]. */
1358 min = wi::zero (expprec);
1359 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1360 }
1361 else if (wi::les_p (min - 1, 0))
1362 {
1363 /* EXP is not in a negative-positive range. That means EXP
1364 is either negative, or greater than max. Since negative
1365 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
1366 min = max + 1;
1367 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1368 }
1369 else
1370 {
1371 max = min - 1;
1372 min = wi::zero (expprec);
1373 }
1374 }
1375 else if (wi::eq_p (0, min - 1))
1376 {
1377 /* EXP is unsigned and not in the range [1, MAX]. That means
1378 it's either zero or greater than MAX. Even though 0 would
1379 normally be detected by -Walloc-zero, unless ALLOW_ZERO
1380 is true, set the range to [MAX, TYPE_MAX] so that when MAX
1381 is greater than the limit the whole range is diagnosed. */
1382 if (allow_zero)
1383 min = max = wi::zero (expprec);
1384 else
1385 {
1386 min = max + 1;
1387 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1388 }
1389 }
1390 else
1391 {
1392 max = min - 1;
1393 min = wi::zero (expprec);
1394 }
1395 }
1396
1397 range[0] = wide_int_to_tree (exptype, min);
1398 range[1] = wide_int_to_tree (exptype, max);
1399
1400 return true;
1401 }
1402
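/* Editor's worked example for the unsigned anti-range case above: if a
   32-bit unsigned EXP is known to be outside [1, 100] (a VRP
   anti-range), the remaining values are 0 and 101..UINT_MAX.  With
   ALLOW_ZERO false the reported range is [101, UINT_MAX], so a later
   comparison against the object-size limit diagnoses the whole range;
   with ALLOW_ZERO true it collapses to [0, 0].  A plain-C restatement,
   assuming the excluded range starts at 1 as in that case:  */
#include <limits.h>

static void
sketch_unsigned_antirange (unsigned excl_max, int allow_zero,
			   unsigned range[2])
{
  if (allow_zero)
    range[0] = range[1] = 0;    /* zero is the only value of interest */
  else
    {
      range[0] = excl_max + 1;  /* everything above the excluded range */
      range[1] = UINT_MAX;
    }
}
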
1403 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1404 whose argument numbers given by IDX with values given by ARGS exceed
1405 the maximum object size or cause an unsigned overflow (wrapping) when
1406 multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
1407 may be null for functions like malloc, and non-null for those like
1408 calloc that are decorated with a two-argument attribute alloc_size. */
1409
1410 void
1411 maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1412 {
1413 /* The range each of the (up to) two arguments is known to be in. */
1414 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1415
1416 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1417 tree maxobjsize = alloc_max_size ();
1418
1419 location_t loc = EXPR_LOCATION (exp);
1420
1421 bool warned = false;
1422
1423 /* Validate each argument individually. */
1424 for (unsigned i = 0; i != 2 && args[i]; ++i)
1425 {
1426 if (TREE_CODE (args[i]) == INTEGER_CST)
1427 {
1428 argrange[i][0] = args[i];
1429 argrange[i][1] = args[i];
1430
1431 if (tree_int_cst_lt (args[i], integer_zero_node))
1432 {
1433 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1434 "%Kargument %i value %qE is negative",
1435 exp, idx[i] + 1, args[i]);
1436 }
1437 else if (integer_zerop (args[i]))
1438 {
1439 /* Avoid issuing -Walloc-zero for allocation functions other
1440 than __builtin_alloca that are declared with attribute
1441 returns_nonnull because there's no portability risk. This
1442 avoids warning for such calls to libiberty's xmalloc and
1443 friends.
1444 Also avoid issuing the warning for calls to function named
1445 "alloca". */
1446 if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
1447 && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
1448 || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
1449 && !lookup_attribute ("returns_nonnull",
1450 TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
1451 warned = warning_at (loc, OPT_Walloc_zero,
1452 "%Kargument %i value is zero",
1453 exp, idx[i] + 1);
1454 }
1455 else if (tree_int_cst_lt (maxobjsize, args[i]))
1456 {
1457 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1458 mode and with -fno-exceptions as a way to indicate array
1459 size overflow. There's no good way to detect C++98 here
1460 so avoid diagnosing these calls for all C++ modes. */
1461 if (i == 0
1462 && !args[1]
1463 && lang_GNU_CXX ()
1464 && DECL_IS_OPERATOR_NEW (fn)
1465 && integer_all_onesp (args[i]))
1466 continue;
1467
1468 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1469 "%Kargument %i value %qE exceeds "
1470 "maximum object size %E",
1471 exp, idx[i] + 1, args[i], maxobjsize);
1472 }
1473 }
1474 else if (TREE_CODE (args[i]) == SSA_NAME
1475 && get_size_range (args[i], argrange[i]))
1476 {
1477 /* Verify that the argument's range is not negative (including
1478 upper bound of zero). */
1479 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1480 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1481 {
1482 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1483 "%Kargument %i range [%E, %E] is negative",
1484 exp, idx[i] + 1,
1485 argrange[i][0], argrange[i][1]);
1486 }
1487 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1488 {
1489 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1490 "%Kargument %i range [%E, %E] exceeds "
1491 "maximum object size %E",
1492 exp, idx[i] + 1,
1493 argrange[i][0], argrange[i][1],
1494 maxobjsize);
1495 }
1496 }
1497 }
1498
1499 if (!argrange[0][0])
1500 return;
1501
1502 /* For a two-argument alloc_size, validate the product of the two
1503 arguments if both of their values or ranges are known. */
1504 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1505 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1506 && !integer_onep (argrange[0][0])
1507 && !integer_onep (argrange[1][0]))
1508 {
1509 /* Check for overflow in the product of a function decorated with
1510 attribute alloc_size (X, Y). */
1511 unsigned szprec = TYPE_PRECISION (size_type_node);
1512 wide_int x = wi::to_wide (argrange[0][0], szprec);
1513 wide_int y = wi::to_wide (argrange[1][0], szprec);
1514
1515 bool vflow;
1516 wide_int prod = wi::umul (x, y, &vflow);
1517
1518 if (vflow)
1519 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1520 "%Kproduct %<%E * %E%> of arguments %i and %i "
1521 "exceeds %<SIZE_MAX%>",
1522 exp, argrange[0][0], argrange[1][0],
1523 idx[0] + 1, idx[1] + 1);
1524 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1525 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1526 "%Kproduct %<%E * %E%> of arguments %i and %i "
1527 "exceeds maximum object size %E",
1528 exp, argrange[0][0], argrange[1][0],
1529 idx[0] + 1, idx[1] + 1,
1530 maxobjsize);
1531
1532 if (warned)
1533 {
1534 /* Print the full range of each of the two arguments to make
1535 it clear when it is, in fact, in a range and not constant. */
1536 if (argrange[0][0] != argrange [0][1])
1537 inform (loc, "argument %i in the range [%E, %E]",
1538 idx[0] + 1, argrange[0][0], argrange[0][1]);
1539 if (argrange[1][0] != argrange [1][1])
1540 inform (loc, "argument %i in the range [%E, %E]",
1541 idx[1] + 1, argrange[1][0], argrange[1][1]);
1542 }
1543 }
1544
1545 if (warned)
1546 {
1547 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1548
1549 if (DECL_IS_BUILTIN (fn))
1550 inform (loc,
1551 "in a call to built-in allocation function %qD", fn);
1552 else
1553 inform (fnloc,
1554 "in a call to allocation function %qD declared here", fn);
1555 }
1556 }
1557
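/* Editor's note: the core of the two-argument product check above,
   restated with the GCC/Clang __builtin_mul_overflow primitive in
   place of wi::umul.  MAXOBJSIZE stands in for the tree returned by
   alloc_max_size; purely illustrative.  */
#include <stdbool.h>
#include <stddef.h>

static bool
sketch_alloc_product_ok (size_t x, size_t y, size_t maxobjsize)
{
  size_t prod;
  if (__builtin_mul_overflow (x, y, &prod))
    return false;               /* product wraps around SIZE_MAX */
  return prod <= maxobjsize;    /* and must not exceed the size cap */
}
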
1558 /* If EXPR refers to a character array or pointer declared with attribute
1559 nonstring, return a decl for that array or pointer and set *REF to
1560 the referenced enclosing object or pointer. Otherwise returns
1561 null. */
1562
1563 tree
1564 get_attr_nonstring_decl (tree expr, tree *ref)
1565 {
1566 tree decl = expr;
1567 if (TREE_CODE (decl) == SSA_NAME)
1568 {
1569 gimple *def = SSA_NAME_DEF_STMT (decl);
1570
1571 if (is_gimple_assign (def))
1572 {
1573 tree_code code = gimple_assign_rhs_code (def);
1574 if (code == ADDR_EXPR
1575 || code == COMPONENT_REF
1576 || code == VAR_DECL)
1577 decl = gimple_assign_rhs1 (def);
1578 }
1579 else if (tree var = SSA_NAME_VAR (decl))
1580 decl = var;
1581 }
1582
1583 if (TREE_CODE (decl) == ADDR_EXPR)
1584 decl = TREE_OPERAND (decl, 0);
1585
1586 if (ref)
1587 *ref = decl;
1588
1589 if (TREE_CODE (decl) == COMPONENT_REF)
1590 decl = TREE_OPERAND (decl, 1);
1591
1592 if (DECL_P (decl)
1593 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1594 return decl;
1595
1596 return NULL_TREE;
1597 }
1598
1599 /* Warn about passing a non-string array/pointer to a function that
1600 expects a nul-terminated string argument. */
1601
1602 void
1603 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1604 {
1605 if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
1606 return;
1607
1608 bool with_bounds = CALL_WITH_BOUNDS_P (exp);
1609
1610 /* The bound argument to a bounded string function like strncpy. */
1611 tree bound = NULL_TREE;
1612
1613 /* It's safe to call "bounded" string functions with a non-string
1614 argument since the functions provide an explicit bound for this
1615 purpose. */
1616 switch (DECL_FUNCTION_CODE (fndecl))
1617 {
1618 case BUILT_IN_STPNCPY:
1619 case BUILT_IN_STPNCPY_CHK:
1620 case BUILT_IN_STRNCMP:
1621 case BUILT_IN_STRNCASECMP:
1622 case BUILT_IN_STRNCPY:
1623 case BUILT_IN_STRNCPY_CHK:
1624 bound = CALL_EXPR_ARG (exp, with_bounds ? 4 : 2);
1625 break;
1626
1627 case BUILT_IN_STRNDUP:
1628 bound = CALL_EXPR_ARG (exp, with_bounds ? 2 : 1);
1629 break;
1630
1631 default:
1632 break;
1633 }
1634
1635 /* Determine the range of the bound argument (if specified). */
1636 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1637 if (bound)
1638 get_size_range (bound, bndrng);
1639
1640 /* Iterate over the built-in function's formal arguments and check
1641 each const char* against the actual argument. If the actual
1642 argument is declared attribute non-string issue a warning unless
1643 the argument's maximum length is bounded. */
1644 function_args_iterator it;
1645 function_args_iter_init (&it, TREE_TYPE (fndecl));
1646
1647 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1648 {
1649 tree argtype = function_args_iter_cond (&it);
1650 if (!argtype)
1651 break;
1652
1653 if (TREE_CODE (argtype) != POINTER_TYPE)
1654 continue;
1655
1656 argtype = TREE_TYPE (argtype);
1657
1658 if (TREE_CODE (argtype) != INTEGER_TYPE
1659 || !TYPE_READONLY (argtype))
1660 continue;
1661
1662 argtype = TYPE_MAIN_VARIANT (argtype);
1663 if (argtype != char_type_node)
1664 continue;
1665
1666 tree callarg = CALL_EXPR_ARG (exp, argno);
1667 if (TREE_CODE (callarg) == ADDR_EXPR)
1668 callarg = TREE_OPERAND (callarg, 0);
1669
1670 /* See if the destination is declared with attribute "nonstring". */
1671 tree decl = get_attr_nonstring_decl (callarg);
1672 if (!decl)
1673 continue;
1674
1675 tree type = TREE_TYPE (decl);
1676
1677 offset_int wibnd = 0;
1678 if (bndrng[0])
1679 wibnd = wi::to_offset (bndrng[0]);
1680
1681 offset_int asize = wibnd;
1682
1683 if (TREE_CODE (type) == ARRAY_TYPE)
1684 if (tree arrbnd = TYPE_DOMAIN (type))
1685 {
1686 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1687 asize = wi::to_offset (arrbnd) + 1;
1688 }
1689
1690 location_t loc = EXPR_LOCATION (exp);
1691
1692 bool warned = false;
1693
1694 if (wi::ltu_p (asize, wibnd))
1695 warned = warning_at (loc, OPT_Wstringop_overflow_,
1696 "%qD argument %i declared attribute %<nonstring%> "
1697 "is smaller than the specified bound %E",
1698 fndecl, argno + 1, bndrng[0]);
1699 else if (!bound)
1700 warned = warning_at (loc, OPT_Wstringop_overflow_,
1701 "%qD argument %i declared attribute %<nonstring%>",
1702 fndecl, argno + 1);
1703
1704 if (warned)
1705 inform (DECL_SOURCE_LOCATION (decl),
1706 "argument %qD declared here", decl);
1707 }
1708 }
1709
1710 /* Issue an error if CALL_EXPR was flagged as requiring
1711 tail-call optimization. */
1712
1713 static void
1714 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1715 {
1716 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1717 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1718 return;
1719
1720 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1721 }
1722
1723 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1724 CALL_EXPR EXP.
1725
1726 NUM_ACTUALS is the total number of parameters.
1727
1728 N_NAMED_ARGS is the total number of named arguments.
1729
1730 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1731 value, or null.
1732
1733 FNDECL is the tree code for the target of this call (if known)
1734
1735 ARGS_SO_FAR holds state needed by the target to know where to place
1736 the next argument.
1737
1738 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1739 for arguments which are passed in registers.
1740
1741 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1742 and may be modified by this routine.
1743
1744 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1745 flags which may be modified by this routine.
1746
1747 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1748 that requires allocation of stack space.
1749
1750 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1751 the thunked-to function. */
1752
1753 static void
1754 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1755 struct arg_data *args,
1756 struct args_size *args_size,
1757 int n_named_args ATTRIBUTE_UNUSED,
1758 tree exp, tree struct_value_addr_value,
1759 tree fndecl, tree fntype,
1760 cumulative_args_t args_so_far,
1761 int reg_parm_stack_space,
1762 rtx *old_stack_level,
1763 poly_int64_pod *old_pending_adj,
1764 int *must_preallocate, int *ecf_flags,
1765 bool *may_tailcall, bool call_from_thunk_p)
1766 {
1767 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1768 location_t loc = EXPR_LOCATION (exp);
1769
1770 /* Count arg position in order args appear. */
1771 int argpos;
1772
1773 int i;
1774
1775 args_size->constant = 0;
1776 args_size->var = 0;
1777
1778 bitmap_obstack_initialize (NULL);
1779
1780 /* In this loop, we consider args in the order they are written.
1781 We fill up ARGS from the back. */
1782
1783 i = num_actuals - 1;
1784 {
1785 int j = i, ptr_arg = -1;
1786 call_expr_arg_iterator iter;
1787 tree arg;
1788 bitmap slots = NULL;
1789
1790 if (struct_value_addr_value)
1791 {
1792 args[j].tree_value = struct_value_addr_value;
1793 j--;
1794
1795 /* If we pass a structure address then we need to
1796 create bounds for it. Since the created bounds are
1797 a call statement, we expand them right here to avoid
1798 fixing all the other places where they might be expanded. */
1799 if (CALL_WITH_BOUNDS_P (exp))
1800 {
1801 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1802 args[j].tree_value
1803 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1804 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1805 EXPAND_NORMAL, 0, false);
1806 args[j].pointer_arg = j + 1;
1807 j--;
1808 }
1809 }
1810 argpos = 0;
1811 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1812 {
1813 tree argtype = TREE_TYPE (arg);
1814
1815 /* Remember the last parameter with a pointer and associate it
1816 with the following pointer bounds. */
1817 if (CALL_WITH_BOUNDS_P (exp)
1818 && chkp_type_has_pointer (argtype))
1819 {
1820 if (slots)
1821 BITMAP_FREE (slots);
1822 ptr_arg = j;
1823 if (!BOUNDED_TYPE_P (argtype))
1824 {
1825 slots = BITMAP_ALLOC (NULL);
1826 chkp_find_bound_slots (argtype, slots);
1827 }
1828 }
1829 else if (CALL_WITH_BOUNDS_P (exp)
1830 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1831 argpos < n_named_args))
1832 {
1833 if (slots)
1834 BITMAP_FREE (slots);
1835 ptr_arg = j;
1836 }
1837 else if (POINTER_BOUNDS_TYPE_P (argtype))
1838 {
1839 /* We expect bounds in instrumented calls only.
1840 Otherwise it is a sign we lost the flag due to some
1841 optimization and may emit call args incorrectly. */
1842 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1843
1844 /* For structures look for the next available pointer. */
1845 if (ptr_arg != -1 && slots)
1846 {
1847 unsigned bnd_no = bitmap_first_set_bit (slots);
1848 args[j].pointer_offset =
1849 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1850
1851 bitmap_clear_bit (slots, bnd_no);
1852
1853 /* Check we have no more pointers in the structure. */
1854 if (bitmap_empty_p (slots))
1855 BITMAP_FREE (slots);
1856 }
1857 args[j].pointer_arg = ptr_arg;
1858
1859 /* Check we covered all pointers in the previous
1860 non-bounds arg. */
1861 if (!slots)
1862 ptr_arg = -1;
1863 }
1864 else
1865 ptr_arg = -1;
1866
1867 if (targetm.calls.split_complex_arg
1868 && argtype
1869 && TREE_CODE (argtype) == COMPLEX_TYPE
1870 && targetm.calls.split_complex_arg (argtype))
1871 {
1872 tree subtype = TREE_TYPE (argtype);
1873 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1874 j--;
1875 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1876 }
1877 else
1878 args[j].tree_value = arg;
1879 j--;
1880 argpos++;
1881 }
1882
1883 if (slots)
1884 BITMAP_FREE (slots);
1885 }
1886
1887 bitmap_obstack_release (NULL);
1888
1889 /* Extract attribute alloc_size and if set, store the indices of
1890 the corresponding arguments in ALLOC_IDX, and then the actual
1891 argument(s) at those indices in ALLOC_ARGS. */
1892 int alloc_idx[2] = { -1, -1 };
1893 if (tree alloc_size
1894 = (fndecl ? lookup_attribute ("alloc_size",
1895 TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
1896 : NULL_TREE))
1897 {
1898 tree args = TREE_VALUE (alloc_size);
1899 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1900 if (TREE_CHAIN (args))
1901 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1902 }
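
/* An illustrative declaration (assumed, not from the sources):

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   The attribute operands are 1-based, so ALLOC_IDX becomes {0, 1}
   and the loop below records both actual arguments in ALLOC_ARGS
   for maybe_warn_alloc_args_overflow.  */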
1903
1904 /* Array for up to the two attribute alloc_size arguments. */
1905 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1906
1907 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1908 for (argpos = 0; argpos < num_actuals; i--, argpos++)
1909 {
1910 tree type = TREE_TYPE (args[i].tree_value);
1911 int unsignedp;
1912 machine_mode mode;
1913
1914 /* Replace erroneous argument with constant zero. */
1915 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1916 args[i].tree_value = integer_zero_node, type = integer_type_node;
1917
1918 /* If TYPE is a transparent union or record, pass things the way
1919 we would pass the first field of the union or record. We have
1920 already verified that the modes are the same. */
1921 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1922 && TYPE_TRANSPARENT_AGGR (type))
1923 type = TREE_TYPE (first_field (type));
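
/* A minimal sketch of such a type (illustrative only):

     typedef union
     {
       int *ip;
       const int *cip;
     } int_ptr_u __attribute__ ((transparent_union));

   An int_ptr_u argument is passed exactly as its first field,
   i.e. as an int *.  */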
1924
1925 /* Decide where to pass this arg.
1926
1927 args[i].reg is nonzero if all or part is passed in registers.
1928
1929 args[i].partial is nonzero if part but not all is passed in registers,
1930 and the exact value says how many bytes are passed in registers.
1931
1932 args[i].pass_on_stack is nonzero if the argument must at least be
1933 computed on the stack. It may then be loaded back into registers
1934 if args[i].reg is nonzero.
1935
1936 These decisions are driven by the FUNCTION_... macros and must agree
1937 with those made by function.c. */
1938
1939 /* See if this argument should be passed by invisible reference. */
1940 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1941 type, argpos < n_named_args))
1942 {
1943 bool callee_copies;
1944 tree base = NULL_TREE;
1945
1946 callee_copies
1947 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
1948 type, argpos < n_named_args);
1949
1950 /* If we're compiling a thunk, pass through invisible references
1951 instead of making a copy. */
1952 if (call_from_thunk_p
1953 || (callee_copies
1954 && !TREE_ADDRESSABLE (type)
1955 && (base = get_base_address (args[i].tree_value))
1956 && TREE_CODE (base) != SSA_NAME
1957 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1958 {
1959 /* We may have turned the parameter value into an SSA name.
1960 Go back to the original parameter so we can take the
1961 address. */
1962 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1963 {
1964 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1965 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1966 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1967 }
1968 /* Argument setup code may have copied the value to a register. We
1969 revert that optimization now because the tail call code must
1970 use the original location. */
1971 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1972 && !MEM_P (DECL_RTL (args[i].tree_value))
1973 && DECL_INCOMING_RTL (args[i].tree_value)
1974 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1975 set_decl_rtl (args[i].tree_value,
1976 DECL_INCOMING_RTL (args[i].tree_value));
1977
1978 mark_addressable (args[i].tree_value);
1979
1980 /* We can't use sibcalls if a callee-copied argument is
1981 stored in the current function's frame. */
1982 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1983 {
1984 *may_tailcall = false;
1985 maybe_complain_about_tail_call (exp,
1986 "a callee-copied argument is"
1987 " stored in the current "
1988 " function's frame");
1989 }
1990
1991 args[i].tree_value = build_fold_addr_expr_loc (loc,
1992 args[i].tree_value);
1993 type = TREE_TYPE (args[i].tree_value);
1994
1995 if (*ecf_flags & ECF_CONST)
1996 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1997 }
1998 else
1999 {
2000 /* We make a copy of the object and pass the address to the
2001 function being called. */
2002 rtx copy;
2003
2004 if (!COMPLETE_TYPE_P (type)
2005 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2006 || (flag_stack_check == GENERIC_STACK_CHECK
2007 && compare_tree_int (TYPE_SIZE_UNIT (type),
2008 STACK_CHECK_MAX_VAR_SIZE) > 0))
2009 {
2010 /* This is a variable-sized object. Make space on the stack
2011 for it. */
2012 rtx size_rtx = expr_size (args[i].tree_value);
2013
2014 if (*old_stack_level == 0)
2015 {
2016 emit_stack_save (SAVE_BLOCK, old_stack_level);
2017 *old_pending_adj = pending_stack_adjust;
2018 pending_stack_adjust = 0;
2019 }
2020
2021 /* We can pass TRUE as the 4th argument because we just
2022 saved the stack pointer and will restore it right after
2023 the call. */
2024 copy = allocate_dynamic_stack_space (size_rtx,
2025 TYPE_ALIGN (type),
2026 TYPE_ALIGN (type),
2027 max_int_size_in_bytes
2028 (type),
2029 true);
2030 copy = gen_rtx_MEM (BLKmode, copy);
2031 set_mem_attributes (copy, type, 1);
2032 }
2033 else
2034 copy = assign_temp (type, 1, 0);
2035
2036 store_expr (args[i].tree_value, copy, 0, false, false);
2037
2038 /* Just change the const function to pure and then let
2039 the next test clear the pure based on
2040 callee_copies. */
2041 if (*ecf_flags & ECF_CONST)
2042 {
2043 *ecf_flags &= ~ECF_CONST;
2044 *ecf_flags |= ECF_PURE;
2045 }
2046
2047 if (!callee_copies && *ecf_flags & ECF_PURE)
2048 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2049
2050 args[i].tree_value
2051 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2052 type = TREE_TYPE (args[i].tree_value);
2053 *may_tailcall = false;
2054 maybe_complain_about_tail_call (exp,
2055 "argument must be passed"
2056 " by copying");
2057 }
2058 }
2059
2060 unsignedp = TYPE_UNSIGNED (type);
2061 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2062 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2063
2064 args[i].unsignedp = unsignedp;
2065 args[i].mode = mode;
2066
2067 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2068
2069 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
2070 argpos < n_named_args);
2071
2072 if (args[i].reg && CONST_INT_P (args[i].reg))
2073 {
2074 args[i].special_slot = args[i].reg;
2075 args[i].reg = NULL;
2076 }
2077
2078 /* If this is a sibling call and the machine has register windows, the
2079 register window has to be unwound before calling the routine, so
2080 arguments have to go into the incoming registers. */
2081 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2082 args[i].tail_call_reg
2083 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
2084 argpos < n_named_args);
2085 else
2086 args[i].tail_call_reg = args[i].reg;
2087
2088 if (args[i].reg)
2089 args[i].partial
2090 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
2091 argpos < n_named_args);
2092
2093 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
2094
2095 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2096 it means that we are to pass this arg in the register(s) designated
2097 by the PARALLEL, but also to pass it in the stack. */
2098 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2099 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2100 args[i].pass_on_stack = 1;
2101
2102 /* If this is an addressable type, we must preallocate the stack
2103 since we must evaluate the object into its final location.
2104
2105 If this is to be passed in both registers and the stack, it is simpler
2106 to preallocate. */
2107 if (TREE_ADDRESSABLE (type)
2108 || (args[i].pass_on_stack && args[i].reg != 0))
2109 *must_preallocate = 1;
2110
2111 /* No stack allocation or padding for bounds. */
2112 if (POINTER_BOUNDS_P (args[i].tree_value))
2113 ;
2114 /* Compute the stack-size of this argument. */
2115 else if (args[i].reg == 0 || args[i].partial != 0
2116 || reg_parm_stack_space > 0
2117 || args[i].pass_on_stack)
2118 locate_and_pad_parm (mode, type,
2119 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2120 1,
2121 #else
2122 args[i].reg != 0,
2123 #endif
2124 reg_parm_stack_space,
2125 args[i].pass_on_stack ? 0 : args[i].partial,
2126 fndecl, args_size, &args[i].locate);
2127 #ifdef BLOCK_REG_PADDING
2128 else
2129 /* The argument is passed entirely in registers. See at which
2130 end it should be padded. */
2131 args[i].locate.where_pad =
2132 BLOCK_REG_PADDING (mode, type,
2133 int_size_in_bytes (type) <= UNITS_PER_WORD);
2134 #endif
2135
2136 /* Update ARGS_SIZE, the total stack space for args so far. */
2137
2138 args_size->constant += args[i].locate.size.constant;
2139 if (args[i].locate.size.var)
2140 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2141
2142 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2143 have been used, etc. */
2144
2145 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
2146 type, argpos < n_named_args);
2147
2148 /* Store argument values for functions decorated with attribute
2149 alloc_size. */
2150 if (argpos == alloc_idx[0])
2151 alloc_args[0] = args[i].tree_value;
2152 else if (argpos == alloc_idx[1])
2153 alloc_args[1] = args[i].tree_value;
2154 }
2155
2156 if (alloc_args[0])
2157 {
2158 /* Check the arguments of functions decorated with attribute
2159 alloc_size. */
2160 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2161 }
2162
2163 /* Detect passing non-string arguments to functions expecting
2164 nul-terminated strings. */
2165 maybe_warn_nonstring_arg (fndecl, exp);
2166 }
2167
2168 /* Update ARGS_SIZE to contain the total size for the argument block.
2169 Return the original constant component of the argument block's size.
2170
2171 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2172 for arguments passed in registers. */
2173
2174 static poly_int64
2175 compute_argument_block_size (int reg_parm_stack_space,
2176 struct args_size *args_size,
2177 tree fndecl ATTRIBUTE_UNUSED,
2178 tree fntype ATTRIBUTE_UNUSED,
2179 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2180 {
2181 poly_int64 unadjusted_args_size = args_size->constant;
2182
2183 /* For accumulate outgoing args mode we don't need to align, since the frame
2184 will already be aligned. Align to STACK_BOUNDARY in order to prevent
2185 backends from generating misaligned frame sizes. */
2186 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2187 preferred_stack_boundary = STACK_BOUNDARY;
2188
2189 /* Compute the actual size of the argument block required. The variable
2190 and constant sizes must be combined, the size may have to be rounded,
2191 and there may be a minimum required size. */
2192
2193 if (args_size->var)
2194 {
2195 args_size->var = ARGS_SIZE_TREE (*args_size);
2196 args_size->constant = 0;
2197
2198 preferred_stack_boundary /= BITS_PER_UNIT;
2199 if (preferred_stack_boundary > 1)
2200 {
2201 /* We don't handle this case yet. To handle it correctly we have
2202 to add the delta, round and subtract the delta.
2203 Currently no machine description requires this support. */
2204 gcc_assert (multiple_p (stack_pointer_delta,
2205 preferred_stack_boundary));
2206 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2207 }
2208
2209 if (reg_parm_stack_space > 0)
2210 {
2211 args_size->var
2212 = size_binop (MAX_EXPR, args_size->var,
2213 ssize_int (reg_parm_stack_space));
2214
2215 /* The area corresponding to register parameters is not to be counted
2216 in the size of the block we need, so make the adjustment. */
2217 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2218 args_size->var
2219 = size_binop (MINUS_EXPR, args_size->var,
2220 ssize_int (reg_parm_stack_space));
2221 }
2222 }
2223 else
2224 {
2225 preferred_stack_boundary /= BITS_PER_UNIT;
2226 if (preferred_stack_boundary < 1)
2227 preferred_stack_boundary = 1;
2228 args_size->constant = (aligned_upper_bound (args_size->constant
2229 + stack_pointer_delta,
2230 preferred_stack_boundary)
2231 - stack_pointer_delta);
2232
2233 args_size->constant = upper_bound (args_size->constant,
2234 reg_parm_stack_space);
2235
2236 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2237 args_size->constant -= reg_parm_stack_space;
2238 }
2239 return unadjusted_args_size;
2240 }
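
/* A worked example of the constant case above (numbers assumed):
   with args_size->constant == 20, stack_pointer_delta == 4, a
   16-byte preferred boundary and no reg_parm_stack_space,
     aligned_upper_bound (20 + 4, 16) - 4 == 32 - 4 == 28
   so 28 bytes of argument space leave the stack 16-byte aligned
   once the arguments are pushed.  */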
2241
2242 /* Precompute parameters as needed for a function call.
2243
2244 FLAGS is mask of ECF_* constants.
2245
2246 NUM_ACTUALS is the number of arguments.
2247
2248 ARGS is an array containing information for each argument; this
2249 routine fills in the INITIAL_VALUE and VALUE fields for each
2250 precomputed argument. */
2251
2252 static void
2253 precompute_arguments (int num_actuals, struct arg_data *args)
2254 {
2255 int i;
2256
2257 /* If this is a libcall, then precompute all arguments so that we do not
2258 get extraneous instructions emitted as part of the libcall sequence. */
2259
2260 /* If we preallocated the stack space, and some arguments must be passed
2261 on the stack, then we must precompute any parameter which contains a
2262 function call which will store arguments on the stack.
2263 Otherwise, evaluating the parameter may clobber previous parameters
2264 which have already been stored into the stack. (We have code to avoid
2265 such a case by saving the outgoing stack arguments, but it results in
2266 worse code.) */
2267 if (!ACCUMULATE_OUTGOING_ARGS)
2268 return;
2269
2270 for (i = 0; i < num_actuals; i++)
2271 {
2272 tree type;
2273 machine_mode mode;
2274
2275 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2276 continue;
2277
2278 /* If this is an addressable type, we cannot pre-evaluate it. */
2279 type = TREE_TYPE (args[i].tree_value);
2280 gcc_assert (!TREE_ADDRESSABLE (type));
2281
2282 args[i].initial_value = args[i].value
2283 = expand_normal (args[i].tree_value);
2284
2285 mode = TYPE_MODE (type);
2286 if (mode != args[i].mode)
2287 {
2288 int unsignedp = args[i].unsignedp;
2289 args[i].value
2290 = convert_modes (args[i].mode, mode,
2291 args[i].value, args[i].unsignedp);
2292
2293 /* CSE will replace this only if it contains the args[i].value
2294 pseudo, so convert it down to the declared mode using
2295 a SUBREG. */
2296 if (REG_P (args[i].value)
2297 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2298 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2299 {
2300 args[i].initial_value
2301 = gen_lowpart_SUBREG (mode, args[i].value);
2302 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2303 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2304 }
2305 }
2306 }
2307 }
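
/* A hypothetical illustration of the hazard described above: in

     g (h (x), y);

   with preallocated outgoing-argument space, expanding the nested
   call H (X) after Y had been stored into its stack slot could
   overwrite that slot with H's own outgoing arguments; precomputing
   CALL_EXPR arguments up front avoids the clobber.  */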
2308
2309 /* Given the current state of MUST_PREALLOCATE and information about
2310 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2311 compute and return the final value for MUST_PREALLOCATE. */
2312
2313 static int
2314 finalize_must_preallocate (int must_preallocate, int num_actuals,
2315 struct arg_data *args, struct args_size *args_size)
2316 {
2317 /* See if we have or want to preallocate stack space.
2318
2319 If we would have to push a partially-in-regs parm
2320 before other stack parms, preallocate stack space instead.
2321
2322 If the size of some parm is not a multiple of the required stack
2323 alignment, we must preallocate.
2324
2325 If the total size of arguments that would otherwise create a copy in
2326 a temporary (such as a CALL) is more than half the total argument list
2327 size, preallocation is faster.
2328
2329 Another reason to preallocate is if we have a machine (like the m88k)
2330 where stack alignment is required to be maintained between every
2331 pair of insns, not just when the call is made. However, we assume here
2332 that such machines either do not have push insns (and hence preallocation
2333 would occur anyway) or the problem is taken care of with
2334 PUSH_ROUNDING. */
2335
2336 if (! must_preallocate)
2337 {
2338 int partial_seen = 0;
2339 poly_int64 copy_to_evaluate_size = 0;
2340 int i;
2341
2342 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2343 {
2344 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2345 partial_seen = 1;
2346 else if (partial_seen && args[i].reg == 0)
2347 must_preallocate = 1;
2348 /* We preallocate in case there are bounds passed
2349 in the bounds table to have precomputed address
2350 for bounds association. */
2351 else if (POINTER_BOUNDS_P (args[i].tree_value)
2352 && !args[i].reg)
2353 must_preallocate = 1;
2354
2355 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2356 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2357 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2358 || TREE_CODE (args[i].tree_value) == COND_EXPR
2359 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2360 copy_to_evaluate_size
2361 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2362 }
2363
2364 if (maybe_ne (args_size->constant, 0)
2365 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2366 must_preallocate = 1;
2367 }
2368 return must_preallocate;
2369 }
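
/* A hypothetical trigger for the first rule above: if args[0] is
   passed partially in registers (args[0].partial > 0 with
   pass_on_stack clear) and a later argument lives entirely on the
   stack (args[i].reg == 0), PARTIAL_SEEN forces the result to 1 so
   the partial argument's stack portion cannot end up pushed out of
   order.  */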
2370
2371 /* If we preallocated stack space, compute the address of each argument
2372 and store it into the ARGS array.
2373
2374 We need not ensure it is a valid memory address here; it will be
2375 validized when it is used.
2376
2377 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2378
2379 static void
2380 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2381 {
2382 if (argblock)
2383 {
2384 rtx arg_reg = argblock;
2385 int i;
2386 poly_int64 arg_offset = 0;
2387
2388 if (GET_CODE (argblock) == PLUS)
2389 {
2390 arg_reg = XEXP (argblock, 0);
2391 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2392 }
2393
2394 for (i = 0; i < num_actuals; i++)
2395 {
2396 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2397 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2398 rtx addr;
2399 unsigned int align, boundary;
2400 poly_uint64 units_on_stack = 0;
2401 machine_mode partial_mode = VOIDmode;
2402
2403 /* Skip this parm if it will not be passed on the stack. */
2404 if (! args[i].pass_on_stack
2405 && args[i].reg != 0
2406 && args[i].partial == 0)
2407 continue;
2408
2409 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2410 continue;
2411
2412 /* Pointer bounds are never passed on the stack. */
2413 if (POINTER_BOUNDS_P (args[i].tree_value))
2414 continue;
2415
2416 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2417 addr = plus_constant (Pmode, addr, arg_offset);
2418
2419 if (args[i].partial != 0)
2420 {
2421 /* Only part of the parameter is being passed on the stack.
2422 Generate a simple memory reference of the correct size. */
2423 units_on_stack = args[i].locate.size.constant;
2424 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2425 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2426 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2427 set_mem_size (args[i].stack, units_on_stack);
2428 }
2429 else
2430 {
2431 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2432 set_mem_attributes (args[i].stack,
2433 TREE_TYPE (args[i].tree_value), 1);
2434 }
2435 align = BITS_PER_UNIT;
2436 boundary = args[i].locate.boundary;
2437 poly_int64 offset_val;
2438 if (args[i].locate.where_pad != PAD_DOWNWARD)
2439 align = boundary;
2440 else if (poly_int_rtx_p (offset, &offset_val))
2441 {
2442 align = least_bit_hwi (boundary);
2443 unsigned int offset_align
2444 = known_alignment (offset_val) * BITS_PER_UNIT;
2445 if (offset_align != 0)
2446 align = MIN (align, offset_align);
2447 }
2448 set_mem_align (args[i].stack, align);
2449
2450 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2451 addr = plus_constant (Pmode, addr, arg_offset);
2452
2453 if (args[i].partial != 0)
2454 {
2455 /* Only part of the parameter is being passed on the stack.
2456 Generate a simple memory reference of the correct size. */
2458 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2459 set_mem_size (args[i].stack_slot, units_on_stack);
2460 }
2461 else
2462 {
2463 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2464 set_mem_attributes (args[i].stack_slot,
2465 TREE_TYPE (args[i].tree_value), 1);
2466 }
2467 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2468
2469 /* Function incoming arguments may overlap with sibling call
2470 outgoing arguments and we cannot allow reordering of reads
2471 from function arguments with stores to outgoing arguments
2472 of sibling calls. */
2473 set_mem_alias_set (args[i].stack, 0);
2474 set_mem_alias_set (args[i].stack_slot, 0);
2475 }
2476 }
2477 }
2478
2479 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2480 in a call instruction.
2481
2482 FNDECL is the tree node for the target function. For an indirect call
2483 FNDECL will be NULL_TREE.
2484
2485 ADDR is the operand 0 of CALL_EXPR for this call. */
2486
2487 static rtx
2488 rtx_for_function_call (tree fndecl, tree addr)
2489 {
2490 rtx funexp;
2491
2492 /* Get the function to call, in the form of RTL. */
2493 if (fndecl)
2494 {
2495 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2496 TREE_USED (fndecl) = 1;
2497
2498 /* Get a SYMBOL_REF rtx for the function address. */
2499 funexp = XEXP (DECL_RTL (fndecl), 0);
2500 }
2501 else
2502 /* Generate an rtx (probably a pseudo-register) for the address. */
2503 {
2504 push_temp_slots ();
2505 funexp = expand_normal (addr);
2506 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2507 }
2508 return funexp;
2509 }
2510
2511 /* Return the static chain for this function, if any. */
2512
2513 rtx
2514 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2515 {
2516 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2517 return NULL;
2518
2519 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2520 }
2521
2522 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2523 static struct
2524 {
2525 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2526 or NULL_RTX if none has been scanned yet. */
2527 rtx_insn *scan_start;
2528 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2529 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2530 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2531 with fixed offset, or PC if this is with variable or unknown offset. */
2532 vec<rtx> cache;
2533 } internal_arg_pointer_exp_state;
2534
2535 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2536
2537 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2538 the tail call sequence, starting with the first insn that hasn't been
2539 scanned yet, and note for each pseudo on the LHS whether it is based
2540 on crtl->args.internal_arg_pointer or not, and if so, what offset
2541 from that pointer it has. */
2542
2543 static void
2544 internal_arg_pointer_based_exp_scan (void)
2545 {
2546 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2547
2548 if (scan_start == NULL_RTX)
2549 insn = get_insns ();
2550 else
2551 insn = NEXT_INSN (scan_start);
2552
2553 while (insn)
2554 {
2555 rtx set = single_set (insn);
2556 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2557 {
2558 rtx val = NULL_RTX;
2559 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2560 /* Punt on pseudos set multiple times. */
2561 if (idx < internal_arg_pointer_exp_state.cache.length ()
2562 && (internal_arg_pointer_exp_state.cache[idx]
2563 != NULL_RTX))
2564 val = pc_rtx;
2565 else
2566 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2567 if (val != NULL_RTX)
2568 {
2569 if (idx >= internal_arg_pointer_exp_state.cache.length ())
2570 internal_arg_pointer_exp_state.cache
2571 .safe_grow_cleared (idx + 1);
2572 internal_arg_pointer_exp_state.cache[idx] = val;
2573 }
2574 }
2575 if (NEXT_INSN (insn) == NULL_RTX)
2576 scan_start = insn;
2577 insn = NEXT_INSN (insn);
2578 }
2579
2580 internal_arg_pointer_exp_state.scan_start = scan_start;
2581 }
2582
2583 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2584 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2585 it with fixed offset, or PC if this is with variable or unknown offset.
2586 TOPLEVEL is true if the function is invoked at the topmost level. */
2587
2588 static rtx
2589 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2590 {
2591 if (CONSTANT_P (rtl))
2592 return NULL_RTX;
2593
2594 if (rtl == crtl->args.internal_arg_pointer)
2595 return const0_rtx;
2596
2597 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2598 return NULL_RTX;
2599
2600 poly_int64 offset;
2601 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2602 {
2603 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2604 if (val == NULL_RTX || val == pc_rtx)
2605 return val;
2606 return plus_constant (Pmode, val, offset);
2607 }
2608
2609 /* When called at the topmost level, scan pseudo assignments in between the
2610 last scanned instruction in the tail call sequence and the latest insn
2611 in that sequence. */
2612 if (toplevel)
2613 internal_arg_pointer_based_exp_scan ();
2614
2615 if (REG_P (rtl))
2616 {
2617 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2618 if (idx < internal_arg_pointer_exp_state.cache.length ())
2619 return internal_arg_pointer_exp_state.cache[idx];
2620
2621 return NULL_RTX;
2622 }
2623
2624 subrtx_iterator::array_type array;
2625 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2626 {
2627 const_rtx x = *iter;
2628 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2629 return pc_rtx;
2630 if (MEM_P (x))
2631 iter.skip_subrtxes ();
2632 }
2633
2634 return NULL_RTX;
2635 }
2636
2637 /* Return true if SIZE bytes starting from address ADDR might overlap an
2638 already-clobbered argument area. This function is used to determine
2639 if we should give up a sibcall. */
2640
2641 static bool
2642 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
2643 {
2644 poly_int64 i;
2645 unsigned HOST_WIDE_INT start, end;
2646 rtx val;
2647
2648 if (bitmap_empty_p (stored_args_map)
2649 && stored_args_watermark == HOST_WIDE_INT_M1U)
2650 return false;
2651 val = internal_arg_pointer_based_exp (addr, true);
2652 if (val == NULL_RTX)
2653 return false;
2654 else if (!poly_int_rtx_p (val, &i))
2655 return true;
2656
2657 if (known_eq (size, 0U))
2658 return false;
2659
2660 if (STACK_GROWS_DOWNWARD)
2661 i -= crtl->args.pretend_args_size;
2662 else
2663 i += crtl->args.pretend_args_size;
2664
2665 if (ARGS_GROW_DOWNWARD)
2666 i = -i - size;
2667
2668 /* We can ignore any references to the function's pretend args,
2669 which at this point would manifest as negative values of I. */
2670 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2671 return false;
2672
2673 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2674 if (!(i + size).is_constant (&end))
2675 end = HOST_WIDE_INT_M1U;
2676
2677 if (end > stored_args_watermark)
2678 return true;
2679
2680 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2681 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2682 if (bitmap_bit_p (stored_args_map, k))
2683 return true;
2684
2685 return false;
2686 }
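
/* A worked example (numbers assumed): with ARGS_GROW_DOWNWARD false,
   no pretend args, ADDR == internal_arg_pointer + 8 and SIZE == 4,
   the byte range [8, 12) is tested: the sibcall is abandoned if 12
   exceeds stored_args_watermark or any of bits 8..11 are set in
   stored_args_map.  */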
2687
2688 /* Do the register loads required for any wholly-register parms or any
2689 parms which are passed both on the stack and in a register. Their
2690 expressions were already evaluated.
2691
2692 Mark all register-parms as living through the call, putting these USE
2693 insns in the CALL_INSN_FUNCTION_USAGE field.
2694
2695 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2696 checking, setting *SIBCALL_FAILURE if appropriate. */
2697
2698 static void
2699 load_register_parameters (struct arg_data *args, int num_actuals,
2700 rtx *call_fusage, int flags, int is_sibcall,
2701 int *sibcall_failure)
2702 {
2703 int i, j;
2704
2705 for (i = 0; i < num_actuals; i++)
2706 {
2707 rtx reg = ((flags & ECF_SIBCALL)
2708 ? args[i].tail_call_reg : args[i].reg);
2709 if (reg)
2710 {
2711 int partial = args[i].partial;
2712 int nregs;
2713 poly_int64 size = 0;
2714 HOST_WIDE_INT const_size = 0;
2715 rtx_insn *before_arg = get_last_insn ();
2716 /* Set non-negative if we must move a word at a time, even if
2717 just one word (e.g., partial == 4 && mode == DFmode). Set
2718 to -1 if we just use a normal move insn. This value can be
2719 zero if the argument is a zero size structure. */
2720 nregs = -1;
2721 if (GET_CODE (reg) == PARALLEL)
2722 ;
2723 else if (partial)
2724 {
2725 gcc_assert (partial % UNITS_PER_WORD == 0);
2726 nregs = partial / UNITS_PER_WORD;
2727 }
2728 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2729 {
2730 /* Variable-sized parameters should be described by a
2731 PARALLEL instead. */
2732 const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2733 gcc_assert (const_size >= 0);
2734 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2735 size = const_size;
2736 }
2737 else
2738 size = GET_MODE_SIZE (args[i].mode);
2739
2740 /* Handle calls that pass values in multiple non-contiguous
2741 locations. The Irix 6 ABI has examples of this. */
2742
2743 if (GET_CODE (reg) == PARALLEL)
2744 emit_group_move (reg, args[i].parallel_value);
2745
2746 /* If simple case, just do move. If normal partial, store_one_arg
2747 has already loaded the register for us. In all other cases,
2748 load the register(s) from memory. */
2749
2750 else if (nregs == -1)
2751 {
2752 emit_move_insn (reg, args[i].value);
2753 #ifdef BLOCK_REG_PADDING
2754 /* Handle the case where we have a value that needs shifting
2755 up to the msb, e.g. a QImode value and we're padding
2756 upward on a BYTES_BIG_ENDIAN machine. */
2757 if (args[i].locate.where_pad
2758 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
2759 {
2760 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2761 if (maybe_lt (size, UNITS_PER_WORD))
2762 {
2763 rtx x;
2764 poly_int64 shift
2765 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2766
2767 /* Assigning REG here rather than a temp makes
2768 CALL_FUSAGE report the whole reg as used.
2769 Strictly speaking, the call only uses SIZE
2770 bytes at the msb end, but it doesn't seem worth
2771 generating rtl to say that. */
2772 reg = gen_rtx_REG (word_mode, REGNO (reg));
2773 x = expand_shift (LSHIFT_EXPR, word_mode,
2774 reg, shift, reg, 1);
2775 if (x != reg)
2776 emit_move_insn (reg, x);
2777 }
2778 }
2779 #endif
2780 }
2781
2782 /* If we have pre-computed the values to put in the registers in
2783 the case of non-aligned structures, copy them in now. */
2784
2785 else if (args[i].n_aligned_regs != 0)
2786 for (j = 0; j < args[i].n_aligned_regs; j++)
2787 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2788 args[i].aligned_regs[j]);
2789
2790 else if (partial == 0 || args[i].pass_on_stack)
2791 {
2792 /* SIZE and CONST_SIZE are 0 for partial arguments and
2793 the size of a BLKmode type otherwise. */
2794 gcc_checking_assert (known_eq (size, const_size));
2795 rtx mem = validize_mem (copy_rtx (args[i].value));
2796
2797 /* Check for overlap with already clobbered argument area,
2798 providing that this has non-zero size. */
2799 if (is_sibcall
2800 && const_size != 0
2801 && (mem_might_overlap_already_clobbered_arg_p
2802 (XEXP (args[i].value, 0), const_size)))
2803 *sibcall_failure = 1;
2804
2805 if (const_size % UNITS_PER_WORD == 0
2806 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2807 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2808 else
2809 {
2810 if (nregs > 1)
2811 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2812 args[i].mode);
2813 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2814 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2815 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
2816 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2817 word_mode, word_mode, false,
2818 NULL);
2819 if (BYTES_BIG_ENDIAN)
2820 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2821 BITS_PER_WORD - bitsize, dest, 1);
2822 if (x != dest)
2823 emit_move_insn (dest, x);
2824 }
2825
2826 /* Handle a BLKmode that needs shifting. */
2827 if (nregs == 1 && const_size < UNITS_PER_WORD
2828 #ifdef BLOCK_REG_PADDING
2829 && args[i].locate.where_pad == PAD_DOWNWARD
2830 #else
2831 && BYTES_BIG_ENDIAN
2832 #endif
2833 )
2834 {
2835 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2836 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
2837 enum tree_code dir = (BYTES_BIG_ENDIAN
2838 ? RSHIFT_EXPR : LSHIFT_EXPR);
2839 rtx x;
2840
2841 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2842 if (x != dest)
2843 emit_move_insn (dest, x);
2844 }
2845 }
2846
2847 /* When a parameter is a block, and perhaps in other cases, it is
2848 possible that it did a load from an argument slot that was
2849 already clobbered. */
2850 if (is_sibcall
2851 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2852 *sibcall_failure = 1;
2853
2854 /* Handle calls that pass values in multiple non-contiguous
2855 locations. The Irix 6 ABI has examples of this. */
2856 if (GET_CODE (reg) == PARALLEL)
2857 use_group_regs (call_fusage, reg);
2858 else if (nregs == -1)
2859 use_reg_mode (call_fusage, reg,
2860 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2861 else if (nregs > 0)
2862 use_regs (call_fusage, REGNO (reg), nregs);
2863 }
2864 }
2865 }
2866
2867 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2868 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2869 bytes, then we would need to push some additional bytes to pad the
2870 arguments. So, we try to compute an adjustment to the stack pointer for an
2871 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2872 bytes. Then, when the arguments are pushed, the stack will be perfectly
2873 aligned.
2874
2875 Return true if this optimization is possible, storing the adjustment
2876 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2877 bytes that should be popped after the call. */
2878
2879 static bool
2880 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2881 poly_int64 unadjusted_args_size,
2882 struct args_size *args_size,
2883 unsigned int preferred_unit_stack_boundary)
2884 {
2885 /* The number of bytes to pop so that the stack will be
2886 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2887 poly_int64 adjustment;
2888 /* The alignment of the stack after the arguments are pushed, if we
2889 just pushed the arguments without adjusting the stack here. */
2890 unsigned HOST_WIDE_INT unadjusted_alignment;
2891
2892 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2893 preferred_unit_stack_boundary,
2894 &unadjusted_alignment))
2895 return false;
2896
2897 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2898 as possible -- leaving just enough left to cancel out the
2899 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2900 PENDING_STACK_ADJUST is non-negative, and congruent to
2901 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2902
2903 /* Begin by trying to pop all the bytes. */
2904 unsigned HOST_WIDE_INT tmp_misalignment;
2905 if (!known_misalignment (pending_stack_adjust,
2906 preferred_unit_stack_boundary,
2907 &tmp_misalignment))
2908 return false;
2909 unadjusted_alignment -= tmp_misalignment;
2910 adjustment = pending_stack_adjust;
2911 /* Push enough additional bytes that the stack will be aligned
2912 after the arguments are pushed. */
2913 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2914 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2915
2916 /* We need to know whether the adjusted argument size
2917 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2918 or a deallocation. */
2919 if (!ordered_p (adjustment, unadjusted_args_size))
2920 return false;
2921
2922 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2923 bytes after the call. The right number is the entire
2924 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2925 by the arguments in the first place. */
2926 args_size->constant
2927 = pending_stack_adjust - adjustment + unadjusted_args_size;
2928
2929 *adjustment_out = adjustment;
2930 return true;
2931 }
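
/* A worked example (numbers assumed): with pending_stack_adjust == 16,
   stack_pointer_delta == 8, UNADJUSTED_ARGS_SIZE == 8 and a 16-byte
   PREFERRED_UNIT_STACK_BOUNDARY, both misalignments are 0, so
   ADJUSTMENT == 16 (all pending bytes are popped) and
   ARGS_SIZE->CONSTANT becomes 16 - 16 + 8 == 8: after the 8 argument
   bytes are pushed the stack is aligned again.  */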
2932
2933 /* Scan expression X to check whether it dereferences any argument slots
2934 we have already clobbered with tail call arguments (as noted in the
2935 stored_args_map bitmap).
2936 Return nonzero if X dereferences such an argument slot,
2937 zero otherwise. */
2938
2939 static int
2940 check_sibcall_argument_overlap_1 (rtx x)
2941 {
2942 RTX_CODE code;
2943 int i, j;
2944 const char *fmt;
2945
2946 if (x == NULL_RTX)
2947 return 0;
2948
2949 code = GET_CODE (x);
2950
2951 /* We need not check the operands of the CALL expression itself. */
2952 if (code == CALL)
2953 return 0;
2954
2955 if (code == MEM)
2956 return (mem_might_overlap_already_clobbered_arg_p
2957 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
2958
2959 /* Scan all subexpressions. */
2960 fmt = GET_RTX_FORMAT (code);
2961 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2962 {
2963 if (*fmt == 'e')
2964 {
2965 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2966 return 1;
2967 }
2968 else if (*fmt == 'E')
2969 {
2970 for (j = 0; j < XVECLEN (x, i); j++)
2971 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2972 return 1;
2973 }
2974 }
2975 return 0;
2976 }
2977
2978 /* Scan the sequence after INSN to check whether it dereferences any
2979 argument slots we have already clobbered with tail call arguments (as
2980 noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the
2981 stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
2982 is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the
2983 sequence after INSN dereferences such argument slots, zero otherwise. */
2984
2985 static int
2986 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2987 int mark_stored_args_map)
2988 {
2989 poly_uint64 low, high;
2990 unsigned HOST_WIDE_INT const_low, const_high;
2991
2992 if (insn == NULL_RTX)
2993 insn = get_insns ();
2994 else
2995 insn = NEXT_INSN (insn);
2996
2997 for (; insn; insn = NEXT_INSN (insn))
2998 if (INSN_P (insn)
2999 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3000 break;
3001
3002 if (mark_stored_args_map)
3003 {
3004 if (ARGS_GROW_DOWNWARD)
3005 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3006 else
3007 low = arg->locate.slot_offset.constant;
3008 high = low + arg->locate.size.constant;
3009
3010 const_low = constant_lower_bound (low);
3011 if (high.is_constant (&const_high))
3012 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3013 bitmap_set_bit (stored_args_map, i);
3014 else
3015 stored_args_watermark = MIN (stored_args_watermark, const_low);
3016 }
3017 return insn != NULL_RTX;
3018 }
3019
3020 /* Given that a function returns a value of mode MODE at the most
3021 significant end of hard register VALUE, shift VALUE left or right
3022 as specified by LEFT_P. Return true if some action was needed. */
3023
3024 bool
3025 shift_return_value (machine_mode mode, bool left_p, rtx value)
3026 {
3027 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3028 machine_mode value_mode = GET_MODE (value);
3029 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3030
3031 if (known_eq (shift, 0))
3032 return false;
3033
3034 /* Use ashr rather than lshr for right shifts. This is for the benefit
3035 of the MIPS port, which requires SImode values to be sign-extended
3036 when stored in 64-bit registers. */
3037 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3038 value, gen_int_shift_amount (value_mode, shift),
3039 value, 1, OPTAB_WIDEN))
3040 gcc_unreachable ();
3041 return true;
3042 }
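
/* For example (illustrative): a SImode value returned at the most
   significant end of a 64-bit DImode register gives
   SHIFT == 64 - 32 == 32, and a right shift by 32 moves it down to
   the least significant end (arithmetic rather than logical, for the
   MIPS sign-extension invariant noted above).  */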
3043
3044 /* If X is a likely-spilled register value, copy it to a pseudo
3045 register and return that register. Return X otherwise. */
3046
3047 static rtx
3048 avoid_likely_spilled_reg (rtx x)
3049 {
3050 rtx new_rtx;
3051
3052 if (REG_P (x)
3053 && HARD_REGISTER_P (x)
3054 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3055 {
3056 /* Make sure that we generate a REG rather than a CONCAT.
3057 Moves into CONCATs can need nontrivial instructions,
3058 and the whole point of this function is to avoid
3059 using the hard register directly in such a situation. */
3060 generating_concat_p = 0;
3061 new_rtx = gen_reg_rtx (GET_MODE (x));
3062 generating_concat_p = 1;
3063 emit_move_insn (new_rtx, x);
3064 return new_rtx;
3065 }
3066 return x;
3067 }
3068
3069 /* Helper function for expand_call.
3070 Return false if EXP is not implementable as a sibling call. */
3071
3072 static bool
3073 can_implement_as_sibling_call_p (tree exp,
3074 rtx structure_value_addr,
3075 tree funtype,
3076 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3077 tree fndecl,
3078 int flags,
3079 tree addr,
3080 const args_size &args_size)
3081 {
3082 if (!targetm.have_sibcall_epilogue ())
3083 {
3084 maybe_complain_about_tail_call
3085 (exp,
3086 "machine description does not have"
3087 " a sibcall_epilogue instruction pattern");
3088 return false;
3089 }
3090
3091 /* Doing sibling call optimization needs some work, since
3092 structure_value_addr can be allocated on the stack.
3093 It does not seem worth the effort since few optimizable
3094 sibling calls will return a structure. */
3095 if (structure_value_addr != NULL_RTX)
3096 {
3097 maybe_complain_about_tail_call (exp, "callee returns a structure");
3098 return false;
3099 }
3100
3101 #ifdef REG_PARM_STACK_SPACE
3102 /* If outgoing reg parm stack space changes, we cannot do a sibcall. */
3103 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3104 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3105 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3106 {
3107 maybe_complain_about_tail_call (exp,
3108 "inconsistent size of stack space"
3109 " allocated for arguments which are"
3110 " passed in registers");
3111 return false;
3112 }
3113 #endif
3114
3115 /* Check whether the target is able to optimize the call
3116 into a sibcall. */
3117 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3118 {
3119 maybe_complain_about_tail_call (exp,
3120 "target is not able to optimize the"
3121 " call into a sibling call");
3122 return false;
3123 }
3124
3125 /* Functions that do not return exactly once may not be sibcall
3126 optimized. */
3127 if (flags & ECF_RETURNS_TWICE)
3128 {
3129 maybe_complain_about_tail_call (exp, "callee returns twice");
3130 return false;
3131 }
3132 if (flags & ECF_NORETURN)
3133 {
3134 maybe_complain_about_tail_call (exp, "callee does not return");
3135 return false;
3136 }
3137
3138 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3139 {
3140 maybe_complain_about_tail_call (exp, "volatile function type");
3141 return false;
3142 }
3143
3144 /* If the called function is nested in the current one, it might access
3145 some of the caller's arguments, but could clobber them beforehand if
3146 the argument areas are shared. */
3147 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3148 {
3149 maybe_complain_about_tail_call (exp, "nested function");
3150 return false;
3151 }
3152
3153 /* If this function requires more stack slots than the current
3154 function, we cannot change it into a sibling call.
3155 crtl->args.pretend_args_size is not part of the
3156 stack allocated by our caller. */
3157 if (maybe_gt (args_size.constant,
3158 crtl->args.size - crtl->args.pretend_args_size))
3159 {
3160 maybe_complain_about_tail_call (exp,
3161 "callee required more stack slots"
3162 " than the caller");
3163 return false;
3164 }
3165
3166 /* If the callee pops its own arguments, then it must pop exactly
3167 the same number of arguments as the current function. */
3168 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3169 args_size.constant),
3170 targetm.calls.return_pops_args (current_function_decl,
3171 TREE_TYPE
3172 (current_function_decl),
3173 crtl->args.size)))
3174 {
3175 maybe_complain_about_tail_call (exp,
3176 "inconsistent number of"
3177 " popped arguments");
3178 return false;
3179 }
3180
3181 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3182 {
3183 maybe_complain_about_tail_call (exp, "frontend does not support"
3184 " sibling call");
3185 return false;
3186 }
3187
3188 /* All checks passed. */
3189 return true;
3190 }
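
/* A hypothetical rejection (illustrative only): a callee declared
   __attribute__ ((returns_twice)), e.g. vfork, carries
   ECF_RETURNS_TWICE, so the check above refuses the sibcall and a
   CALL_EXPR_MUST_TAIL_CALL call reports
     error: cannot tail-call: callee returns twice  */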
3191
3192 /* Generate all the code for a CALL_EXPR exp
3193 and return an rtx for its value.
3194 Store the value in TARGET (specified as an rtx) if convenient.
3195 If the value is stored in TARGET then TARGET is returned.
3196 If IGNORE is nonzero, then we ignore the value of the function call. */
3197
3198 rtx
3199 expand_call (tree exp, rtx target, int ignore)
3200 {
3201 /* Nonzero if we are currently expanding a call. */
3202 static int currently_expanding_call = 0;
3203
3204 /* RTX for the function to be called. */
3205 rtx funexp;
3206 /* Sequence of insns to perform a normal "call". */
3207 rtx_insn *normal_call_insns = NULL;
3208 /* Sequence of insns to perform a tail "call". */
3209 rtx_insn *tail_call_insns = NULL;
3210 /* Data type of the function. */
3211 tree funtype;
3212 tree type_arg_types;
3213 tree rettype;
3214 /* Declaration of the function being called,
3215 or 0 if the function is computed (not known by name). */
3216 tree fndecl = 0;
3217 /* The type of the function being called. */
3218 tree fntype;
3219 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3220 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3221 int pass;
3222
3223 /* Register in which non-BLKmode value will be returned,
3224 or 0 if no value or if value is BLKmode. */
3225 rtx valreg;
3226 /* Register(s) in which bounds are returned. */
3227 rtx valbnd = NULL;
3228 /* Address where we should return a BLKmode value;
3229 0 if value not BLKmode. */
3230 rtx structure_value_addr = 0;
3231 /* Nonzero if that address is being passed by treating it as
3232 an extra, implicit first parameter. Otherwise,
3233 it is passed by being copied directly into struct_value_rtx. */
3234 int structure_value_addr_parm = 0;
3235 /* Holds the value of implicit argument for the struct value. */
3236 tree structure_value_addr_value = NULL_TREE;
3237 /* Size of aggregate value wanted, or zero if none wanted
3238 or if we are using the non-reentrant PCC calling convention
3239 or expecting the value in registers. */
3240 poly_int64 struct_value_size = 0;
3241 /* Nonzero if called function returns an aggregate in memory PCC style,
3242 by returning the address of where to find it. */
3243 int pcc_struct_value = 0;
3244 rtx struct_value = 0;
3245
3246 /* Number of actual parameters in this call, including struct value addr. */
3247 int num_actuals;
3248 /* Number of named args. Args after this are anonymous ones
3249 and they must all go on the stack. */
3250 int n_named_args;
3251 /* Number of complex actual arguments that need to be split. */
3252 int num_complex_actuals = 0;
3253
3254 /* Vector of information about each argument.
3255 Arguments are numbered in the order they will be pushed,
3256 not the order they are written. */
3257 struct arg_data *args;
3258
3259 /* Total size in bytes of all the stack-parms scanned so far. */
3260 struct args_size args_size;
3261 struct args_size adjusted_args_size;
3262 /* Size of arguments before any adjustments (such as rounding). */
3263 poly_int64 unadjusted_args_size;
3264 /* Data on reg parms scanned so far. */
3265 CUMULATIVE_ARGS args_so_far_v;
3266 cumulative_args_t args_so_far;
3267 /* Nonzero if a reg parm has been scanned. */
3268 int reg_parm_seen;
3269 /* Nonzero if this is an indirect function call. */
3270
3271 /* Nonzero if we must avoid push-insns in the args for this call.
3272 If stack space is allocated for register parameters, but not by the
3273 caller, then it is preallocated in the fixed part of the stack frame.
3274 So the entire argument block must then be preallocated (i.e., we
3275 ignore PUSH_ROUNDING in that case). */
3276
3277 int must_preallocate = !PUSH_ARGS;
3278
3279 /* Size of the stack reserved for parameter registers. */
3280 int reg_parm_stack_space = 0;
3281
3282 /* Address of space preallocated for stack parms
3283 (on machines that lack push insns), or 0 if space not preallocated. */
3284 rtx argblock = 0;
3285
3286 /* Mask of ECF_ and ERF_ flags. */
3287 int flags = 0;
3288 int return_flags = 0;
3289 #ifdef REG_PARM_STACK_SPACE
3290 /* Define the boundary of the register parm stack space that needs to be
3291 saved, if any. */
3292 int low_to_save, high_to_save;
3293 rtx save_area = 0; /* Place that it is saved */
3294 #endif
3295
3296 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3297 char *initial_stack_usage_map = stack_usage_map;
3298 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3299 char *stack_usage_map_buf = NULL;
3300
3301 poly_int64 old_stack_allocated;
3302
3303 /* State variables to track stack modifications. */
3304 rtx old_stack_level = 0;
3305 int old_stack_arg_under_construction = 0;
3306 poly_int64 old_pending_adj = 0;
3307 int old_inhibit_defer_pop = inhibit_defer_pop;
3308
3309 /* Some stack pointer alterations we make are performed via
3310 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3311 which we then also need to save/restore along the way. */
3312 poly_int64 old_stack_pointer_delta = 0;
3313
3314 rtx call_fusage;
3315 tree addr = CALL_EXPR_FN (exp);
3316 int i;
3317 /* The alignment of the stack, in bits. */
3318 unsigned HOST_WIDE_INT preferred_stack_boundary;
3319 /* The alignment of the stack, in bytes. */
3320 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3321 /* The static chain value to use for this call. */
3322 rtx static_chain_value;
3323 /* See if this is "nothrow" function call. */
3324 if (TREE_NOTHROW (exp))
3325 flags |= ECF_NOTHROW;
3326
3327 /* See if we can find a DECL-node for the actual function, and get the
3328 function attributes (flags) from the function decl or type node. */
3329 fndecl = get_callee_fndecl (exp);
3330 if (fndecl)
3331 {
3332 fntype = TREE_TYPE (fndecl);
3333 flags |= flags_from_decl_or_type (fndecl);
3334 return_flags |= decl_return_flags (fndecl);
3335 }
3336 else
3337 {
3338 fntype = TREE_TYPE (TREE_TYPE (addr));
3339 flags |= flags_from_decl_or_type (fntype);
3340 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3341 flags |= ECF_BY_DESCRIPTOR;
3342 }
3343 rettype = TREE_TYPE (exp);
3344
3345 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3346
3347 /* Warn if this value is an aggregate type,
3348 regardless of which calling convention we are using for it. */
3349 if (AGGREGATE_TYPE_P (rettype))
3350 warning (OPT_Waggregate_return, "function call has aggregate value");
3351
3352 /* If the result of a non-looping pure or const function call is
3353 ignored (or void), and none of its arguments are volatile, we can
3354 avoid expanding the call and just evaluate the arguments for
3355 side-effects. */
3356 if ((flags & (ECF_CONST | ECF_PURE))
3357 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3358 && (ignore || target == const0_rtx
3359 || TYPE_MODE (rettype) == VOIDmode))
3360 {
3361 bool volatilep = false;
3362 tree arg;
3363 call_expr_arg_iterator iter;
3364
3365 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3366 if (TREE_THIS_VOLATILE (arg))
3367 {
3368 volatilep = true;
3369 break;
3370 }
3371
3372 if (! volatilep)
3373 {
3374 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3375 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3376 return const0_rtx;
3377 }
3378 }
3379
3380 #ifdef REG_PARM_STACK_SPACE
3381 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3382 #endif
3383
3384 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3385 && reg_parm_stack_space > 0 && PUSH_ARGS)
3386 must_preallocate = 1;
3387
3388 /* Set up a place to return a structure. */
3389
3390 /* Cater to broken compilers. */
3391 if (aggregate_value_p (exp, fntype))
3392 {
3393 /* This call returns a big structure. */
3394 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3395
3396 #ifdef PCC_STATIC_STRUCT_RETURN
3397 {
3398 pcc_struct_value = 1;
3399 }
3400 #else /* not PCC_STATIC_STRUCT_RETURN */
3401 {
3402 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3403 struct_value_size = -1;
3404
3405 /* Even if it is semantically safe to use the target as the return
3406 slot, it may not be sufficiently aligned for the return type. */
3407 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3408 && target
3409 && MEM_P (target)
3410 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3411 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3412 MEM_ALIGN (target))))
3413 structure_value_addr = XEXP (target, 0);
3414 else
3415 {
3416 /* For variable-sized objects, we must be called with a target
3417 specified. If we were to allocate space on the stack here,
3418 we would have no way of knowing when to free it. */
3419 rtx d = assign_temp (rettype, 1, 1);
3420 structure_value_addr = XEXP (d, 0);
3421 target = 0;
3422 }
3423 }
3424 #endif /* not PCC_STATIC_STRUCT_RETURN */
3425 }
3426
3427 /* Figure out the amount to which the stack should be aligned. */
3428 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3429 if (fndecl)
3430 {
3431 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3432 /* Without automatic stack alignment, we can't increase preferred
3433 stack boundary. With automatic stack alignment, it is
3434 unnecessary: unless we can guarantee that all callers will
3435 align the outgoing stack properly, the callee has to align its
3436 stack anyway. */
3437 if (i
3438 && i->preferred_incoming_stack_boundary
3439 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3440 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3441 }
3442
3443 /* Operand 0 is a pointer-to-function; get the type of the function. */
3444 funtype = TREE_TYPE (addr);
3445 gcc_assert (POINTER_TYPE_P (funtype));
3446 funtype = TREE_TYPE (funtype);
3447
3448 /* Count whether there are actual complex arguments that need to be split
3449 into their real and imaginary parts. Munge the type_arg_types
3450 appropriately here as well. */
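  /* For example, on a target whose split_complex_arg hook accepts
  `_Complex double', an argument list of (_Complex double, int) is
  munged into (double, double, int): one slot each for the real and
  imaginary parts. */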
3451 if (targetm.calls.split_complex_arg)
3452 {
3453 call_expr_arg_iterator iter;
3454 tree arg;
3455 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3456 {
3457 tree type = TREE_TYPE (arg);
3458 if (type && TREE_CODE (type) == COMPLEX_TYPE
3459 && targetm.calls.split_complex_arg (type))
3460 num_complex_actuals++;
3461 }
3462 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3463 }
3464 else
3465 type_arg_types = TYPE_ARG_TYPES (funtype);
3466
3467 if (flags & ECF_MAY_BE_ALLOCA)
3468 cfun->calls_alloca = 1;
3469
3470 /* If struct_value_rtx is 0, it means pass the address
3471 as if it were an extra parameter. Put the argument expression
3472 in structure_value_addr_value. */
3473 if (structure_value_addr && struct_value == 0)
3474 {
3475 /* If structure_value_addr is a REG other than
3476 virtual_outgoing_args_rtx, we can always use it. If it
3477 is not a REG, we must always copy it into a register.
3478 If it is virtual_outgoing_args_rtx, we must copy it to another
3479 register in some cases. */
3480 rtx temp = (!REG_P (structure_value_addr)
3481 || (ACCUMULATE_OUTGOING_ARGS
3482 && stack_arg_under_construction
3483 && structure_value_addr == virtual_outgoing_args_rtx)
3484 ? copy_addr_to_reg (convert_memory_address
3485 (Pmode, structure_value_addr))
3486 : structure_value_addr);
3487
3488 structure_value_addr_value =
3489 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3490 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
3491 }
3492
3493 /* Count the arguments and set NUM_ACTUALS. */
3494 num_actuals =
3495 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3496
3497 /* Compute number of named args.
3498 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3499
3500 if (type_arg_types != 0)
3501 n_named_args
3502 = (list_length (type_arg_types)
3503 /* Count the struct value address, if it is passed as a parm. */
3504 + structure_value_addr_parm);
3505 else
3506 /* If we know nothing, treat all args as named. */
3507 n_named_args = num_actuals;
3508
3509 /* Start updating where the next arg would go.
3510
3511 On some machines (such as the PA) indirect calls have a different
3512 calling convention than normal calls. The fourth argument in
3513 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3514 or not. */
3515 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3516 args_so_far = pack_cumulative_args (&args_so_far_v);
3517
3518 /* Now possibly adjust the number of named args.
3519 Normally, don't include the last named arg if anonymous args follow.
3520 We do include the last named arg if
3521 targetm.calls.strict_argument_naming() returns nonzero.
3522 (If no anonymous args follow, the result of list_length is actually
3523 one too large. This is harmless.)
3524
3525 If targetm.calls.pretend_outgoing_varargs_named() returns
3526 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3527 this machine will be able to place unnamed args that were passed
3528 in registers into the stack. So treat all args as named. This
3529 allows the insns emitted for a specific argument list to be
3530 independent of the function declaration.
3531
3532 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3533 we do not have any reliable way to pass unnamed args in
3534 registers, so we must force them into memory. */
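  /* Roughly: for a prototyped `int f (int)' the trailing void node
  makes list_length return 2 rather than 1, while for the varargs
  `int f (int, ...)' it returns exactly 1; the prototyped case is
  the harmless off-by-one mentioned above. */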
3535
3536 if (type_arg_types != 0
3537 && targetm.calls.strict_argument_naming (args_so_far))
3538 ;
3539 else if (type_arg_types != 0
3540 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3541 /* Don't include the last named arg. */
3542 --n_named_args;
3543 else
3544 /* Treat all args as named. */
3545 n_named_args = num_actuals;
3546
3547 /* Make a vector to hold all the information about each arg. */
3548 args = XCNEWVEC (struct arg_data, num_actuals);
3549
3550 /* Build up entries in the ARGS array, compute the size of the
3551 arguments into ARGS_SIZE, etc. */
3552 initialize_argument_information (num_actuals, args, &args_size,
3553 n_named_args, exp,
3554 structure_value_addr_value, fndecl, fntype,
3555 args_so_far, reg_parm_stack_space,
3556 &old_stack_level, &old_pending_adj,
3557 &must_preallocate, &flags,
3558 &try_tail_call, CALL_FROM_THUNK_P (exp));
3559
3560 if (args_size.var)
3561 must_preallocate = 1;
3562
3563 /* Now make final decision about preallocating stack space. */
3564 must_preallocate = finalize_must_preallocate (must_preallocate,
3565 num_actuals, args,
3566 &args_size);
3567
3568 /* If the structure value address will reference the stack pointer, we
3569 must stabilize it. We don't need to do this if we know that we are
3570 not going to adjust the stack pointer in processing this call. */
3571
3572 if (structure_value_addr
3573 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3574 || reg_mentioned_p (virtual_outgoing_args_rtx,
3575 structure_value_addr))
3576 && (args_size.var
3577 || (!ACCUMULATE_OUTGOING_ARGS
3578 && maybe_ne (args_size.constant, 0))))
3579 structure_value_addr = copy_to_reg (structure_value_addr);
3580
3581 /* Tail calls can make things harder to debug, and we've traditionally
3582 pushed these optimizations into -O2. Don't try if we're already
3583 expanding a call, as that means we're an argument. Don't try if
3584 there's cleanups, as we know there's code to follow the call. */
3585
3586 if (currently_expanding_call++ != 0
3587 || !flag_optimize_sibling_calls
3588 || args_size.var
3589 || dbg_cnt (tail_call) == false)
3590 try_tail_call = 0;
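  /* E.g. with -foptimize-sibling-calls (on by default at -O2),
  `return g (x);' in tail position of f can reuse f's frame and
  become a jump rather than a call, provided none of the checks
  here or below force a sibcall failure. */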
3591
3592 /* If the user has marked the function as requiring tail-call
3593 optimization, attempt it. */
3594 if (must_tail_call)
3595 try_tail_call = 1;
3596
3597 /* Check the remaining reasons for tail-call optimization to fail. */
3598 if (try_tail_call)
3599 try_tail_call = can_implement_as_sibling_call_p (exp,
3600 structure_value_addr,
3601 funtype,
3602 reg_parm_stack_space,
3603 fndecl,
3604 flags, addr, args_size);
3605
3606 /* Check if caller and callee disagree in promotion of function
3607 return value. */
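  /* For instance, if the caller's `signed char' result is promoted
  sign-extended to a full word while the callee's `unsigned char'
  result is promoted zero-extended, a sibcall would hand our caller
  a value extended the wrong way, so it must be disabled. */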
3608 if (try_tail_call)
3609 {
3610 machine_mode caller_mode, caller_promoted_mode;
3611 machine_mode callee_mode, callee_promoted_mode;
3612 int caller_unsignedp, callee_unsignedp;
3613 tree caller_res = DECL_RESULT (current_function_decl);
3614
3615 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3616 caller_mode = DECL_MODE (caller_res);
3617 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3618 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3619 caller_promoted_mode
3620 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3621 &caller_unsignedp,
3622 TREE_TYPE (current_function_decl), 1);
3623 callee_promoted_mode
3624 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3625 &callee_unsignedp,
3626 funtype, 1);
3627 if (caller_mode != VOIDmode
3628 && (caller_promoted_mode != callee_promoted_mode
3629 || ((caller_mode != caller_promoted_mode
3630 || callee_mode != callee_promoted_mode)
3631 && (caller_unsignedp != callee_unsignedp
3632 || partial_subreg_p (caller_mode, callee_mode)))))
3633 {
3634 try_tail_call = 0;
3635 maybe_complain_about_tail_call (exp,
3636 "caller and callee disagree in"
3637 " promotion of function"
3638 " return value");
3639 }
3640 }
3641
3642 /* Ensure current function's preferred stack boundary is at least
3643 what we need. Stack alignment may also increase preferred stack
3644 boundary. */
3645 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
3646 crtl->preferred_stack_boundary = preferred_stack_boundary;
3647 else
3648 preferred_stack_boundary = crtl->preferred_stack_boundary;
3649
3650 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
3651
3652 /* We want to make two insn chains; one for a sibling call, the other
3653 for a normal call. We will select one of the two chains after
3654 initial RTL generation is complete. */
3655 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
3656 {
3657 int sibcall_failure = 0;
3658 /* We want to emit any pending stack adjustments before the tail
3659 recursion "call". That way we know any adjustment after the tail
3660 recursion call can be ignored if we indeed use the tail
3661 call expansion. */
3662 saved_pending_stack_adjust save;
3663 rtx_insn *insns, *before_call, *after_args;
3664 rtx next_arg_reg;
3665
3666 if (pass == 0)
3667 {
3668 /* State variables we need to save and restore between
3669 iterations. */
3670 save_pending_stack_adjust (&save);
3671 }
3672 if (pass)
3673 flags &= ~ECF_SIBCALL;
3674 else
3675 flags |= ECF_SIBCALL;
3676
3677 /* Other state variables that we must reinitialize each time
3678 through the loop (that are not initialized by the loop itself). */
3679 argblock = 0;
3680 call_fusage = 0;
3681
3682 /* Start a new sequence for the normal call case.
3683
3684 From this point on, if the sibling call fails, we want to set
3685 sibcall_failure instead of continuing the loop. */
3686 start_sequence ();
3687
3688 /* Don't let pending stack adjusts add up to too much.
3689 Also, do all pending adjustments now if there is any chance
3690 this might be a call to alloca or if we are expanding a sibling
3691 call sequence.
3692 Also do the adjustments before a throwing call, otherwise
3693 exception handling can fail; PR 19225. */
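  /* (The 32-byte cap is a heuristic: flushing small adjustments
  early costs little, while letting them accumulate across many
  calls tends to produce worse code.) */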
3694 if (maybe_ge (pending_stack_adjust, 32)
3695 || (maybe_ne (pending_stack_adjust, 0)
3696 && (flags & ECF_MAY_BE_ALLOCA))
3697 || (maybe_ne (pending_stack_adjust, 0)
3698 && flag_exceptions && !(flags & ECF_NOTHROW))
3699 || pass == 0)
3700 do_pending_stack_adjust ();
3701
3702 /* Precompute any arguments as needed. */
3703 if (pass)
3704 precompute_arguments (num_actuals, args);
3705
3706 /* Now we are about to start emitting insns that can be deleted
3707 if a libcall is deleted. */
3708 if (pass && (flags & ECF_MALLOC))
3709 start_sequence ();
3710
3711 if (pass == 0
3712 && crtl->stack_protect_guard
3713 && targetm.stack_protect_runtime_enabled_p ())
3714 stack_protect_epilogue ();
3715
3716 adjusted_args_size = args_size;
3717 /* Compute the actual size of the argument block required. The variable
3718 and constant sizes must be combined, the size may have to be rounded,
3719 and there may be a minimum required size. When generating a sibcall
3720 pattern, do not round up, since we'll be re-using whatever space our
3721 caller provided. */
3722 unadjusted_args_size
3723 = compute_argument_block_size (reg_parm_stack_space,
3724 &adjusted_args_size,
3725 fndecl, fntype,
3726 (pass == 0 ? 0
3727 : preferred_stack_boundary));
3728
3729 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3730
3731 /* The argument block when performing a sibling call is the
3732 incoming argument block. */
3733 if (pass == 0)
3734 {
3735 argblock = crtl->args.internal_arg_pointer;
3736 if (STACK_GROWS_DOWNWARD)
3737 argblock
3738 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3739 else
3740 argblock
3741 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3742
3743 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3744 stored_args_map = sbitmap_alloc (map_size);
3745 bitmap_clear (stored_args_map);
3746 stored_args_watermark = HOST_WIDE_INT_M1U;
3747 }
3748
3749 /* If we have no actual push instructions, or shouldn't use them,
3750 make space for all args right now. */
3751 else if (adjusted_args_size.var != 0)
3752 {
3753 if (old_stack_level == 0)
3754 {
3755 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3756 old_stack_pointer_delta = stack_pointer_delta;
3757 old_pending_adj = pending_stack_adjust;
3758 pending_stack_adjust = 0;
3759 /* stack_arg_under_construction says whether a stack arg is
3760 being constructed at the old stack level. Pushing the stack
3761 gets a clean outgoing argument block. */
3762 old_stack_arg_under_construction = stack_arg_under_construction;
3763 stack_arg_under_construction = 0;
3764 }
3765 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3766 if (flag_stack_usage_info)
3767 current_function_has_unbounded_dynamic_stack_size = 1;
3768 }
3769 else
3770 {
3771 /* Note that we must go through the motions of allocating an argument
3772 block even if the size is zero because we may be storing args
3773 in the area reserved for register arguments, which may be part of
3774 the stack frame. */
3775
3776 poly_int64 needed = adjusted_args_size.constant;
3777
3778 /* Store the maximum argument space used. It will be pushed by
3779 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3780 checking). */
3781
3782 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3783 needed);
3784
3785 if (must_preallocate)
3786 {
3787 if (ACCUMULATE_OUTGOING_ARGS)
3788 {
3789 /* Since the stack pointer will never be pushed, it is
3790 possible for the evaluation of a parm to clobber
3791 something we have already written to the stack.
3792 Since most function calls on RISC machines do not use
3793 the stack, this is uncommon, but must work correctly.
3794
3795 Therefore, we save any area of the stack that was already
3796 written and that we are using. Here we set up to do this
3797 by making a new stack usage map from the old one. The
3798 actual save will be done by store_one_arg.
3799
3800 Another approach might be to try to reorder the argument
3801 evaluations to avoid this conflicting stack usage. */
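  /* stack_usage_map is a simple byte map: a nonzero entry at index I
  means byte I of the outgoing argument area is already in use, and
  store_one_arg consults it to decide whether an area must be saved
  before being overwritten. */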
3802
3803 /* Since we will be writing into the entire argument area,
3804 the map must be allocated for its entire size, not just
3805 the part that is the responsibility of the caller. */
3806 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3807 needed += reg_parm_stack_space;
3808
3809 poly_int64 limit = needed;
3810 if (ARGS_GROW_DOWNWARD)
3811 limit += 1;
3812
3813 /* For polynomial sizes, this is the maximum possible
3814 size needed for arguments with a constant size
3815 and offset. */
3816 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3817 highest_outgoing_arg_in_use
3818 = MAX (initial_highest_arg_in_use, const_limit);
3819
3820 free (stack_usage_map_buf);
3821 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3822 stack_usage_map = stack_usage_map_buf;
3823
3824 if (initial_highest_arg_in_use)
3825 memcpy (stack_usage_map, initial_stack_usage_map,
3826 initial_highest_arg_in_use);
3827
3828 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3829 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3830 (highest_outgoing_arg_in_use
3831 - initial_highest_arg_in_use));
3832 needed = 0;
3833
3834 /* The address of the outgoing argument list must not be
3835 copied to a register here, because argblock would be left
3836 pointing to the wrong place after the call to
3837 allocate_dynamic_stack_space below. */
3838
3839 argblock = virtual_outgoing_args_rtx;
3840 }
3841 else
3842 {
3843 /* Try to reuse some or all of the pending_stack_adjust
3844 to get this space. */
3845 if (inhibit_defer_pop == 0
3846 && (combine_pending_stack_adjustment_and_call
3847 (&needed,
3848 unadjusted_args_size,
3849 &adjusted_args_size,
3850 preferred_unit_stack_boundary)))
3851 {
3852 /* combine_pending_stack_adjustment_and_call computes
3853 an adjustment before the arguments are allocated.
3854 Account for them and see whether or not the stack
3855 needs to go up or down. */
3856 needed = unadjusted_args_size - needed;
3857
3858 /* Checked by
3859 combine_pending_stack_adjustment_and_call. */
3860 gcc_checking_assert (ordered_p (needed, 0));
3861 if (maybe_lt (needed, 0))
3862 {
3863 /* We're releasing stack space. */
3864 /* ??? We can avoid any adjustment at all if we're
3865 already aligned. FIXME. */
3866 pending_stack_adjust = -needed;
3867 do_pending_stack_adjust ();
3868 needed = 0;
3869 }
3870 else
3871 /* We need to allocate space. We'll do that in
3872 push_block below. */
3873 pending_stack_adjust = 0;
3874 }
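  /* Worked example, roughly: if 24 bytes of argument space are
  needed and a 16-byte pop is still pending, the combination above
  cancels the pop and only the 8-byte difference is allocated by
  push_block below (or, if the pending pop exceeds what is needed,
  the surplus is released immediately). */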
3875
3876 /* Special case this because overhead of `push_block' in
3877 this case is non-trivial. */
3878 if (known_eq (needed, 0))
3879 argblock = virtual_outgoing_args_rtx;
3880 else
3881 {
3882 rtx needed_rtx = gen_int_mode (needed, Pmode);
3883 argblock = push_block (needed_rtx, 0, 0);
3884 if (ARGS_GROW_DOWNWARD)
3885 argblock = plus_constant (Pmode, argblock, needed);
3886 }
3887
3888 /* We only really need to call `copy_to_reg' in the case
3889 where push insns are going to be used to pass ARGBLOCK
3890 to a function call in ARGS. In that case, the stack
3891 pointer changes value from the allocation point to the
3892 call point, and hence the value of
3893 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3894 as well always do it. */
3895 argblock = copy_to_reg (argblock);
3896 }
3897 }
3898 }
3899
3900 if (ACCUMULATE_OUTGOING_ARGS)
3901 {
3902 /* The save/restore code in store_one_arg handles all
3903 cases except one: a constructor call (including a C
3904 function returning a BLKmode struct) to initialize
3905 an argument. */
3906 if (stack_arg_under_construction)
3907 {
3908 rtx push_size
3909 = (gen_int_mode
3910 (adjusted_args_size.constant
3911 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3912 : TREE_TYPE (fndecl))
3913 ? 0 : reg_parm_stack_space), Pmode));
3914 if (old_stack_level == 0)
3915 {
3916 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3917 old_stack_pointer_delta = stack_pointer_delta;
3918 old_pending_adj = pending_stack_adjust;
3919 pending_stack_adjust = 0;
3920 /* stack_arg_under_construction says whether a stack
3921 arg is being constructed at the old stack level.
3922 Pushing the stack gets a clean outgoing argument
3923 block. */
3924 old_stack_arg_under_construction
3925 = stack_arg_under_construction;
3926 stack_arg_under_construction = 0;
3927 /* Make a new map for the new argument list. */
3928 free (stack_usage_map_buf);
3929 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3930 stack_usage_map = stack_usage_map_buf;
3931 highest_outgoing_arg_in_use = 0;
3932 stack_usage_watermark = HOST_WIDE_INT_M1U;
3933 }
3934 /* We can pass TRUE as the last argument because we just
3935 saved the stack pointer and will restore it right after
3936 the call. */
3937 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3938 -1, true);
3939 }
3940
3941 /* If argument evaluation might modify the stack pointer,
3942 copy the address of the argument list to a register. */
3943 for (i = 0; i < num_actuals; i++)
3944 if (args[i].pass_on_stack)
3945 {
3946 argblock = copy_addr_to_reg (argblock);
3947 break;
3948 }
3949 }
3950
3951 compute_argument_addresses (args, argblock, num_actuals);
3952
3953 /* Stack is properly aligned, pops can't safely be deferred during
3954 the evaluation of the arguments. */
3955 NO_DEFER_POP;
3956
3957 /* Precompute all register parameters. It isn't safe to compute
3958 anything once we have started filling any specific hard regs.
3959 TLS symbols sometimes need a call to resolve. Precompute
3960 register parameters before any stack pointer manipulation
3961 to avoid unaligned stack in the called function. */
3962 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3963
3964 OK_DEFER_POP;
3965
3966 /* Perform stack alignment before the first push (the last arg). */
3967 if (argblock == 0
3968 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
3969 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
3970 {
3971 /* When the stack adjustment is pending, we get better code
3972 by combining the adjustments. */
3973 if (maybe_ne (pending_stack_adjust, 0)
3974 && ! inhibit_defer_pop
3975 && (combine_pending_stack_adjustment_and_call
3976 (&pending_stack_adjust,
3977 unadjusted_args_size,
3978 &adjusted_args_size,
3979 preferred_unit_stack_boundary)))
3980 do_pending_stack_adjust ();
3981 else if (argblock == 0)
3982 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
3983 - unadjusted_args_size,
3984 Pmode));
3985 }
3986 /* Now that the stack is properly aligned, pops can't safely
3987 be deferred during the evaluation of the arguments. */
3988 NO_DEFER_POP;
3989
3990 /* Record the maximum pushed stack space size. We need to delay
3991 doing it this far to take into account the optimization done
3992 by combine_pending_stack_adjustment_and_call. */
3993 if (flag_stack_usage_info
3994 && !ACCUMULATE_OUTGOING_ARGS
3995 && pass
3996 && adjusted_args_size.var == 0)
3997 {
3998 poly_int64 pushed = (adjusted_args_size.constant
3999 + pending_stack_adjust);
4000 current_function_pushed_stack_size
4001 = upper_bound (current_function_pushed_stack_size, pushed);
4002 }
4003
4004 funexp = rtx_for_function_call (fndecl, addr);
4005
4006 if (CALL_EXPR_STATIC_CHAIN (exp))
4007 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4008 else
4009 static_chain_value = 0;
4010
4011 #ifdef REG_PARM_STACK_SPACE
4012 /* Save the fixed argument area if it's part of the caller's frame and
4013 is clobbered by argument setup for this call. */
4014 if (ACCUMULATE_OUTGOING_ARGS && pass)
4015 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4016 &low_to_save, &high_to_save);
4017 #endif
4018
4019 /* Now store (and compute if necessary) all non-register parms.
4020 These come before register parms, since they can require block-moves,
4021 which could clobber the registers used for register parms.
4022 Parms which have partial registers are not stored here,
4023 but we do preallocate space here if they want that. */
4024
4025 for (i = 0; i < num_actuals; i++)
4026 {
4027 /* Delay bounds until all other args are stored. */
4028 if (POINTER_BOUNDS_P (args[i].tree_value))
4029 continue;
4030 else if (args[i].reg == 0 || args[i].pass_on_stack)
4031 {
4032 rtx_insn *before_arg = get_last_insn ();
4033
4034 /* We don't allow passing huge (> 2^30-byte) arguments by value,
4035 as the size arithmetic would overflow an int later on. */
4036 if (constant_lower_bound (adjusted_args_size.constant)
4037 >= (1 << (HOST_BITS_PER_INT - 2)))
4038 {
4039 sorry ("passing too large argument on stack");
4040 continue;
4041 }
4042
4043 if (store_one_arg (&args[i], argblock, flags,
4044 adjusted_args_size.var != 0,
4045 reg_parm_stack_space)
4046 || (pass == 0
4047 && check_sibcall_argument_overlap (before_arg,
4048 &args[i], 1)))
4049 sibcall_failure = 1;
4050 }
4051
4052 if (args[i].stack)
4053 call_fusage
4054 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4055 gen_rtx_USE (VOIDmode, args[i].stack),
4056 call_fusage);
4057 }
4058
4059 /* If we have a parm that is passed in registers but not in memory
4060 and whose alignment does not permit a direct copy into registers,
4061 make a group of pseudos that correspond to each register that we
4062 will later fill. */
4063 if (STRICT_ALIGNMENT)
4064 store_unaligned_arguments_into_pseudos (args, num_actuals);
4065
4066 /* Now store any partially-in-registers parm.
4067 This is the last place a block-move can happen. */
4068 if (reg_parm_seen)
4069 for (i = 0; i < num_actuals; i++)
4070 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4071 {
4072 rtx_insn *before_arg = get_last_insn ();
4073
4074 /* On targets with weird calling conventions (e.g. PA) it's
4075 hard to ensure that all cases of argument overlap between
4076 stack and registers work. Play it safe and bail out. */
4077 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4078 {
4079 sibcall_failure = 1;
4080 break;
4081 }
4082
4083 if (store_one_arg (&args[i], argblock, flags,
4084 adjusted_args_size.var != 0,
4085 reg_parm_stack_space)
4086 || (pass == 0
4087 && check_sibcall_argument_overlap (before_arg,
4088 &args[i], 1)))
4089 sibcall_failure = 1;
4090 }
4091
4092 bool any_regs = false;
4093 for (i = 0; i < num_actuals; i++)
4094 if (args[i].reg != NULL_RTX)
4095 {
4096 any_regs = true;
4097 targetm.calls.call_args (args[i].reg, funtype);
4098 }
4099 if (!any_regs)
4100 targetm.calls.call_args (pc_rtx, funtype);
4101
4102 /* Figure out the register where the value, if any, will come back. */
4103 valreg = 0;
4104 valbnd = 0;
4105 if (TYPE_MODE (rettype) != VOIDmode
4106 && ! structure_value_addr)
4107 {
4108 if (pcc_struct_value)
4109 {
4110 valreg = hard_function_value (build_pointer_type (rettype),
4111 fndecl, NULL, (pass == 0));
4112 if (CALL_WITH_BOUNDS_P (exp))
4113 valbnd = targetm.calls.
4114 chkp_function_value_bounds (build_pointer_type (rettype),
4115 fndecl, (pass == 0));
4116 }
4117 else
4118 {
4119 valreg = hard_function_value (rettype, fndecl, fntype,
4120 (pass == 0));
4121 if (CALL_WITH_BOUNDS_P (exp))
4122 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
4123 fndecl,
4124 (pass == 0));
4125 }
4126
4127 /* If VALREG is a PARALLEL whose first member has a zero
4128 offset, use that. This is for targets such as m68k that
4129 return the same value in multiple places. */
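  /* (The m68k, for instance, returns pointers in both %d0 and %a0;
  the PARALLEL lists both copies and either one is usable.) */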
4130 if (GET_CODE (valreg) == PARALLEL)
4131 {
4132 rtx elem = XVECEXP (valreg, 0, 0);
4133 rtx where = XEXP (elem, 0);
4134 rtx offset = XEXP (elem, 1);
4135 if (offset == const0_rtx
4136 && GET_MODE (where) == GET_MODE (valreg))
4137 valreg = where;
4138 }
4139 }
4140
4141 /* Store all bounds not passed in registers. */
4142 for (i = 0; i < num_actuals; i++)
4143 {
4144 if (POINTER_BOUNDS_P (args[i].tree_value)
4145 && !args[i].reg)
4146 store_bounds (&args[i],
4147 args[i].pointer_arg == -1
4148 ? NULL
4149 : &args[args[i].pointer_arg]);
4150 }
4151
4152 /* If register arguments require space on the stack and stack space
4153 was not preallocated, allocate stack space here for arguments
4154 passed in registers. */
4155 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4156 && !ACCUMULATE_OUTGOING_ARGS
4157 && must_preallocate == 0 && reg_parm_stack_space > 0)
4158 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4159
4160 /* Pass the function the address in which to return a
4161 structure value. */
4162 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4163 {
4164 structure_value_addr
4165 = convert_memory_address (Pmode, structure_value_addr);
4166 emit_move_insn (struct_value,
4167 force_reg (Pmode,
4168 force_operand (structure_value_addr,
4169 NULL_RTX)));
4170
4171 if (REG_P (struct_value))
4172 use_reg (&call_fusage, struct_value);
4173 }
4174
4175 after_args = get_last_insn ();
4176 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4177 static_chain_value, &call_fusage,
4178 reg_parm_seen, flags);
4179
4180 load_register_parameters (args, num_actuals, &call_fusage, flags,
4181 pass == 0, &sibcall_failure);
4182
4183 /* Save a pointer to the last insn before the call, so that we can
4184 later safely search backwards to find the CALL_INSN. */
4185 before_call = get_last_insn ();
4186
4187 /* Set up next argument register. For sibling calls on machines
4188 with register windows this should be the incoming register. */
4189 if (pass == 0)
4190 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
4191 VOIDmode,
4192 void_type_node,
4193 true);
4194 else
4195 next_arg_reg = targetm.calls.function_arg (args_so_far,
4196 VOIDmode, void_type_node,
4197 true);
4198
4199 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4200 {
4201 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4202 arg_nr = num_actuals - arg_nr - 1;
4203 if (arg_nr >= 0
4204 && arg_nr < num_actuals
4205 && args[arg_nr].reg
4206 && valreg
4207 && REG_P (valreg)
4208 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4209 call_fusage
4210 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4211 gen_rtx_SET (valreg, args[arg_nr].reg),
4212 call_fusage);
4213 }
4214 /* All arguments and registers used for the call must be set up by
4215 now! */
4216
4217 /* Stack must be properly aligned now. */
4218 gcc_assert (!pass
4219 || multiple_p (stack_pointer_delta,
4220 preferred_unit_stack_boundary));
4221
4222 /* Generate the actual call instruction. */
4223 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4224 adjusted_args_size.constant, struct_value_size,
4225 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4226 flags, args_so_far);
4227
4228 if (flag_ipa_ra)
4229 {
4230 rtx_call_insn *last;
4231 rtx datum = NULL_RTX;
4232 if (fndecl != NULL_TREE)
4233 {
4234 datum = XEXP (DECL_RTL (fndecl), 0);
4235 gcc_assert (datum != NULL_RTX
4236 && GET_CODE (datum) == SYMBOL_REF);
4237 }
4238 last = last_call_insn ();
4239 add_reg_note (last, REG_CALL_DECL, datum);
4240 }
4241
4242 /* If the call setup or the call itself overlaps with anything
4243 of the argument setup we probably clobbered our call address.
4244 In that case we can't do sibcalls. */
4245 if (pass == 0
4246 && check_sibcall_argument_overlap (after_args, 0, 0))
4247 sibcall_failure = 1;
4248
4249 /* If a non-BLKmode value is returned at the most significant end
4250 of a register, shift the register right by the appropriate amount
4251 and update VALREG accordingly. BLKmode values are handled by the
4252 group load/store machinery below. */
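  /* E.g. on a target whose return_in_msb hook is true, an HImode
  value handed back in the top half of a word-sized register must
  be shifted down to the least significant end before normal use. */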
4253 if (!structure_value_addr
4254 && !pcc_struct_value
4255 && TYPE_MODE (rettype) != VOIDmode
4256 && TYPE_MODE (rettype) != BLKmode
4257 && REG_P (valreg)
4258 && targetm.calls.return_in_msb (rettype))
4259 {
4260 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4261 sibcall_failure = 1;
4262 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4263 }
4264
4265 if (pass && (flags & ECF_MALLOC))
4266 {
4267 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4268 rtx_insn *last, *insns;
4269
4270 /* The return value from a malloc-like function is a pointer. */
4271 if (TREE_CODE (rettype) == POINTER_TYPE)
4272 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4273
4274 emit_move_insn (temp, valreg);
4275
4276 /* The return value from a malloc-like function cannot alias
4277 anything else. */
4278 last = get_last_insn ();
4279 add_reg_note (last, REG_NOALIAS, temp);
4280
4281 /* Write out the sequence. */
4282 insns = get_insns ();
4283 end_sequence ();
4284 emit_insn (insns);
4285 valreg = temp;
4286 }
4287
4288 /* For calls to `setjmp', etc., inform
4289 function.c:setjmp_warnings that it should complain if
4290 nonvolatile values are live. For functions that cannot
4291 return, inform flow that control does not fall through. */
4292
4293 if ((flags & ECF_NORETURN) || pass == 0)
4294 {
4295 /* The barrier must be emitted
4296 immediately after the CALL_INSN. Some ports emit more
4297 than just a CALL_INSN above, so we must search for it here. */
4298
4299 rtx_insn *last = get_last_insn ();
4300 while (!CALL_P (last))
4301 {
4302 last = PREV_INSN (last);
4303 /* There was no CALL_INSN? */
4304 gcc_assert (last != before_call);
4305 }
4306
4307 emit_barrier_after (last);
4308
4309 /* Stack adjustments after a noreturn call are dead code.
4310 However when NO_DEFER_POP is in effect, we must preserve
4311 stack_pointer_delta. */
4312 if (inhibit_defer_pop == 0)
4313 {
4314 stack_pointer_delta = old_stack_allocated;
4315 pending_stack_adjust = 0;
4316 }
4317 }
4318
4319 /* If value type not void, return an rtx for the value. */
4320
4321 if (TYPE_MODE (rettype) == VOIDmode
4322 || ignore)
4323 target = const0_rtx;
4324 else if (structure_value_addr)
4325 {
4326 if (target == 0 || !MEM_P (target))
4327 {
4328 target
4329 = gen_rtx_MEM (TYPE_MODE (rettype),
4330 memory_address (TYPE_MODE (rettype),
4331 structure_value_addr));
4332 set_mem_attributes (target, rettype, 1);
4333 }
4334 }
4335 else if (pcc_struct_value)
4336 {
4337 /* This is the special C++ case where we need to
4338 know what the true target was. We take care to
4339 never use this value more than once in one expression. */
4340 target = gen_rtx_MEM (TYPE_MODE (rettype),
4341 copy_to_reg (valreg));
4342 set_mem_attributes (target, rettype, 1);
4343 }
4344 /* Handle calls that return values in multiple non-contiguous locations.
4345 The Irix 6 ABI has examples of this. */
4346 else if (GET_CODE (valreg) == PARALLEL)
4347 {
4348 if (target == 0)
4349 target = emit_group_move_into_temps (valreg);
4350 else if (rtx_equal_p (target, valreg))
4351 ;
4352 else if (GET_CODE (target) == PARALLEL)
4353 /* Handle the result of an emit_group_move_into_temps
4354 call in the previous pass. */
4355 emit_group_move (target, valreg);
4356 else
4357 emit_group_store (target, valreg, rettype,
4358 int_size_in_bytes (rettype));
4359 }
4360 else if (target
4361 && GET_MODE (target) == TYPE_MODE (rettype)
4362 && GET_MODE (target) == GET_MODE (valreg))
4363 {
4364 bool may_overlap = false;
4365
4366 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4367 reg to a plain register. */
4368 if (!REG_P (target) || HARD_REGISTER_P (target))
4369 valreg = avoid_likely_spilled_reg (valreg);
4370
4371 /* If TARGET is a MEM in the argument area, and we have
4372 saved part of the argument area, then we can't store
4373 directly into TARGET as it may get overwritten when we
4374 restore the argument save area below. Don't work too
4375 hard though and simply force TARGET to a register if it
4376 is a MEM; the optimizer is quite likely to sort it out. */
4377 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4378 for (i = 0; i < num_actuals; i++)
4379 if (args[i].save_area)
4380 {
4381 may_overlap = true;
4382 break;
4383 }
4384
4385 if (may_overlap)
4386 target = copy_to_reg (valreg);
4387 else
4388 {
4389 /* TARGET and VALREG cannot be equal at this point
4390 because the latter would not have
4391 REG_FUNCTION_VALUE_P true, while the former would if
4392 it were referring to the same register.
4393
4394 If they refer to the same register, this move will be
4395 a no-op, except when function inlining is being
4396 done. */
4397 emit_move_insn (target, valreg);
4398
4399 /* If we are setting a MEM, this code must be executed.
4400 Since it is emitted after the call insn, sibcall
4401 optimization cannot be performed in that case. */
4402 if (MEM_P (target))
4403 sibcall_failure = 1;
4404 }
4405 }
4406 else
4407 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4408
4409 /* If we promoted this return value, make the proper SUBREG.
4410 TARGET might be const0_rtx here, so be careful. */
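  /* E.g. a `short' result that the ABI promotes to SImode comes back
  in an SImode register; the lowpart SUBREG built below presents it
  as HImode again, while SUBREG_PROMOTED_VAR_P records that the
  upper bits already hold a valid extension. */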
4411 if (REG_P (target)
4412 && TYPE_MODE (rettype) != BLKmode
4413 && GET_MODE (target) != TYPE_MODE (rettype))
4414 {
4415 tree type = rettype;
4416 int unsignedp = TYPE_UNSIGNED (type);
4417 machine_mode pmode;
4418
4419 /* Ensure we promote as expected, and get the new unsignedness. */
4420 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4421 funtype, 1);
4422 gcc_assert (GET_MODE (target) == pmode);
4423
4424 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4425 GET_MODE (target));
4426 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4427 SUBREG_PROMOTED_VAR_P (target) = 1;
4428 SUBREG_PROMOTED_SET (target, unsignedp);
4429 }
4430
4431 /* If size of args is variable or this was a constructor call for a stack
4432 argument, restore saved stack-pointer value. */
4433
4434 if (old_stack_level)
4435 {
4436 rtx_insn *prev = get_last_insn ();
4437
4438 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4439 stack_pointer_delta = old_stack_pointer_delta;
4440
4441 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4442
4443 pending_stack_adjust = old_pending_adj;
4444 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4445 stack_arg_under_construction = old_stack_arg_under_construction;
4446 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4447 stack_usage_map = initial_stack_usage_map;
4448 stack_usage_watermark = initial_stack_usage_watermark;
4449 sibcall_failure = 1;
4450 }
4451 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4452 {
4453 #ifdef REG_PARM_STACK_SPACE
4454 if (save_area)
4455 restore_fixed_argument_area (save_area, argblock,
4456 high_to_save, low_to_save);
4457 #endif
4458
4459 /* If we saved any argument areas, restore them. */
4460 for (i = 0; i < num_actuals; i++)
4461 if (args[i].save_area)
4462 {
4463 machine_mode save_mode = GET_MODE (args[i].save_area);
4464 rtx stack_area
4465 = gen_rtx_MEM (save_mode,
4466 memory_address (save_mode,
4467 XEXP (args[i].stack_slot, 0)));
4468
4469 if (save_mode != BLKmode)
4470 emit_move_insn (stack_area, args[i].save_area);
4471 else
4472 emit_block_move (stack_area, args[i].save_area,
4473 (gen_int_mode
4474 (args[i].locate.size.constant, Pmode)),
4475 BLOCK_OP_CALL_PARM);
4476 }
4477
4478 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4479 stack_usage_map = initial_stack_usage_map;
4480 stack_usage_watermark = initial_stack_usage_watermark;
4481 }
4482
4483 /* If this was alloca, record the new stack level. */
4484 if (flags & ECF_MAY_BE_ALLOCA)
4485 record_new_stack_level ();
4486
4487 /* Free up storage we no longer need. */
4488 for (i = 0; i < num_actuals; ++i)
4489 free (args[i].aligned_regs);
4490
4491 targetm.calls.end_call_args ();
4492
4493 insns = get_insns ();
4494 end_sequence ();
4495
4496 if (pass == 0)
4497 {
4498 tail_call_insns = insns;
4499
4500 /* Restore the pending stack adjustment now that we have
4501 finished generating the sibling call sequence. */
4502
4503 restore_pending_stack_adjust (&save);
4504
4505 /* Prepare arg structure for next iteration. */
4506 for (i = 0; i < num_actuals; i++)
4507 {
4508 args[i].value = 0;
4509 args[i].aligned_regs = 0;
4510 args[i].stack = 0;
4511 }
4512
4513 sbitmap_free (stored_args_map);
4514 internal_arg_pointer_exp_state.scan_start = NULL;
4515 internal_arg_pointer_exp_state.cache.release ();
4516 }
4517 else
4518 {
4519 normal_call_insns = insns;
4520
4521 /* Verify that we've deallocated all the stack we used. */
4522 gcc_assert ((flags & ECF_NORETURN)
4523 || known_eq (old_stack_allocated,
4524 stack_pointer_delta
4525 - pending_stack_adjust));
4526 }
4527
4528 /* If something prevents making this a sibling call,
4529 zero out the sequence. */
4530 if (sibcall_failure)
4531 tail_call_insns = NULL;
4532 else
4533 break;
4534 }
4535
4536 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4537 arguments too, as argument area is now clobbered by the call. */
4538 if (tail_call_insns)
4539 {
4540 emit_insn (tail_call_insns);
4541 crtl->tail_call_emit = true;
4542 }
4543 else
4544 {
4545 emit_insn (normal_call_insns);
4546 if (try_tail_call)
4547 /* Ideally we'd emit a message for all of the ways that it could
4548 have failed. */
4549 maybe_complain_about_tail_call (exp, "tail call production failed");
4550 }
4551
4552 currently_expanding_call--;
4553
4554 free (stack_usage_map_buf);
4555 free (args);
4556
4557 /* Join result with returned bounds so caller may use them if needed. */
4558 target = chkp_join_splitted_slot (target, valbnd);
4559
4560 return target;
4561 }
4562
4563 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4564 this function's incoming arguments.
4565
4566 At the start of RTL generation we know the only REG_EQUIV notes
4567 in the rtl chain are those for incoming arguments, so we can look
4568 for REG_EQUIV notes between the start of the function and the
4569 NOTE_INSN_FUNCTION_BEG.
4570
4571 This is (slight) overkill. We could keep track of the highest
4572 argument we clobber and be more selective in removing notes, but it
4573 does not seem to be worth the effort. */
4574
4575 void
4576 fixup_tail_calls (void)
4577 {
4578 rtx_insn *insn;
4579
4580 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4581 {
4582 rtx note;
4583
4584 /* There are never REG_EQUIV notes for the incoming arguments
4585 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4586 if (NOTE_P (insn)
4587 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4588 break;
4589
4590 note = find_reg_note (insn, REG_EQUIV, 0);
4591 if (note)
4592 remove_note (insn, note);
4593 note = find_reg_note (insn, REG_EQUIV, 0);
4594 gcc_assert (!note);
4595 }
4596 }
4597
4598 /* Traverse a list of TYPES and expand all complex types into their
4599 components. */
4600 static tree
4601 split_complex_types (tree types)
4602 {
4603 tree p;
4604
4605 /* Before allocating memory, check for the common case of no complex. */
4606 for (p = types; p; p = TREE_CHAIN (p))
4607 {
4608 tree type = TREE_VALUE (p);
4609 if (TREE_CODE (type) == COMPLEX_TYPE
4610 && targetm.calls.split_complex_arg (type))
4611 goto found;
4612 }
4613 return types;
4614
4615 found:
4616 types = copy_list (types);
4617
4618 for (p = types; p; p = TREE_CHAIN (p))
4619 {
4620 tree complex_type = TREE_VALUE (p);
4621
4622 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4623 && targetm.calls.split_complex_arg (complex_type))
4624 {
4625 tree next, imag;
4626
4627 /* Rewrite complex type with component type. */
4628 TREE_VALUE (p) = TREE_TYPE (complex_type);
4629 next = TREE_CHAIN (p);
4630
4631 /* Add another component type for the imaginary part. */
4632 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4633 TREE_CHAIN (p) = imag;
4634 TREE_CHAIN (imag) = next;
4635
4636 /* Skip the newly created node. */
4637 p = TREE_CHAIN (p);
4638 }
4639 }
4640
4641 return types;
4642 }
4643 \f
4644 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4645 for a value of mode OUTMODE,
4646 with NARGS different arguments, passed as ARGS.
4647 Store the return value if RETVAL is nonzero: store it in VALUE if
4648 VALUE is nonnull, otherwise pick a convenient location. In either
4649 case return the location of the stored value.
4650
4651 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4652 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4653 other types of library calls. */
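  /* A sketch of typical use (OP0/OP1 are hypothetical DImode rtxes
  and MULFN a hypothetical SYMBOL_REF for a "__muldi3"-style helper;
  real callers normally go through the emit_library_call* wrappers,
  which build the rtx_mode_t array):

  rtx_mode_t ops[] = { rtx_mode_t (OP0, DImode),
  rtx_mode_t (OP1, DImode) };
  rtx res = emit_library_call_value_1 (1, MULFN, NULL_RTX,
  LCT_CONST, DImode, 2, ops); */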
4654
4655 rtx
4656 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4657 enum libcall_type fn_type,
4658 machine_mode outmode, int nargs, rtx_mode_t *args)
4659 {
4660 /* Total size in bytes of all the stack-parms scanned so far. */
4661 struct args_size args_size;
4662 /* Size of arguments before any adjustments (such as rounding). */
4663 struct args_size original_args_size;
4664 int argnum;
4665 rtx fun;
4666 /* TODO: choose the correct decl type of orgfun. Sadly this information
4667 isn't present here, so we default to the native calling ABI. */
4668 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4669 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4670 int count;
4671 rtx argblock = 0;
4672 CUMULATIVE_ARGS args_so_far_v;
4673 cumulative_args_t args_so_far;
4674 struct arg
4675 {
4676 rtx value;
4677 machine_mode mode;
4678 rtx reg;
4679 int partial;
4680 struct locate_and_pad_arg_data locate;
4681 rtx save_area;
4682 };
4683 struct arg *argvec;
4684 int old_inhibit_defer_pop = inhibit_defer_pop;
4685 rtx call_fusage = 0;
4686 rtx mem_value = 0;
4687 rtx valreg;
4688 int pcc_struct_value = 0;
4689 poly_int64 struct_value_size = 0;
4690 int flags;
4691 int reg_parm_stack_space = 0;
4692 poly_int64 needed;
4693 rtx_insn *before_call;
4694 bool have_push_fusage;
4695 tree tfom; /* type_for_mode (outmode, 0) */
4696
4697 #ifdef REG_PARM_STACK_SPACE
4698 /* Define the boundary of the register parm stack space that needs to be
4699 saved, if any. */
4700 int low_to_save = 0, high_to_save = 0;
4701 rtx save_area = 0; /* Place that it is saved. */
4702 #endif
4703
4704 /* Record the existing stack usage so it can be restored after the call. */
4705 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4706 char *initial_stack_usage_map = stack_usage_map;
4707 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
4708 char *stack_usage_map_buf = NULL;
4709
4710 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4711
4712 #ifdef REG_PARM_STACK_SPACE
4713 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4714 #endif
4715
4716 /* By default, library functions cannot throw. */
4717 flags = ECF_NOTHROW;
4718
4719 switch (fn_type)
4720 {
4721 case LCT_NORMAL:
4722 break;
4723 case LCT_CONST:
4724 flags |= ECF_CONST;
4725 break;
4726 case LCT_PURE:
4727 flags |= ECF_PURE;
4728 break;
4729 case LCT_NORETURN:
4730 flags |= ECF_NORETURN;
4731 break;
4732 case LCT_THROW:
4733 flags &= ~ECF_NOTHROW;
4734 break;
4735 case LCT_RETURNS_TWICE:
4736 flags = ECF_RETURNS_TWICE;
4737 break;
4738 }
4739 fun = orgfun;
4740
4741 /* Ensure current function's preferred stack boundary is at least
4742 what we need. */
4743 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4744 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4745
4746 /* If this kind of value comes back in memory,
4747 decide where in memory it should come back. */
4748 if (outmode != VOIDmode)
4749 {
4750 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4751 if (aggregate_value_p (tfom, 0))
4752 {
4753 #ifdef PCC_STATIC_STRUCT_RETURN
4754 rtx pointer_reg
4755 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4756 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4757 pcc_struct_value = 1;
4758 if (value == 0)
4759 value = gen_reg_rtx (outmode);
4760 #else /* not PCC_STATIC_STRUCT_RETURN */
4761 struct_value_size = GET_MODE_SIZE (outmode);
4762 if (value != 0 && MEM_P (value))
4763 mem_value = value;
4764 else
4765 mem_value = assign_temp (tfom, 1, 1);
4766 #endif
4767 /* This call returns a big structure. */
4768 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4769 }
4770 }
4771 else
4772 tfom = void_type_node;
4773
4774 /* ??? Unfinished: must pass the memory address as an argument. */
4775
4776 /* Copy all the libcall-arguments out of the varargs data
4777 and into a vector ARGVEC.
4778
4779 Compute how to pass each argument. We only support a very small subset
4780 of the full argument passing conventions to limit complexity here since
4781 library functions shouldn't have many args. */
4782
4783 argvec = XALLOCAVEC (struct arg, nargs + 1);
4784 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4785
4786 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4787 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4788 #else
4789 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4790 #endif
4791 args_so_far = pack_cumulative_args (&args_so_far_v);
4792
4793 args_size.constant = 0;
4794 args_size.var = 0;
4795
4796 count = 0;
4797
4798 push_temp_slots ();
4799
4800 /* If there's a structure value address to be passed,
4801 either pass it in the special place, or pass it as an extra argument. */
4802 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4803 {
4804 rtx addr = XEXP (mem_value, 0);
4805
4806 nargs++;
4807
4808 /* Make sure it is a reasonable operand for a move or push insn. */
4809 if (!REG_P (addr) && !MEM_P (addr)
4810 && !(CONSTANT_P (addr)
4811 && targetm.legitimate_constant_p (Pmode, addr)))
4812 addr = force_operand (addr, NULL_RTX);
4813
4814 argvec[count].value = addr;
4815 argvec[count].mode = Pmode;
4816 argvec[count].partial = 0;
4817
4818 argvec[count].reg = targetm.calls.function_arg (args_so_far,
4819 Pmode, NULL_TREE, true);
4820 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
4821 NULL_TREE, 1) == 0);
4822
4823 locate_and_pad_parm (Pmode, NULL_TREE,
4824 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4825 1,
4826 #else
4827 argvec[count].reg != 0,
4828 #endif
4829 reg_parm_stack_space, 0,
4830 NULL_TREE, &args_size, &argvec[count].locate);
4831
4832 if (argvec[count].reg == 0 || argvec[count].partial != 0
4833 || reg_parm_stack_space > 0)
4834 args_size.constant += argvec[count].locate.size.constant;
4835
4836 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
4837
4838 count++;
4839 }
4840
4841 for (unsigned int i = 0; count < nargs; i++, count++)
4842 {
4843 rtx val = args[i].first;
4844 machine_mode mode = args[i].second;
4845 int unsigned_p = 0;
4846
4847 /* We cannot convert the arg value to the mode the library wants here;
4848 must do it earlier where we know the signedness of the arg. */
4849 gcc_assert (mode != BLKmode
4850 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
4851
4852 /* Make sure it is a reasonable operand for a move or push insn. */
4853 if (!REG_P (val) && !MEM_P (val)
4854 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
4855 val = force_operand (val, NULL_RTX);
4856
4857 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
4858 {
4859 rtx slot;
4860 int must_copy
4861 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
4862
4863 /* If this was a CONST function, it is now PURE since it now
4864 reads memory. */
4865 if (flags & ECF_CONST)
4866 {
4867 flags &= ~ECF_CONST;
4868 flags |= ECF_PURE;
4869 }
4870
4871 if (MEM_P (val) && !must_copy)
4872 {
4873 tree val_expr = MEM_EXPR (val);
4874 if (val_expr)
4875 mark_addressable (val_expr);
4876 slot = val;
4877 }
4878 else
4879 {
4880 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4881 1, 1);
4882 emit_move_insn (slot, val);
4883 }
4884
4885 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4886 gen_rtx_USE (VOIDmode, slot),
4887 call_fusage);
4888 if (must_copy)
4889 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4890 gen_rtx_CLOBBER (VOIDmode,
4891 slot),
4892 call_fusage);
4893
4894 mode = Pmode;
4895 val = force_operand (XEXP (slot, 0), NULL_RTX);
4896 }
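  /* For example, a target that passes TFmode values by reference
  gets a stack temporary here, and what is actually passed (and
  advanced past) is the temporary's Pmode address. */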
4897
4898 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4899 argvec[count].mode = mode;
4900 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4901 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
4902 NULL_TREE, true);
4903
4904 argvec[count].partial
4905 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
4906
4907 if (argvec[count].reg == 0
4908 || argvec[count].partial != 0
4909 || reg_parm_stack_space > 0)
4910 {
4911 locate_and_pad_parm (mode, NULL_TREE,
4912 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4913 1,
4914 #else
4915 argvec[count].reg != 0,
4916 #endif
4917 reg_parm_stack_space, argvec[count].partial,
4918 NULL_TREE, &args_size, &argvec[count].locate);
4919 args_size.constant += argvec[count].locate.size.constant;
4920 gcc_assert (!argvec[count].locate.size.var);
4921 }
4922 #ifdef BLOCK_REG_PADDING
4923 else
4924 /* The argument is passed entirely in registers. See at which
4925 end it should be padded. */
4926 argvec[count].locate.where_pad =
4927 BLOCK_REG_PADDING (mode, NULL_TREE,
4928 known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4929 #endif
4930
4931 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
4932 }
4933
4934 /* If this machine requires an external definition for library
4935 functions, write one out. */
4936 assemble_external_libcall (fun);
4937
4938 original_args_size = args_size;
4939 args_size.constant = (aligned_upper_bound (args_size.constant
4940 + stack_pointer_delta,
4941 STACK_BYTES)
4942 - stack_pointer_delta);
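  /* Worked example: with STACK_BYTES == 16, stack_pointer_delta == 8
  and 20 bytes of arguments, this yields align (28, 16) - 8 = 24
  bytes, so the pushes below leave the stack pointer 16-byte
  aligned at the call. */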
4943
4944 args_size.constant = upper_bound (args_size.constant,
4945 reg_parm_stack_space);
4946
4947 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4948 args_size.constant -= reg_parm_stack_space;
4949
4950 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4951 args_size.constant);
4952
4953 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4954 {
4955 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4956 current_function_pushed_stack_size
4957 = upper_bound (current_function_pushed_stack_size, pushed);
4958 }
4959
4960 if (ACCUMULATE_OUTGOING_ARGS)
4961 {
4962 /* Since the stack pointer will never be pushed, it is possible for
4963 the evaluation of a parm to clobber something we have already
4964 written to the stack. Since most function calls on RISC machines
4965 do not use the stack, this is uncommon, but must work correctly.
4966
4967 Therefore, we save any area of the stack that was already written
4968 and that we are using. Here we set up to do this by making a new
4969 stack usage map from the old one.
4970
4971 Another approach might be to try to reorder the argument
4972 evaluations to avoid this conflicting stack usage. */
4973
4974 needed = args_size.constant;
4975
4976 /* Since we will be writing into the entire argument area, the
4977 map must be allocated for its entire size, not just the part that
4978 is the responsibility of the caller. */
4979 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4980 needed += reg_parm_stack_space;
4981
4982 poly_int64 limit = needed;
4983 if (ARGS_GROW_DOWNWARD)
4984 limit += 1;
4985
4986 /* For polynomial sizes, this is the maximum possible size needed
4987 for arguments with a constant size and offset. */
4988 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4989 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4990 const_limit);
4991
4992 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4993 stack_usage_map = stack_usage_map_buf;
4994
4995 if (initial_highest_arg_in_use)
4996 memcpy (stack_usage_map, initial_stack_usage_map,
4997 initial_highest_arg_in_use);
4998
4999 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5000 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5001 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5002 needed = 0;
5003
5004 /* We must be careful to use virtual regs before they're instantiated,
5005 and real regs afterwards. Loop optimization, for example, can create
5006 new libcalls after we've instantiated the virtual regs, and if we
5007 use virtuals anyway, they won't match the rtl patterns. */
5008
5009 if (virtuals_instantiated)
5010 argblock = plus_constant (Pmode, stack_pointer_rtx,
5011 STACK_POINTER_OFFSET);
5012 else
5013 argblock = virtual_outgoing_args_rtx;
5014 }
5015 else
5016 {
5017 if (!PUSH_ARGS)
5018 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5019 }
5020
5021 /* We push args individually in reverse order, perform stack alignment
5022 before the first push (the last arg). */
5023 if (argblock == 0)
5024 anti_adjust_stack (gen_int_mode (args_size.constant
5025 - original_args_size.constant,
5026 Pmode));
5027
5028 argnum = nargs - 1;
5029
5030 #ifdef REG_PARM_STACK_SPACE
5031 if (ACCUMULATE_OUTGOING_ARGS)
5032 {
5033 /* The argument list is the property of the called routine and it
5034 may clobber it. If the fixed area has been used for previous
5035 parameters, we must save and restore it. */
5036 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5037 &low_to_save, &high_to_save);
5038 }
5039 #endif
5040
5041 /* When expanding a normal call, args are stored in push order,
5042 which is the reverse of what we have here. */
5043 bool any_regs = false;
5044 for (int i = nargs; i-- > 0; )
5045 if (argvec[i].reg != NULL_RTX)
5046 {
5047 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5048 any_regs = true;
5049 }
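/* The call_args hook is still invoked when no argument registers are
   used; PC serves as a conventional marker for that case.  */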
5050 if (!any_regs)
5051 targetm.calls.call_args (pc_rtx, NULL_TREE);
5052
5053 /* Push the args that need to be pushed. */
5054
5055 have_push_fusage = false;
5056
5057 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5058 are to be pushed. */
5059 for (count = 0; count < nargs; count++, argnum--)
5060 {
5061 machine_mode mode = argvec[argnum].mode;
5062 rtx val = argvec[argnum].value;
5063 rtx reg = argvec[argnum].reg;
5064 int partial = argvec[argnum].partial;
5065 unsigned int parm_align = argvec[argnum].locate.boundary;
5066 poly_int64 lower_bound = 0, upper_bound = 0;
5067
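/* Arguments passed entirely in registers are loaded in a later loop;
   everything else needs stack handling here.  */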
5068 if (! (reg != 0 && partial == 0))
5069 {
5070 rtx use;
5071
5072 if (ACCUMULATE_OUTGOING_ARGS)
5073 {
5074 /* If this is being stored into a pre-allocated, fixed-size,
5075 stack area, save any previous data at that location. */
5076
5077 if (ARGS_GROW_DOWNWARD)
5078 {
5079 /* The slot offset is negative, but we want to index
5080 stack_usage_map with positive values. */
5081 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5082 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
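/* For example, a 4-byte slot at offset -8 gives LOWER_BOUND 5 and
   UPPER_BOUND 9: the bytes at offsets -8..-5 map to indices 8..5,
   with UPPER_BOUND exclusive.  */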
5083 }
5084 else
5085 {
5086 lower_bound = argvec[argnum].locate.slot_offset.constant;
5087 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5088 }
5089
5090 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5091 reg_parm_stack_space))
5092 {
5093 /* We need to make a save area. */
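/* Prefer saving in an integer-mode pseudo; int_mode_for_size yields
   BLKmode when the region is too large for one, in which case we
   block-copy into a stack temporary instead.  */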
5094 poly_uint64 size
5095 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5096 machine_mode save_mode
5097 = int_mode_for_size (size, 1).else_blk ();
5098 rtx adr
5099 = plus_constant (Pmode, argblock,
5100 argvec[argnum].locate.offset.constant);
5101 rtx stack_area
5102 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5103
5104 if (save_mode == BLKmode)
5105 {
5106 argvec[argnum].save_area
5107 = assign_stack_temp (BLKmode,
5108 argvec[argnum].locate.size.constant);
5110
5111 emit_block_move (validize_mem
5112 (copy_rtx (argvec[argnum].save_area)),
5113 stack_area,
5114 (gen_int_mode
5115 (argvec[argnum].locate.size.constant,
5116 Pmode)),
5117 BLOCK_OP_CALL_PARM);
5118 }
5119 else
5120 {
5121 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5122
5123 emit_move_insn (argvec[argnum].save_area, stack_area);
5124 }
5125 }
5126 }
5127
5128 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5129 partial, reg, 0, argblock,
5130 (gen_int_mode
5131 (argvec[argnum].locate.offset.constant, Pmode)),
5132 reg_parm_stack_space,
5133 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5134
5135 /* Now mark the segment we just used. */
5136 if (ACCUMULATE_OUTGOING_ARGS)
5137 mark_stack_region_used (lower_bound, upper_bound);
5138
5139 NO_DEFER_POP;
5140
5141 /* Indicate argument access so that alias.c knows that these
5142 values are live. */
5143 if (argblock)
5144 use = plus_constant (Pmode, argblock,
5145 argvec[argnum].locate.offset.constant);
5146 else if (have_push_fusage)
5147 continue;
5148 else
5149 {
5150 /* When arguments are pushed, trying to tell alias.c where
5151 exactly this argument is won't work, because the
5152 auto-increment causes confusion. So we merely indicate
5153 that we access something with a known mode somewhere on
5154 the stack. */
5155 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5156 gen_rtx_SCRATCH (Pmode));
5157 have_push_fusage = true;
5158 }
5159 use = gen_rtx_MEM (argvec[argnum].mode, use);
5160 use = gen_rtx_USE (VOIDmode, use);
5161 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5162 }
5163 }
5164
5165 argnum = nargs - 1;
5166
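/* Put the callee address into a form the CALL pattern accepts,
   recording any register uses in CALL_FUSAGE.  */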
5167 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5168
5169 /* Now load any reg parms into their regs. */
5170
5171 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5172 are to be pushed. */
5173 for (count = 0; count < nargs; count++, argnum--)
5174 {
5175 machine_mode mode = argvec[argnum].mode;
5176 rtx val = argvec[argnum].value;
5177 rtx reg = argvec[argnum].reg;
5178 int partial = argvec[argnum].partial;
5179
5180 /* Handle calls that pass values in multiple non-contiguous
5181 locations. The PA64 has examples of this for library calls. */
5182 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5183 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5184 else if (reg != 0 && partial == 0)
5185 {
5186 emit_move_insn (reg, val);
5187 #ifdef BLOCK_REG_PADDING
5188 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5189
5190 /* Copied from load_register_parameters. */
5191
5192 /* Handle case where we have a value that needs shifting
5193 up to the msb, e.g. a QImode value when we're padding
5194 upward on a BYTES_BIG_ENDIAN machine. */
5195 if (known_lt (size, UNITS_PER_WORD)
5196 && (argvec[argnum].locate.where_pad
5197 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5198 {
5199 rtx x;
5200 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
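/* E.g. for a 1-byte value with 4-byte words, SHIFT is 24 bits,
   moving the value to the most significant end of the word.  */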
5201
5202 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5203 report the whole reg as used. Strictly speaking, the
5204 call only uses SIZE bytes at the msb end, but it doesn't
5205 seem worth generating rtl to say that. */
5206 reg = gen_rtx_REG (word_mode, REGNO (reg));
5207 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5208 if (x != reg)
5209 emit_move_insn (reg, x);
5210 }
5211 #endif
5212 }
5213
5214 NO_DEFER_POP;
5215 }
5216
5217 /* Any regs containing parms remain in use through the call. */
5218 for (count = 0; count < nargs; count++)
5219 {
5220 rtx reg = argvec[count].reg;
5221 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5222 use_group_regs (&call_fusage, reg);
5223 else if (reg != 0)
5224 {
5225 int partial = argvec[count].partial;
5226 if (partial)
5227 {
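/* PARTIAL counts the bytes passed in registers; it must be a whole
   number of words here (e.g. 8 bytes is 2 registers when
   UNITS_PER_WORD is 4).  */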
5228 int nregs;
5229 gcc_assert (partial % UNITS_PER_WORD == 0);
5230 nregs = partial / UNITS_PER_WORD;
5231 use_regs (&call_fusage, REGNO (reg), nregs);
5232 }
5233 else
5234 use_reg (&call_fusage, reg);
5235 }
5236 }
5237
5238 /* Pass the function the address in which to return a structure value. */
5239 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5240 {
5241 emit_move_insn (struct_value,
5242 force_reg (Pmode,
5243 force_operand (XEXP (mem_value, 0),
5244 NULL_RTX)));
5245 if (REG_P (struct_value))
5246 use_reg (&call_fusage, struct_value);
5247 }
5248
5249 /* Don't allow popping to be deferred, since then
5250 cse'ing of library calls could delete a call and leave the pop. */
5251 NO_DEFER_POP;
5252 valreg = (mem_value == 0 && outmode != VOIDmode
5253 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5254
5255 /* Stack must be properly aligned now. */
5256 gcc_assert (multiple_p (stack_pointer_delta,
5257 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5258
5259 before_call = get_last_insn ();
5260
5261 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5262 will set inhibit_defer_pop to that value. */
5263 /* The return type is needed to decide how many bytes the function pops.
5264 Signedness plays no role in that, so for simplicity, we pretend it's
5265 always signed. We also assume that the list of arguments passed has
5266 no impact, so we pretend it is unknown. */
5267
5268 emit_call_1 (fun, NULL,
5269 get_identifier (XSTR (orgfun, 0)),
5270 build_function_type (tfom, NULL_TREE),
5271 original_args_size.constant, args_size.constant,
5272 struct_value_size,
5273 targetm.calls.function_arg (args_so_far,
5274 VOIDmode, void_type_node, true),
5275 valreg,
5276 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5277
5278 if (flag_ipa_ra)
5279 {
5280 rtx datum = orgfun;
5281 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5282 rtx_call_insn *last = last_call_insn ();
5283 add_reg_note (last, REG_CALL_DECL, datum);
5284 }
5285
5286 /* Right-shift returned value if necessary. */
5287 if (!pcc_struct_value
5288 && TYPE_MODE (tfom) != BLKmode
5289 && targetm.calls.return_in_msb (tfom))
5290 {
5291 shift_return_value (TYPE_MODE (tfom), false, valreg);
5292 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5293 }
5294
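/* Tell the target that argument passing for this call, started by
   the call_args hooks above, is complete.  */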
5295 targetm.calls.end_call_args ();
5296
5297 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5298 that it should complain if nonvolatile values are live. For
5299 functions that cannot return, inform flow that control does not
5300 fall through. */
5301 if (flags & ECF_NORETURN)
5302 {
5303 /* The barrier note must be emitted
5304 immediately after the CALL_INSN. Some ports emit more than
5305 just a CALL_INSN above, so we must search for it here. */
5306 rtx_insn *last = get_last_insn ();
5307 while (!CALL_P (last))
5308 {
5309 last = PREV_INSN (last);
5310 /* There was no CALL_INSN? */
5311 gcc_assert (last != before_call);
5312 }
5313
5314 emit_barrier_after (last);
5315 }
5316
5317 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5318 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5319 if (flags & ECF_NOTHROW)
5320 {
5321 rtx_insn *last = get_last_insn ();
5322 while (!CALL_P (last))
5323 {
5324 last = PREV_INSN (last);
5325 /* There was no CALL_INSN? */
5326 gcc_assert (last != before_call);
5327 }
5328
5329 make_reg_eh_region_note_nothrow_nononlocal (last);
5330 }
5331
5332 /* Now restore inhibit_defer_pop to its actual original value. */
5333 OK_DEFER_POP;
5334
5335 pop_temp_slots ();
5336
5337 /* Copy the value to the right place. */
5338 if (outmode != VOIDmode && retval)
5339 {
5340 if (mem_value)
5341 {
5342 if (value == 0)
5343 value = mem_value;
5344 if (value != mem_value)
5345 emit_move_insn (value, mem_value);
5346 }
5347 else if (GET_CODE (valreg) == PARALLEL)
5348 {
5349 if (value == 0)
5350 value = gen_reg_rtx (outmode);
5351 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5352 }
5353 else
5354 {
5355 /* Convert to the proper mode if a promotion has been active. */
5356 if (GET_MODE (valreg) != outmode)
5357 {
5358 int unsignedp = TYPE_UNSIGNED (tfom);
5359
5360 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5361 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5362 == GET_MODE (valreg));
5363 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5364 }
5365
5366 if (value != 0)
5367 emit_move_insn (value, valreg);
5368 else
5369 value = valreg;
5370 }
5371 }
5372
5373 if (ACCUMULATE_OUTGOING_ARGS)
5374 {
5375 #ifdef REG_PARM_STACK_SPACE
5376 if (save_area)
5377 restore_fixed_argument_area (save_area, argblock,
5378 high_to_save, low_to_save);
5379 #endif
5380
5381 /* If we saved any argument areas, restore them. */
5382 for (count = 0; count < nargs; count++)
5383 if (argvec[count].save_area)
5384 {
5385 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5386 rtx adr = plus_constant (Pmode, argblock,
5387 argvec[count].locate.offset.constant);
5388 rtx stack_area = gen_rtx_MEM (save_mode,
5389 memory_address (save_mode, adr));
5390
5391 if (save_mode == BLKmode)
5392 emit_block_move (stack_area,
5393 validize_mem
5394 (copy_rtx (argvec[count].save_area)),
5395 (gen_int_mode
5396 (argvec[count].locate.size.constant, Pmode)),
5397 BLOCK_OP_CALL_PARM);
5398 else
5399 emit_move_insn (stack_area, argvec[count].save_area);
5400 }
5401
5402 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5403 stack_usage_map = initial_stack_usage_map;
5404 stack_usage_watermark = initial_stack_usage_watermark;
5405 }
5406
5407 free (stack_usage_map_buf);
5408
5409 return value;
5410 }
5412 \f
5413
5414 /* Store pointer bounds argument ARG into Bounds Table entry
5415 associated with PARM. */
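/* This is part of the pointer bounds checker (chkp) instrumentation;
   the actual store is performed by the target's store_bounds_for_arg
   hook.  */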
5416 static void
5417 store_bounds (struct arg_data *arg, struct arg_data *parm)
5418 {
5419 rtx slot = NULL, ptr = NULL, addr = NULL;
5420
5421 /* We may pass bounds not associated with any pointer. */
5422 if (!parm)
5423 {
5424 gcc_assert (arg->special_slot);
5425 slot = arg->special_slot;
5426 ptr = const0_rtx;
5427 }
5428 /* Otherwise find the pointer associated with the bounds and
5429 determine where it is passed. */
5430 else
5431 {
5432 if (!parm->reg)
5433 {
5434 gcc_assert (!arg->special_slot);
5435
5436 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
5437 }
5438 else if (REG_P (parm->reg))
5439 {
5440 gcc_assert (arg->special_slot);
5441 slot = arg->special_slot;
5442
5443 if (MEM_P (parm->value))
5444 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
5445 else if (REG_P (parm->value))
5446 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
5447 else
5448 {
5449 gcc_assert (!arg->pointer_offset);
5450 ptr = parm->value;
5451 }
5452 }
5453 else
5454 {
5455 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
5456
5457 gcc_assert (arg->special_slot);
5458 slot = arg->special_slot;
5459
5460 if (parm->parallel_value)
5461 ptr = chkp_get_value_with_offs (parm->parallel_value,
5462 GEN_INT (arg->pointer_offset));
5463 else
5464 gcc_unreachable ();
5465 }
5466 }
5467
5468 /* Expand bounds. */
5469 if (!arg->value)
5470 arg->value = expand_normal (arg->tree_value);
5471
5472 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
5473 }
5474
5475 /* Store a single argument for a function call
5476 into the register or memory area where it must be passed.
5477 *ARG describes the argument value and where to pass it.
5478
5479 ARGBLOCK is the address of the stack-block for all the arguments,
5480 or 0 on a machine where arguments are pushed individually.
5481
5482 FLAGS contains ECF_MAY_BE_ALLOCA if this could be a call to
5483 `alloca', so we must be careful about how the stack is used.
5484
5485 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5486 argument stack. When ACCUMULATE_OUTGOING_ARGS is set, this indicates
5487 that we need not worry about saving and restoring the stack.
5488
5489 FNDECL is the declaration of the function we are calling.
5490
5491 Return nonzero if this arg should cause sibcall failure,
5492 zero otherwise. */
5493
5494 static int
5495 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5496 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5497 {
5498 tree pval = arg->tree_value;
5499 rtx reg = 0;
5500 int partial = 0;
5501 poly_int64 used = 0;
5502 poly_int64 lower_bound = 0, upper_bound = 0;
5503 int sibcall_failure = 0;
5504
5505 if (TREE_CODE (pval) == ERROR_MARK)
5506 return 1;
5507
5508 /* Push a new temporary level for any temporaries we make for
5509 this argument. */
5510 push_temp_slots ();
5511
5512 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5513 {
5514 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5515 save any previous data at that location. */
5516 if (argblock && ! variable_size && arg->stack)
5517 {
5518 if (ARGS_GROW_DOWNWARD)
5519 {
5520 /* stack_slot is negative, but we want to index stack_usage_map
5521 with positive values. */
5522 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5523 {
5524 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5525 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5526 }
5527 else
5528 upper_bound = 0;
5529
5530 lower_bound = upper_bound - arg->locate.size.constant;
5531 }
5532 else
5533 {
5534 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5535 {
5536 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5537 lower_bound = rtx_to_poly_int64 (offset);
5538 }
5539 else
5540 lower_bound = 0;
5541
5542 upper_bound = lower_bound + arg->locate.size.constant;
5543 }
5544
5545 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5546 reg_parm_stack_space))
5547 {
5548 /* We need to make a save area. */
5549 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5550 machine_mode save_mode
5551 = int_mode_for_size (size, 1).else_blk ();
5552 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5553 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5554
5555 if (save_mode == BLKmode)
5556 {
5557 arg->save_area
5558 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5559 preserve_temp_slots (arg->save_area);
5560 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5561 stack_area,
5562 (gen_int_mode
5563 (arg->locate.size.constant, Pmode)),
5564 BLOCK_OP_CALL_PARM);
5565 }
5566 else
5567 {
5568 arg->save_area = gen_reg_rtx (save_mode);
5569 emit_move_insn (arg->save_area, stack_area);
5570 }
5571 }
5572 }
5573 }
5574
5575 /* If this isn't going to be placed on both the stack and in registers,
5576 set up the register and number of words. */
5577 if (! arg->pass_on_stack)
5578 {
5579 if (flags & ECF_SIBCALL)
5580 reg = arg->tail_call_reg;
5581 else
5582 reg = arg->reg;
5583 partial = arg->partial;
5584 }
5585
5586 /* An argument passed entirely in a register never reaches this
5587 function; assert that we weren't called for one. */
5588 gcc_assert (reg == 0 || partial != 0);
5589
5590 /* If this arg needs special alignment, don't load the registers
5591 here. */
5592 if (arg->n_aligned_regs != 0)
5593 reg = 0;
5594
5595 /* If this is being passed partially in a register, we can't evaluate
5596 it directly into its stack slot. Otherwise, we can. */
5597 if (arg->value == 0)
5598 {
5599 /* stack_arg_under_construction is nonzero if a function argument is
5600 being evaluated directly into the outgoing argument list and
5601 expand_call must take special action to preserve the argument list
5602 if it is called recursively.
5603
5604 For scalar function arguments stack_usage_map is sufficient to
5605 determine which stack slots must be saved and restored. Scalar
5606 arguments in general have pass_on_stack == 0.
5607
5608 If this argument is initialized by a function which takes the
5609 address of the argument (a C++ constructor or a C function
5610 returning a BLKmode structure), then stack_usage_map is
5611 insufficient and expand_call must push the stack around the
5612 function call. Such arguments have pass_on_stack == 1.
5613
5614 Note that it is always safe to set stack_arg_under_construction,
5615 but this generates suboptimal code if set when not needed. */
5616
5617 if (arg->pass_on_stack)
5618 stack_arg_under_construction++;
5619
5620 arg->value = expand_expr (pval,
5621 (partial
5622 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5623 ? NULL_RTX : arg->stack,
5624 VOIDmode, EXPAND_STACK_PARM);
5625
5626 /* If the mode doesn't agree (because we are promoting the object,
5627 or for any other reason), convert the value to the expected mode. */
5628
5629 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5630 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5631 arg->value, arg->unsignedp);
5632
5633 if (arg->pass_on_stack)
5634 stack_arg_under_construction--;
5635 }
5636
5637 /* Check for overlap with already clobbered argument area. */
5638 if ((flags & ECF_SIBCALL)
5639 && MEM_P (arg->value)
5640 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5641 arg->locate.size.constant))
5642 sibcall_failure = 1;
5643
5644 /* Don't allow anything left on stack from computation
5645 of argument to alloca. */
5646 if (flags & ECF_MAY_BE_ALLOCA)
5647 do_pending_stack_adjust ();
5648
5649 if (arg->value == arg->stack)
5650 /* If the value is already in the stack slot, we are done. */
5651 ;
5652 else if (arg->mode != BLKmode)
5653 {
5654 unsigned int parm_align;
5655
5656 /* Argument is a scalar, not entirely passed in registers.
5657 (If part is passed in registers, arg->partial says how much
5658 and emit_push_insn will take care of putting it there.)
5659
5660 Push it, and if its size is less than the
5661 amount of space allocated to it,
5662 also bump stack pointer by the additional space.
5663 Note that in C the default argument promotions
5664 will prevent such mismatches. */
5665
5666 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5667 ? 0 : GET_MODE_SIZE (arg->mode));
5668
5669 /* Compute how much space the push instruction will push.
5670 On many machines, pushing a byte will advance the stack
5671 pointer by a halfword. */
5672 #ifdef PUSH_ROUNDING
5673 size = PUSH_ROUNDING (size);
5674 #endif
5675 used = size;
5676
5677 /* Compute how much space the argument should get:
5678 round up to a multiple of the alignment for arguments. */
5679 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5680 != PAD_NONE)
5681 /* At the moment we don't (need to) support ABIs for which the
5682 padding isn't known at compile time. In principle it should
5683 be easy to add though. */
5684 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
5685
5686 /* Compute the alignment of the pushed argument. */
5687 parm_align = arg->locate.boundary;
5688 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5689 == PAD_DOWNWARD)
5690 {
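/* With downward padding the value sits PAD bytes above the start of
   the slot, so its alignment is limited by the known alignment of
   PAD; known_alignment gives the largest power of two known to
   divide PAD.  */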
5691 poly_int64 pad = used - size;
5692 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5693 if (pad_align != 0)
5694 parm_align = MIN (parm_align, pad_align);
5695 }
5696
5697 /* This isn't already where we want it on the stack, so put it there.
5698 This can either be done with push or copy insns. */
5699 if (maybe_ne (used, 0)
5700 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5701 NULL_RTX, parm_align, partial, reg, used - size,
5702 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5703 reg_parm_stack_space,
5704 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5705 sibcall_failure = 1;
5706
5707 /* Unless this is a partially-in-register argument, the argument is now
5708 in the stack. */
5709 if (partial == 0)
5710 arg->value = arg->stack;
5711 }
5712 else
5713 {
5714 /* BLKmode, at least partly to be pushed. */
5715
5716 unsigned int parm_align;
5717 poly_int64 excess;
5718 rtx size_rtx;
5719
5720 /* Pushing a nonscalar.
5721 If part is passed in registers, PARTIAL says how much
5722 and emit_push_insn will take care of putting it there. */
5723
5724 /* Round its size up to a multiple
5725 of the allocation unit for arguments. */
5726
5727 if (arg->locate.size.var != 0)
5728 {
5729 excess = 0;
5730 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
5731 }
5732 else
5733 {
5734 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5735 for BLKmode is careful to avoid it. */
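/* EXCESS is the number of bytes by which the slot exceeds the part
   of the value pushed on the stack: slot size minus value size, plus
   the PARTIAL bytes passed in registers instead.  */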
5736 excess = (arg->locate.size.constant
5737 - arg_int_size_in_bytes (TREE_TYPE (pval))
5738 + partial);
5739 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
5740 NULL_RTX, TYPE_MODE (sizetype),
5741 EXPAND_NORMAL);
5742 }
5743
5744 parm_align = arg->locate.boundary;
5745
5746 /* When an argument is padded down, the block is aligned to
5747 PARM_BOUNDARY, but the actual argument isn't. */
5748 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5749 == PAD_DOWNWARD)
5750 {
5751 if (arg->locate.size.var)
5752 parm_align = BITS_PER_UNIT;
5753 else
5754 {
5755 unsigned int excess_align
5756 = known_alignment (excess) * BITS_PER_UNIT;
5757 if (excess_align != 0)
5758 parm_align = MIN (parm_align, excess_align);
5759 }
5760 }
5761
5762 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
5763 {
5764 /* emit_push_insn might not work properly if arg->value and
5765 argblock + arg->locate.offset areas overlap. */
5766 rtx x = arg->value;
5767 poly_int64 i = 0;
5768
5769 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
5770 || (GET_CODE (XEXP (x, 0)) == PLUS
5771 && (XEXP (XEXP (x, 0), 0)
5772 == crtl->args.internal_arg_pointer)
5773 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
5774 {
5775 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
5776 i = rtx_to_poly_int64 (XEXP (XEXP (x, 0), 1));
5777
5778 /* arg->locate doesn't contain the pretend_args_size offset,
5779 it's part of argblock. Ensure we don't count it in I. */
5780 if (STACK_GROWS_DOWNWARD)
5781 i -= crtl->args.pretend_args_size;
5782 else
5783 i += crtl->args.pretend_args_size;
5784
5785 /* expand_call should ensure this. */
5786 gcc_assert (!arg->locate.offset.var
5787 && arg->locate.size.var == 0);
5788 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
5789
5790 if (known_eq (arg->locate.offset.constant, i))
5791 {
5792 /* Even though they appear to be at the same location,
5793 if part of the outgoing argument is in registers,
5794 they aren't really at the same location. Check for
5795 this by making sure that the incoming size is the
5796 same as the outgoing size. */
5797 if (maybe_ne (arg->locate.size.constant, size_val))
5798 sibcall_failure = 1;
5799 }
5800 else if (maybe_in_range_p (arg->locate.offset.constant,
5801 i, size_val))
5802 sibcall_failure = 1;
5803 /* Use arg->locate.size.constant instead of size_rtx
5804 because we only care about the part of the argument
5805 on the stack. */
5806 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5807 arg->locate.size.constant))
5808 sibcall_failure = 1;
5809 }
5810 }
5811
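/* Skip the push entirely if the argument is known to occupy no
   stack space.  */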
5812 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5813 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5814 parm_align, partial, reg, excess, argblock,
5815 ARGS_SIZE_RTX (arg->locate.offset),
5816 reg_parm_stack_space,
5817 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
5818
5819 /* Unless this is a partially-in-register argument, the argument is now
5820 in the stack.
5821
5822 ??? Unlike the case above, in which we want the actual
5823 address of the data, so that we can load it directly into a
5824 register, here we want the address of the stack slot, so that
5825 it's properly aligned for word-by-word copying or something
5826 like that. It's not clear that this is always correct. */
5827 if (partial == 0)
5828 arg->value = arg->stack_slot;
5829 }
5830
5831 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5832 {
5833 tree type = TREE_TYPE (arg->tree_value);
5834 arg->parallel_value
5835 = emit_group_load_into_temps (arg->reg, arg->value, type,
5836 int_size_in_bytes (type));
5837 }
5838
5839 /* Mark all slots this store used. */
5840 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5841 && argblock && ! variable_size && arg->stack)
5842 mark_stack_region_used (lower_bound, upper_bound);
5843
5844 /* Once we have pushed something, pops can't safely
5845 be deferred during the rest of the arguments. */
5846 NO_DEFER_POP;
5847
5848 /* Free any temporary slots made in processing this argument. */
5849 pop_temp_slots ();
5850
5851 return sibcall_failure;
5852 }
5853
5854 /* Return true if we do not know how to pass TYPE solely in registers. */
5855
5856 bool
5857 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
5858 const_tree type)
5859 {
5860 if (!type)
5861 return false;
5862
5863 /* If the type has variable size... */
5864 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5865 return true;
5866
5867 /* If the type is marked as addressable (it is required
5868 to be constructed into the stack)... */
5869 if (TREE_ADDRESSABLE (type))
5870 return true;
5871
5872 return false;
5873 }
5874
5875 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
5876 takes trailing padding of a structure into account. */
5877 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
5878
5879 bool
5880 must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
5881 {
5882 if (!type)
5883 return false;
5884
5885 /* If the type has variable size... */
5886 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5887 return true;
5888
5889 /* If the type is marked as addressable (it is required
5890 to be constructed into the stack)... */
5891 if (TREE_ADDRESSABLE (type))
5892 return true;
5893
5894 if (TYPE_EMPTY_P (type))
5895 return false;
5896
5897 /* If the padding and mode of the type is such that a copy into
5898 a register would put it into the wrong part of the register. */
5899 if (mode == BLKmode
5900 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5901 && (targetm.calls.function_arg_padding (mode, type)
5902 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5903 return true;
5904
5905 return false;
5906 }
5907
5908 /* Tell the garbage collector about GTY markers in this source file. */
5909 #include "gt-calls.h"