/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
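
/* Illustrative example (not part of the original source): on a target
   where PREFERRED_STACK_BOUNDARY is 128 bits and BITS_PER_UNIT is 8,
   as on x86-64, STACK_BYTES evaluates to 128 / 8 = 16, so outgoing
   argument block sizes are rounded up to multiples of 16 bytes.  */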

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
			   unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}
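
/* Illustrative sketch (not part of the original source): the map and
   the watermark together form one conservative structure.  A region
   with a constant upper bound is recorded byte-by-byte in
   stack_usage_map; a region whose upper bound is not a compile-time
   constant (e.g. a poly_int size on a variable-width vector target)
   instead lowers stack_usage_watermark, after which
   stack_region_maybe_used_p reports every byte at or above that index
   as in use.  */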

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* FUNEXP can be a SYMBOL_REF that represents a function pointer of
	 ptr_mode.  In that case, convert it into address mode so that it
	 is a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}
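
/* Illustrative sketch (not part of the original source), assuming a
   target where targetm.calls.custom_function_descriptors is 1: with
   -fno-trampolines, a pointer to a nested function really points to a
   two-word descriptor { static chain, code address } and has its low
   bit set.  The descriptor code emitted above behaves like:

     if (funexp & 1)                 // descriptor, not plain code?
       {
	 chain  = *(void **) (funexp - 1);                   // word 0
	 funexp = *(void **) (funexp - 1 + sizeof (void *)); // word 1
       }
     // fall through to the ordinary call with FUNEXP and CHAIN loaded

   Ordinary function pointers have the bit clear and skip the loads.  */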

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
				 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     poly_int64 stack_size ATTRIBUTE_UNUSED,
	     poly_int64 rounded_stack_size,
	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg,
				   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a const or pure call that may loop, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realign must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     a REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
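
/* Illustrative example (not part of the original source): the leading
   character of the internal "fn spec" attribute string encodes the
   return value.  A spec beginning with '1' marks a function that
   returns its first argument, yielding ERF_RETURNS_ARG with argument
   index 0 (digits '2'..'4' name later arguments); 'm' marks a
   malloc-like function whose result aliases nothing (ERF_NOALIAS);
   '.' and anything else say nothing about the return value.  */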

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return gimple_call_num_args (stmt) > 0;
      default:
	break;
      }

  return false;
}

/* Return true when exp contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}
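
/* Illustrative example (not part of the original source): the two
   gimple predicates differ in strictness.  For

     void *p = alloca (n);

   a call that binds to __builtin_alloca satisfies both predicates,
   while a call to a user function merely *named* "alloca" satisfies
   only gimple_maybe_alloca_call_p (via special_function_p), since
   gimple_alloca_call_p insists on a recognized builtin in the
   ALLOCA family.  */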

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  arg.type = TREE_TYPE (first_field (type));
	  arg.mode = TYPE_MODE (arg.type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}
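
/* Illustrative example (not part of the original source): in C++,

     struct S { S (const S &); int i; };
     void f (S s);

   S has a non-trivial copy constructor, so the type is
   TREE_ADDRESSABLE and the parameter is passed by invisible
   reference: the caller materializes the object and passes its
   address, independently of what the target hook would decide.
   Variable-sized types are handled the same way.  */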

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit
   direct copying into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}

/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  HOST_WIDE_INT limit = warn_alloc_size_limit;
  if (limit == HOST_WIDE_INT_MAX)
    limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

  alloc_object_size_limit = build_int_cst (size_type_node, limit);

  return alloc_object_size_limit;
}
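
/* Illustrative example (not part of the original source): with no
   explicit option the limit defaults to PTRDIFF_MAX for the target,
   so compiling with

     gcc -O2 -Walloc-size-larger-than=1048576

   lowers the accepted allocation size to 1 MiB, and a call such as
   malloc (1048577) is then diagnosed by the checks that follow.  */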

/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of an object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.  When ALLOW_ZERO is true, allow
   returning a range of [0, 0] for a size in an anti-range [1, N] where
   N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
   allocation functions like malloc but it is a valid argument to
   functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (integral)
    range_type = determine_value_range (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else if (wi::eq_p (0, min - 1))
	{
	  /* EXP is unsigned and not in the range [1, MAX].  That means
	     it's either zero or greater than MAX.  Even though 0 would
	     normally be detected by -Walloc-zero, unless ALLOW_ZERO
	     is true, set the range to [MAX, TYPE_MAX] so that when MAX
	     is greater than the limit the whole range is diagnosed.  */
	  if (allow_zero)
	    min = max = wi::zero (expprec);
	  else
	    {
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	}
      else
	{
	  max = min - 1;
	  min = wi::zero (expprec);
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
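
/* Illustrative example (not part of the original source): for an
   unsigned size_t SSA name N known to be in the anti-range ~[1, 100]
   (i.e. N is zero or greater than 100), the code above produces the
   range [101, SIZE_MAX] by default, or [0, 0] when ALLOW_ZERO is
   true, following the rules described in the comments.  */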

/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose arguments, at the positions given by IDX and with the values given
   by ARGS, exceed the maximum object size or cause an unsigned overflow
   (wrapping) when multiplied.  FN is null when EXP is a call via a function
   pointer.  When ARGS[0] is null the function does nothing.  ARGS[1] may be
   null for functions like malloc, and non-null for those like calloc that
   are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
	{
	  argrange[i][0] = args[i];
	  argrange[i][1] = args[i];

	  if (tree_int_cst_lt (args[i], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE is negative",
				   exp, idx[i] + 1, args[i]);
	    }
	  else if (integer_zerop (args[i]))
	    {
	      /* Avoid issuing -Walloc-zero for allocation functions other
		 than __builtin_alloca that are declared with attribute
		 returns_nonnull because there's no portability risk.  This
		 avoids warning for such calls to libiberty's xmalloc and
		 friends.
		 Also avoid issuing the warning for calls to function named
		 "alloca".  */
	      if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
		  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
		  : !lookup_attribute ("returns_nonnull",
				       TYPE_ATTRIBUTES (fntype)))
		warned = warning_at (loc, OPT_Walloc_zero,
				     "%Kargument %i value is zero",
				     exp, idx[i] + 1);
	    }
	  else if (tree_int_cst_lt (maxobjsize, args[i]))
	    {
	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
		 mode and with -fno-exceptions as a way to indicate array
		 size overflow.  There's no good way to detect C++98 here
		 so avoid diagnosing these calls for all C++ modes.  */
	      if (i == 0
		  && fn
		  && !args[1]
		  && lang_GNU_CXX ()
		  && DECL_IS_OPERATOR_NEW_P (fn)
		  && integer_all_onesp (args[i]))
		continue;

	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1, args[i], maxobjsize);
	    }
	}
      else if (TREE_CODE (args[i]) == SSA_NAME
	       && get_size_range (args[i], argrange[i]))
	{
	  /* Verify that the argument's range is not negative (including
	     upper bound of zero).  */
	  if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
	      && tree_int_cst_le (argrange[i][1], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] is negative",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1]);
	    }
	  else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1],
				   maxobjsize);
	    }
	}
    }

  if (!argrange[0][0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
	 attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds %<SIZE_MAX%>",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds maximum object size %E",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1,
			     maxobjsize);

      if (warned)
	{
	  /* Print the full range of each of the two arguments to make
	     it clear when it is, in fact, in a range and not constant.  */
	  if (argrange[0][0] != argrange[0][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[0] + 1, argrange[0][0], argrange[0][1]);
	  if (argrange[1][0] != argrange[1][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[1] + 1, argrange[1][0], argrange[1][1]);
	}
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_BUILTIN (fn))
	inform (loc,
		"in a call to built-in allocation function %qD", fn);
      else
	inform (fnloc,
		"in a call to allocation function %qD declared here", fn);
    }
}
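
/* Illustrative example (not part of the original source); my_calloc
   is a hypothetical user function:

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   A call my_calloc (SIZE_MAX / 2, 4) makes the wi::umul above wrap,
   triggering the "product ... exceeds SIZE_MAX" warning even though
   each argument individually passes the per-argument checks.  */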

/* If EXPR refers to a character array or pointer declared attribute
   nonstring return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise returns
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else
	var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
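
/* Illustrative example (not part of the original source): attribute
   nonstring marks a character array or pointer that need not be
   nul-terminated, e.g.

     char tag[8] __attribute__ ((nonstring));

   For an expression such as &tag[0], get_attr_nonstring_decl returns
   the decl for "tag", which lets maybe_warn_nonstring_arg below warn
   when such a buffer is passed where a nul-terminated string is
   expected.  */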
1561
1562 /* Warn about passing a non-string array/pointer to a function that
1563 expects a nul-terminated string argument. */
1564
1565 void
1566 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1567 {
1568 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1569 return;
1570
1571 if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
1572 return;
1573
1574 /* Avoid clearly invalid calls (more checking done below). */
1575 unsigned nargs = call_expr_nargs (exp);
1576 if (!nargs)
1577 return;
1578
1579 /* The bound argument to a bounded string function like strncpy. */
1580 tree bound = NULL_TREE;
1581
1582 /* The longest known or possible string argument to one of the comparison
1583 functions. If the length is less than the bound it is used instead.
1584 Since the length is only used for warning and not for code generation
1585 disable strict mode in the calls to get_range_strlen below. */
1586 tree maxlen = NULL_TREE;
1587
1588 /* It's safe to call "bounded" string functions with a non-string
1589 argument since the functions provide an explicit bound for this
1590 purpose. The exception is strncat where the bound may refer to
1591 either the destination or the source. */
1592 int fncode = DECL_FUNCTION_CODE (fndecl);
1593 switch (fncode)
1594 {
1595 case BUILT_IN_STRCMP:
1596 case BUILT_IN_STRNCMP:
1597 case BUILT_IN_STRNCASECMP:
1598 {
1599 /* For these, if one argument refers to one or more of a set
1600 of string constants or arrays of known size, determine
1601 the range of their known or possible lengths and use it
1602 conservatively as the bound for the unbounded function,
1603 and to adjust the range of the bound of the bounded ones. */
1604 for (unsigned argno = 0;
1605 argno < MIN (nargs, 2)
1606 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
1607 {
1608 tree arg = CALL_EXPR_ARG (exp, argno);
1609 if (!get_attr_nonstring_decl (arg))
1610 {
1611 c_strlen_data lendata = { };
1612 /* Set MAXBOUND to an arbitrary non-null non-integer
1613 node as a request to have it set to the length of
1614 the longest string in a PHI. */
1615 lendata.maxbound = arg;
1616 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1617 maxlen = lendata.maxbound;
1618 }
1619 }
1620 }
1621 /* Fall through. */
1622
1623 case BUILT_IN_STRNCAT:
1624 case BUILT_IN_STPNCPY:
1625 case BUILT_IN_STRNCPY:
1626 if (nargs > 2)
1627 bound = CALL_EXPR_ARG (exp, 2);
1628 break;
1629
1630 case BUILT_IN_STRNDUP:
1631 if (nargs > 1)
1632 bound = CALL_EXPR_ARG (exp, 1);
1633 break;
1634
1635 case BUILT_IN_STRNLEN:
1636 {
1637 tree arg = CALL_EXPR_ARG (exp, 0);
1638 if (!get_attr_nonstring_decl (arg))
1639 {
1640 c_strlen_data lendata = { };
1641 /* Set MAXBOUND to an arbitrary non-null non-integer
1642 node as a request to have it set to the length of
1643 the longest string in a PHI. */
1644 lendata.maxbound = arg;
1645 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1646 maxlen = lendata.maxbound;
1647 }
1648 if (nargs > 1)
1649 bound = CALL_EXPR_ARG (exp, 1);
1650 break;
1651 }
1652
1653 default:
1654 break;
1655 }
1656
1657 /* Determine the range of the bound argument (if specified). */
1658 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1659 if (bound)
1660 {
1661 STRIP_NOPS (bound);
1662 get_size_range (bound, bndrng);
1663 }
1664
1665 location_t loc = EXPR_LOCATION (exp);
1666
1667 if (bndrng[0])
1668 {
1669 /* Diagnose excessive bound prior the adjustment below and
1670 regardless of attribute nonstring. */
1671 tree maxobjsize = max_object_size ();
1672 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1673 {
1674 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1675 warning_at (loc, OPT_Wstringop_overflow_,
1676 "%K%qD specified bound %E "
1677 "exceeds maximum object size %E",
1678 exp, fndecl, bndrng[0], maxobjsize);
1679 else
1680 warning_at (loc, OPT_Wstringop_overflow_,
1681 "%K%qD specified bound [%E, %E] "
1682 "exceeds maximum object size %E",
1683 exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
1684 return;
1685 }
1686 }
1687
1688 if (maxlen && !integer_all_onesp (maxlen))
1689 {
1690 /* Add one for the nul. */
1691 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1692 size_one_node);
1693
1694 if (!bndrng[0])
1695 {
1696 /* Conservatively use the upper bound of the lengths for
1697 both the lower and the upper bound of the operation. */
1698 bndrng[0] = maxlen;
1699 bndrng[1] = maxlen;
1700 bound = void_type_node;
1701 }
1702 else if (maxlen)
1703 {
1704 /* Replace the bound on the operation with the upper bound
1705 of the length of the string if the latter is smaller. */
1706 if (tree_int_cst_lt (maxlen, bndrng[0]))
1707 bndrng[0] = maxlen;
1708 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1709 bndrng[1] = maxlen;
1710 }
1711 }
1712
1713 /* Iterate over the built-in function's formal arguments and check
1714 each const char* against the actual argument. If the actual
1715 argument is declared attribute non-string issue a warning unless
1716 the argument's maximum length is bounded. */
1717 function_args_iterator it;
1718 function_args_iter_init (&it, TREE_TYPE (fndecl));
1719
1720 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1721 {
1722 /* Avoid iterating past the declared argument in a call
1723 to function declared without a prototype. */
1724 if (argno >= nargs)
1725 break;
1726
1727 tree argtype = function_args_iter_cond (&it);
1728 if (!argtype)
1729 break;
1730
1731 if (TREE_CODE (argtype) != POINTER_TYPE)
1732 continue;
1733
1734 argtype = TREE_TYPE (argtype);
1735
1736 if (TREE_CODE (argtype) != INTEGER_TYPE
1737 || !TYPE_READONLY (argtype))
1738 continue;
1739
1740 argtype = TYPE_MAIN_VARIANT (argtype);
1741 if (argtype != char_type_node)
1742 continue;
1743
1744 tree callarg = CALL_EXPR_ARG (exp, argno);
1745 if (TREE_CODE (callarg) == ADDR_EXPR)
1746 callarg = TREE_OPERAND (callarg, 0);
1747
1748 /* See if the destination is declared with attribute "nonstring". */
1749 tree decl = get_attr_nonstring_decl (callarg);
1750 if (!decl)
1751 continue;
1752
1753 /* The maximum number of array elements accessed. */
1754 offset_int wibnd = 0;
1755
1756 if (argno && fncode == BUILT_IN_STRNCAT)
1757 {
1758 /* See if the bound in strncat is derived from the strlen
1759 of the destination (as it's expected to be).
1760 If so, reset BOUND and FNCODE to trigger a warning. */
1761 tree dstarg = CALL_EXPR_ARG (exp, 0);
1762 if (is_strlen_related_p (dstarg, bound))
1763 {
1764 /* The bound applies to the destination, not to the source,
1765 so reset these to trigger a warning without mentioning
1766 the bound. */
1767 bound = NULL;
1768 fncode = 0;
1769 }
1770 else if (bndrng[1])
1771 /* Use the upper bound of the range for strncat. */
1772 wibnd = wi::to_offset (bndrng[1]);
1773 }
1774 else if (bndrng[0])
1775 /* Use the lower bound of the range for functions other than
1776 strncat. */
1777 wibnd = wi::to_offset (bndrng[0]);
1778
1779 /* Determine the size of the argument array if it is one. */
1780 offset_int asize = wibnd;
1781 bool known_size = false;
1782 tree type = TREE_TYPE (decl);
1783
1784 /* Determine the array size. For arrays of unknown bound and
1785 pointers reset BOUND to trigger the appropriate warning. */
1786 if (TREE_CODE (type) == ARRAY_TYPE)
1787 {
1788 if (tree arrbnd = TYPE_DOMAIN (type))
1789 {
1790 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1791 {
1792 asize = wi::to_offset (arrbnd) + 1;
1793 known_size = true;
1794 }
1795 }
1796 else if (bound == void_type_node)
1797 bound = NULL_TREE;
1798 }
1799 else if (bound == void_type_node)
1800 bound = NULL_TREE;
1801
1802 /* In a call to strncat with a bound in a range whose lower but
1803 not upper bound is less than the array size, reset ASIZE to
1804 be the same as the bound and the other variable to trigger
1805 the appropriate warning below. */
1806 if (fncode == BUILT_IN_STRNCAT
1807 && bndrng[0] != bndrng[1]
1808 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1809 && (!known_size
1810 || wi::ltu_p (asize, wibnd)))
1811 {
1812 asize = wibnd;
1813 bound = NULL_TREE;
1814 fncode = 0;
1815 }
1816
1817 bool warned = false;
1818
1819 auto_diagnostic_group d;
1820 if (wi::ltu_p (asize, wibnd))
1821 {
1822 if (bndrng[0] == bndrng[1])
1823 warned = warning_at (loc, OPT_Wstringop_overflow_,
1824 "%qD argument %i declared attribute "
1825 "%<nonstring%> is smaller than the specified "
1826 "bound %wu",
1827 fndecl, argno + 1, wibnd.to_uhwi ());
1828 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1829 warned = warning_at (loc, OPT_Wstringop_overflow_,
1830 "%qD argument %i declared attribute "
1831 "%<nonstring%> is smaller than "
1832 "the specified bound [%E, %E]",
1833 fndecl, argno + 1, bndrng[0], bndrng[1]);
1834 else
1835 warned = warning_at (loc, OPT_Wstringop_overflow_,
1836 "%qD argument %i declared attribute "
1837 "%<nonstring%> may be smaller than "
1838 "the specified bound [%E, %E]",
1839 fndecl, argno + 1, bndrng[0], bndrng[1]);
1840 }
1841 else if (fncode == BUILT_IN_STRNCAT)
1842 ; /* Avoid warning for calls to strncat() when the bound
1843 is equal to the size of the non-string argument. */
1844 else if (!bound)
1845 warned = warning_at (loc, OPT_Wstringop_overflow_,
1846 "%qD argument %i declared attribute %<nonstring%>",
1847 fndecl, argno + 1);
1848
1849 if (warned)
1850 inform (DECL_SOURCE_LOCATION (decl),
1851 "argument %qD declared here", decl);
1852 }
1853 }
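/* A hedged example of the kind of call the loop above diagnoses:

       __attribute__ ((nonstring)) char a[4];
       strlen (a);

   A is declared attribute nonstring and the call has no bound limiting
   how many elements may be read, so the call is diagnosed with
   -Wstringop-overflow and a note pointing at the declaration of A.  */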
1854
1855 /* Issue an error if CALL_EXPR was flagged as requiring
1856 tail-call optimization. */
1857
1858 static void
1859 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1860 {
1861 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1862 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1863 return;
1864
1865 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1866 }
1867
1868 /* Returns the type of the argument ARGNO to a function with type FNTYPE,
1869 or null when the type cannot be determined or no such argument exists. */
1870
1871 static tree
1872 fntype_argno_type (tree fntype, unsigned argno)
1873 {
1874 if (!prototype_p (fntype))
1875 return NULL_TREE;
1876
1877 tree argtype;
1878 function_args_iterator it;
1879 FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1880 if (argno-- == 0)
1881 return argtype;
1882
1883 return NULL_TREE;
1884 }
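/* Usage sketch (illustrative only): given a declaration such as

       int f (char *, size_t);

   fntype_argno_type (TREE_TYPE (fndecl), 1) yields the size_t type
   node, while an ARGNO past the last parameter, or a function type
   without a prototype, yields NULL_TREE.  */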
1885
1886 /* Helper to append the "rdwr" attribute specification described
1887 by ACCESS to the array ATTRSTR with size STRSIZE. Used in
1888 diagnostics. */
1889
1890 static inline void
1891 append_attrname (const std::pair<int, attr_access> &access,
1892 char *attrstr, size_t strsize)
1893 {
1894 /* Append the relevant attribute to the string. This (deliberately)
1895 appends the attribute pointer operand even when none was specified. */
1896 size_t len = strlen (attrstr);
1897
1898 const char* const atname
1899 = (access.second.mode == attr_access::read_only
1900 ? "read_only"
1901 : (access.second.mode == attr_access::write_only
1902 ? "write_only"
1903 : (access.second.mode == attr_access::read_write
1904 ? "read_write" : "none")));
1905
1906 const char *sep = len ? ", " : "";
1907
1908 if (access.second.sizarg == UINT_MAX)
1909 snprintf (attrstr + len, strsize - len,
1910 "%s%s (%i)", sep, atname,
1911 access.second.ptrarg + 1);
1912 else
1913 snprintf (attrstr + len, strsize - len,
1914 "%s%s (%i, %i)", sep, atname,
1915 access.second.ptrarg + 1, access.second.sizarg + 1);
1916 }
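/* For example, for an argument declared with
   __attribute__ ((access (read_only, 1, 2))) the function above appends
   "read_only (1, 2)" to ATTRSTR, or "read_only (1)" when no size
   operand was specified; successive attributes are separated by ", ".  */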
1917
1918 /* Iterate over attribute access read-only, read-write, and write-only
1919 arguments and diagnose past-the-end accesses and related problems
1920 in the function call EXP. */
1921
1922 static void
1923 maybe_warn_rdwr_sizes (rdwr_map *rwm, tree exp)
1924 {
1925 tree fndecl = NULL_TREE;
1926 tree fntype = NULL_TREE;
1927 if (tree fnaddr = CALL_EXPR_FN (exp))
1928 {
1929 if (TREE_CODE (fnaddr) == ADDR_EXPR)
1930 {
1931 fndecl = TREE_OPERAND (fnaddr, 0);
1932 fntype = TREE_TYPE (fndecl);
1933 }
1934 else
1935 fntype = TREE_TYPE (TREE_TYPE (fnaddr));
1936 }
1937
1938 if (!fntype)
1939 return;
1940
1941 auto_diagnostic_group adg;
1942
1943 /* A string describing the attributes that the warnings issued by this
1944 function apply to. Used to print one informational note per function
1945 call, rather than one per warning. That reduces clutter. */
1946 char attrstr[80];
1947 attrstr[0] = 0;
1948
1949 for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
1950 {
1951 std::pair<int, attr_access> access = *it;
1952
1953 /* Get the function call arguments corresponding to the attribute's
1954 positional arguments. When both arguments have been specified
1955 there will be two entries in *RWM, one for each. They are
1956 cross-referenced by their respective argument numbers in
1957 ACCESS.PTRARG and ACCESS.SIZARG. */
1958 const int ptridx = access.second.ptrarg;
1959 const int sizidx = access.second.sizarg;
1960
1961 gcc_assert (ptridx != -1);
1962 gcc_assert (access.first == ptridx || access.first == sizidx);
1963
1964 /* The pointer is set to null for the entry corresponding to
1965 the size argument. Skip it. It's handled when the entry
1966 corresponding to the pointer argument comes up. */
1967 if (!access.second.ptr)
1968 continue;
1969
1970 tree argtype = fntype_argno_type (fntype, ptridx);
1971 argtype = TREE_TYPE (argtype);
1972
1973 tree size;
1974 if (sizidx == -1)
1975 {
1976 /* If only the pointer attribute operand was specified
1977 and not size, set SIZE to the size of one element of
1978 the pointed to type to detect smaller objects (null
1979 pointers are diagnosed in this case only if
1980 the pointer is also declared with attribute nonnull). */
1981 size = size_one_node;
1982 }
1983 else
1984 size = rwm->get (sizidx)->size;
1985
1986 tree ptr = access.second.ptr;
1987 tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
1988 if (get_size_range (size, sizrng, true)
1989 && tree_int_cst_sgn (sizrng[0]) < 0
1990 && tree_int_cst_sgn (sizrng[1]) < 0)
1991 {
1992 /* Warn about negative sizes. */
1993 bool warned = false;
1994 location_t loc = EXPR_LOCATION (exp);
1995 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
1996 warned = warning_at (loc, OPT_Wstringop_overflow_,
1997 "%Kargument %i value %E is negative",
1998 exp, sizidx + 1, size);
1999 else
2000 warned = warning_at (loc, OPT_Wstringop_overflow_,
2001 "%Kargument %i range [%E, %E] is negative",
2002 exp, sizidx + 1, sizrng[0], sizrng[1]);
2003 if (warned)
2004 {
2005 append_attrname (access, attrstr, sizeof attrstr);
2006 /* Avoid warning again for the same attribute. */
2007 continue;
2008 }
2009 }
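/* An illustrative (hypothetical) call diagnosed by the negative-size
   check above:

       __attribute__ ((access (write_only, 1, 2)))
       void f (char *, int);
       f (buf, -1);

   The size argument's range [-1, -1] is negative, so argument 2 is
   diagnosed and the attribute noted in ATTRSTR.  */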
2010
2011 if (tree_int_cst_sgn (sizrng[0]) >= 0)
2012 {
2013 if (COMPLETE_TYPE_P (argtype))
2014 {
2015 /* Multiply SIZE by the size of the type the pointer
2016 argument points to. If it's incomplete the size
2017 is used as is. */
2018 size = NULL_TREE;
2019 if (tree argsize = TYPE_SIZE_UNIT (argtype))
2020 if (TREE_CODE (argsize) == INTEGER_CST)
2021 {
2022 const int prec = TYPE_PRECISION (sizetype);
2023 wide_int minsize = wi::to_wide (sizrng[0], prec);
2024 minsize *= wi::to_wide (argsize, prec);
2025 size = wide_int_to_tree (sizetype, minsize);
2026 }
2027 }
2028 }
2029 else
2030 size = NULL_TREE;
2031
2032 if (sizidx >= 0
2033 && integer_zerop (ptr)
2034 && tree_int_cst_sgn (sizrng[0]) > 0)
2035 {
2036 /* Warn about null pointers with positive sizes. This is
2037 different from also declaring the pointer argument with
2038 attribute nonnull when the function accepts null pointers
2039 only when the corresponding size is zero. */
2040 bool warned = false;
2041 const location_t loc = EXPR_LOC_OR_LOC (ptr, EXPR_LOCATION (exp));
2042 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2043 warned = warning_at (loc, OPT_Wnonnull,
2044 "%Kargument %i is null but the corresponding "
2045 "size argument %i value is %E",
2046 exp, ptridx + 1, sizidx + 1, size);
2047 else
2048 warned = warning_at (loc, OPT_Wnonnull,
2049 "%Kargument %i is null but the corresponding "
2050 "size argument %i range is [%E, %E]",
2051 exp, ptridx + 1, sizidx + 1,
2052 sizrng[0], sizrng[1]);
2053 if (warned)
2054 {
2055 append_attrname (access, attrstr, sizeof attrstr);
2056 /* Avoid warning again for the same attribute. */
2057 continue;
2058 }
2059 }
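/* An illustrative (hypothetical) call diagnosed by the null-pointer
   check above:

       __attribute__ ((access (write_only, 1, 2)))
       void g (char *, unsigned);
       g (0, 8);

   Argument 1 is null while the corresponding size argument is 8; null
   pointers are accepted only when the corresponding size is zero.  */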
2060
2061 tree objsize = compute_objsize (ptr, 0);
2062
2063 tree srcsize;
2064 if (access.second.mode == attr_access::write_only)
2065 {
2066 /* For a write-only argument there is no source. */
2067 srcsize = NULL_TREE;
2068 }
2069 else
2070 {
2071 /* For read-only and read-write attributes also set the source
2072 size. */
2073 srcsize = objsize;
2074 if (access.second.mode == attr_access::read_only
2075 || access.second.mode == attr_access::none)
2076 {
2077 /* For a read-only attribute there is no destination so
2078 clear OBJSIZE. This emits "reading N bytes" kind of
2079 diagnostics instead of the "writing N bytes" kind,
2080 unless MODE is none. */
2081 objsize = NULL_TREE;
2082 }
2083 }
2084
2085 /* Clear the no-warning bit in case it was set in a prior
2086 iteration so that accesses via different arguments are
2087 diagnosed. */
2088 TREE_NO_WARNING (exp) = false;
2089 check_access (exp, NULL_TREE, NULL_TREE, size, /*maxread=*/ NULL_TREE,
2090 srcsize, objsize, access.second.mode != attr_access::none);
2091
2092 if (TREE_NO_WARNING (exp))
2093 /* If check_access issued a warning above, append the relevant
2094 attribute to the string. */
2095 append_attrname (access, attrstr, sizeof attrstr);
2096 }
2097
2098 if (!*attrstr)
2099 return;
2100
2101 if (fndecl)
2102 inform (DECL_SOURCE_LOCATION (fndecl),
2103 "in a call to function %qD declared with attribute %qs",
2104 fndecl, attrstr);
2105 else
2106 inform (EXPR_LOCATION (exp),
2107 "in a call with type %qT and attribute %qs",
2108 fntype, attrstr);
2109
2110 /* Set the bit in case it was cleared and not set above. */
2111 TREE_NO_WARNING (exp) = true;
2112 }
2113
2114 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
2115 CALL_EXPR EXP.
2116
2117 NUM_ACTUALS is the total number of parameters.
2118
2119 N_NAMED_ARGS is the total number of named arguments.
2120
2121 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
2122 value, or null.
2123
2124 FNDECL is the tree node for the target of this call (if known).
2125
2126 ARGS_SO_FAR holds state needed by the target to know where to place
2127 the next argument.
2128
2129 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
2130 for arguments which are passed in registers.
2131
2132 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
2133 and may be modified by this routine.
2134
2135 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
2136 flags which may be modified by this routine.
2137
2138 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
2139 that requires allocation of stack space.
2140
2141 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
2142 the thunked-to function. */
2143
2144 static void
2145 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
2146 struct arg_data *args,
2147 struct args_size *args_size,
2148 int n_named_args ATTRIBUTE_UNUSED,
2149 tree exp, tree struct_value_addr_value,
2150 tree fndecl, tree fntype,
2151 cumulative_args_t args_so_far,
2152 int reg_parm_stack_space,
2153 rtx *old_stack_level,
2154 poly_int64_pod *old_pending_adj,
2155 int *must_preallocate, int *ecf_flags,
2156 bool *may_tailcall, bool call_from_thunk_p)
2157 {
2158 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
2159 location_t loc = EXPR_LOCATION (exp);
2160
2161 /* Count arg position in order args appear. */
2162 int argpos;
2163
2164 int i;
2165
2166 args_size->constant = 0;
2167 args_size->var = 0;
2168
2169 bitmap_obstack_initialize (NULL);
2170
2171 /* In this loop, we consider args in the order they are written.
2172 We fill up ARGS from the back. */
2173
2174 i = num_actuals - 1;
2175 {
2176 int j = i;
2177 call_expr_arg_iterator iter;
2178 tree arg;
2179 bitmap slots = NULL;
2180
2181 if (struct_value_addr_value)
2182 {
2183 args[j].tree_value = struct_value_addr_value;
2184 j--;
2185 }
2186 argpos = 0;
2187 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2188 {
2189 tree argtype = TREE_TYPE (arg);
2190
2191 if (targetm.calls.split_complex_arg
2192 && argtype
2193 && TREE_CODE (argtype) == COMPLEX_TYPE
2194 && targetm.calls.split_complex_arg (argtype))
2195 {
2196 tree subtype = TREE_TYPE (argtype);
2197 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
2198 j--;
2199 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
2200 }
2201 else
2202 args[j].tree_value = arg;
2203 j--;
2204 argpos++;
2205 }
2206
2207 if (slots)
2208 BITMAP_FREE (slots);
2209 }
2210
2211 bitmap_obstack_release (NULL);
2212
2213 /* Extract attribute alloc_size from the type of the called expression
2214 (which could be a function or a function pointer) and if set, store
2215 the indices of the corresponding arguments in ALLOC_IDX, and then
2216 the actual argument(s) at those indices in ALLOC_ARGS. */
2217 int alloc_idx[2] = { -1, -1 };
2218 if (tree alloc_size = lookup_attribute ("alloc_size",
2219 TYPE_ATTRIBUTES (fntype)))
2220 {
2221 tree args = TREE_VALUE (alloc_size);
2222 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
2223 if (TREE_CHAIN (args))
2224 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
2225 }
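/* For instance (an illustrative sketch), for a callee declared

       __attribute__ ((alloc_size (1, 2)))
       void *alloc2 (size_t, size_t);

   the code above sets ALLOC_IDX to {0, 1}, the zero-based positions of
   the two size operands; the actual arguments found at those positions
   are stored into ALLOC_ARGS by the main loop below.  */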
2226
2227 /* Array for up to the two attribute alloc_size arguments. */
2228 tree alloc_args[] = { NULL_TREE, NULL_TREE };
2229
2230 /* Map of attribute access specifications for function arguments. */
2231 rdwr_map rdwr_idx;
2232 init_attr_rdwr_indices (&rdwr_idx, fntype);
2233
2234 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
2235 for (argpos = 0; argpos < num_actuals; i--, argpos++)
2236 {
2237 tree type = TREE_TYPE (args[i].tree_value);
2238 int unsignedp;
2239
2240 /* Replace erroneous argument with constant zero. */
2241 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
2242 args[i].tree_value = integer_zero_node, type = integer_type_node;
2243
2244 /* If TYPE is a transparent union or record, pass things the way
2245 we would pass the first field of the union or record. We have
2246 already verified that the modes are the same. */
2247 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2248 type = TREE_TYPE (first_field (type));
2249
2250 /* Decide where to pass this arg.
2251
2252 args[i].reg is nonzero if all or part is passed in registers.
2253
2254 args[i].partial is nonzero if part but not all is passed in registers,
2255 and the exact value says how many bytes are passed in registers.
2256
2257 args[i].pass_on_stack is nonzero if the argument must at least be
2258 computed on the stack. It may then be loaded back into registers
2259 if args[i].reg is nonzero.
2260
2261 These decisions are driven by the FUNCTION_... macros and must agree
2262 with those made by function.c. */
2263
2264 /* See if this argument should be passed by invisible reference. */
2265 function_arg_info arg (type, argpos < n_named_args);
2266 if (pass_by_reference (args_so_far_pnt, arg))
2267 {
2268 bool callee_copies;
2269 tree base = NULL_TREE;
2270
2271 callee_copies = reference_callee_copied (args_so_far_pnt, arg);
2272
2273 /* If we're compiling a thunk, pass through invisible references
2274 instead of making a copy. */
2275 if (call_from_thunk_p
2276 || (callee_copies
2277 && !TREE_ADDRESSABLE (type)
2278 && (base = get_base_address (args[i].tree_value))
2279 && TREE_CODE (base) != SSA_NAME
2280 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
2281 {
2282 /* We may have turned the parameter value into an SSA name.
2283 Go back to the original parameter so we can take the
2284 address. */
2285 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2286 {
2287 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2288 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2289 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2290 }
2291 /* Argument setup code may have copied the value to a register. We
2292 revert that optimization now because the tail call code must
2293 use the original location. */
2294 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2295 && !MEM_P (DECL_RTL (args[i].tree_value))
2296 && DECL_INCOMING_RTL (args[i].tree_value)
2297 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2298 set_decl_rtl (args[i].tree_value,
2299 DECL_INCOMING_RTL (args[i].tree_value));
2300
2301 mark_addressable (args[i].tree_value);
2302
2303 /* We can't use sibcalls if a callee-copied argument is
2304 stored in the current function's frame. */
2305 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2306 {
2307 *may_tailcall = false;
2308 maybe_complain_about_tail_call (exp,
2309 "a callee-copied argument is"
2310 " stored in the current"
2311 " function's frame");
2312 }
2313
2314 args[i].tree_value = build_fold_addr_expr_loc (loc,
2315 args[i].tree_value);
2316 type = TREE_TYPE (args[i].tree_value);
2317
2318 if (*ecf_flags & ECF_CONST)
2319 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2320 }
2321 else
2322 {
2323 /* We make a copy of the object and pass the address to the
2324 function being called. */
2325 rtx copy;
2326
2327 if (!COMPLETE_TYPE_P (type)
2328 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2329 || (flag_stack_check == GENERIC_STACK_CHECK
2330 && compare_tree_int (TYPE_SIZE_UNIT (type),
2331 STACK_CHECK_MAX_VAR_SIZE) > 0))
2332 {
2333 /* This is a variable-sized object. Make space on the stack
2334 for it. */
2335 rtx size_rtx = expr_size (args[i].tree_value);
2336
2337 if (*old_stack_level == 0)
2338 {
2339 emit_stack_save (SAVE_BLOCK, old_stack_level);
2340 *old_pending_adj = pending_stack_adjust;
2341 pending_stack_adjust = 0;
2342 }
2343
2344 /* We can pass TRUE as the 4th argument because we just
2345 saved the stack pointer and will restore it right after
2346 the call. */
2347 copy = allocate_dynamic_stack_space (size_rtx,
2348 TYPE_ALIGN (type),
2349 TYPE_ALIGN (type),
2350 max_int_size_in_bytes
2351 (type),
2352 true);
2353 copy = gen_rtx_MEM (BLKmode, copy);
2354 set_mem_attributes (copy, type, 1);
2355 }
2356 else
2357 copy = assign_temp (type, 1, 0);
2358
2359 store_expr (args[i].tree_value, copy, 0, false, false);
2360
2361 /* Just change the const function to pure and then let
2362 the next test clear the pure based on
2363 callee_copies. */
2364 if (*ecf_flags & ECF_CONST)
2365 {
2366 *ecf_flags &= ~ECF_CONST;
2367 *ecf_flags |= ECF_PURE;
2368 }
2369
2370 if (!callee_copies && *ecf_flags & ECF_PURE)
2371 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2372
2373 args[i].tree_value
2374 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2375 type = TREE_TYPE (args[i].tree_value);
2376 *may_tailcall = false;
2377 maybe_complain_about_tail_call (exp,
2378 "argument must be passed"
2379 " by copying");
2380 }
2381 arg.pass_by_reference = true;
2382 }
2383
2384 unsignedp = TYPE_UNSIGNED (type);
2385 arg.type = type;
2386 arg.mode
2387 = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2388 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2389
2390 args[i].unsignedp = unsignedp;
2391 args[i].mode = arg.mode;
2392
2393 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2394
2395 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
2396
2397 if (args[i].reg && CONST_INT_P (args[i].reg))
2398 args[i].reg = NULL;
2399
2400 /* If this is a sibling call and the machine has register windows, the
2401 register window has to be unwound before calling the routine, so
2402 arguments have to go into the incoming registers. */
2403 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2404 args[i].tail_call_reg
2405 = targetm.calls.function_incoming_arg (args_so_far, arg);
2406 else
2407 args[i].tail_call_reg = args[i].reg;
2408
2409 if (args[i].reg)
2410 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
2411
2412 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
2413
2414 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2415 it means that we are to pass this arg in the register(s) designated
2416 by the PARALLEL, but also to pass it in the stack. */
2417 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2418 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2419 args[i].pass_on_stack = 1;
2420
2421 /* If this is an addressable type, we must preallocate the stack
2422 since we must evaluate the object into its final location.
2423
2424 If this is to be passed in both registers and the stack, it is simpler
2425 to preallocate. */
2426 if (TREE_ADDRESSABLE (type)
2427 || (args[i].pass_on_stack && args[i].reg != 0))
2428 *must_preallocate = 1;
2429
2430 /* Compute the stack-size of this argument. */
2431 if (args[i].reg == 0 || args[i].partial != 0
2432 || reg_parm_stack_space > 0
2433 || args[i].pass_on_stack)
2434 locate_and_pad_parm (arg.mode, type,
2435 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2436 1,
2437 #else
2438 args[i].reg != 0,
2439 #endif
2440 reg_parm_stack_space,
2441 args[i].pass_on_stack ? 0 : args[i].partial,
2442 fndecl, args_size, &args[i].locate);
2443 #ifdef BLOCK_REG_PADDING
2444 else
2445 /* The argument is passed entirely in registers. See at which
2446 end it should be padded. */
2447 args[i].locate.where_pad =
2448 BLOCK_REG_PADDING (arg.mode, type,
2449 int_size_in_bytes (type) <= UNITS_PER_WORD);
2450 #endif
2451
2452 /* Update ARGS_SIZE, the total stack space for args so far. */
2453
2454 args_size->constant += args[i].locate.size.constant;
2455 if (args[i].locate.size.var)
2456 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2457
2458 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2459 have been used, etc. */
2460
2461 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2462 promoted_mode used for function_arg above. However, the
2463 corresponding handling of incoming arguments in function.c
2464 does pass the promoted mode. */
2465 arg.mode = TYPE_MODE (type);
2466 targetm.calls.function_arg_advance (args_so_far, arg);
2467
2468 /* Store argument values for functions decorated with attribute
2469 alloc_size. */
2470 if (argpos == alloc_idx[0])
2471 alloc_args[0] = args[i].tree_value;
2472 else if (argpos == alloc_idx[1])
2473 alloc_args[1] = args[i].tree_value;
2474
2475 /* Save the actual argument that corresponds to the access attribute
2476 operand for later processing. */
2477 if (attr_access *access = rdwr_idx.get (argpos))
2478 {
2479 if (POINTER_TYPE_P (type))
2480 {
2481 access->ptr = args[i].tree_value;
2482 gcc_assert (access->size == NULL_TREE);
2483 }
2484 else
2485 {
2486 access->size = args[i].tree_value;
2487 gcc_assert (access->ptr == NULL_TREE);
2488 }
2489 }
2490 }
2491
2492 if (alloc_args[0])
2493 {
2494 /* Check the arguments of functions decorated with attribute
2495 alloc_size. */
2496 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2497 }
2498
2499 /* Detect passing non-string arguments to functions expecting
2500 nul-terminated strings. */
2501 maybe_warn_nonstring_arg (fndecl, exp);
2502
2503 /* Check attribute access arguments. */
2504 maybe_warn_rdwr_sizes (&rdwr_idx, exp);
2505 }
2506
2507 /* Update ARGS_SIZE to contain the total size for the argument block.
2508 Return the original constant component of the argument block's size.
2509
2510 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2511 for arguments passed in registers. */
2512
2513 static poly_int64
2514 compute_argument_block_size (int reg_parm_stack_space,
2515 struct args_size *args_size,
2516 tree fndecl ATTRIBUTE_UNUSED,
2517 tree fntype ATTRIBUTE_UNUSED,
2518 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2519 {
2520 poly_int64 unadjusted_args_size = args_size->constant;
2521
2522 /* For accumulate outgoing args mode we don't need to align, since the frame
2523 will be already aligned. Align to STACK_BOUNDARY in order to prevent
2524 backends from generating misaligned frame sizes. */
2525 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2526 preferred_stack_boundary = STACK_BOUNDARY;
2527
2528 /* Compute the actual size of the argument block required. The variable
2529 and constant sizes must be combined, the size may have to be rounded,
2530 and there may be a minimum required size. */
2531
2532 if (args_size->var)
2533 {
2534 args_size->var = ARGS_SIZE_TREE (*args_size);
2535 args_size->constant = 0;
2536
2537 preferred_stack_boundary /= BITS_PER_UNIT;
2538 if (preferred_stack_boundary > 1)
2539 {
2540 /* We don't handle this case yet. To handle it correctly we have
2541 to add the delta, round and subtract the delta.
2542 Currently no machine description requires this support. */
2543 gcc_assert (multiple_p (stack_pointer_delta,
2544 preferred_stack_boundary));
2545 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2546 }
2547
2548 if (reg_parm_stack_space > 0)
2549 {
2550 args_size->var
2551 = size_binop (MAX_EXPR, args_size->var,
2552 ssize_int (reg_parm_stack_space));
2553
2554 /* The area corresponding to register parameters is not to be counted in
2555 the size of the block we need. So make the adjustment. */
2556 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2557 args_size->var
2558 = size_binop (MINUS_EXPR, args_size->var,
2559 ssize_int (reg_parm_stack_space));
2560 }
2561 }
2562 else
2563 {
2564 preferred_stack_boundary /= BITS_PER_UNIT;
2565 if (preferred_stack_boundary < 1)
2566 preferred_stack_boundary = 1;
2567 args_size->constant = (aligned_upper_bound (args_size->constant
2568 + stack_pointer_delta,
2569 preferred_stack_boundary)
2570 - stack_pointer_delta);
2571
2572 args_size->constant = upper_bound (args_size->constant,
2573 reg_parm_stack_space);
2574
2575 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2576 args_size->constant -= reg_parm_stack_space;
2577 }
2578 return unadjusted_args_size;
2579 }
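/* A worked example (illustrative): with a constant argument size of 20
   bytes, no variable-sized part, a zero stack_pointer_delta, no reserved
   register-parameter stack space and a 16-byte preferred boundary,
   ARGS_SIZE->CONSTANT is rounded up to 32 while the returned unadjusted
   size remains 20.  */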
2580
2581 /* Precompute parameters as needed for a function call.
2582
2583 FLAGS is mask of ECF_* constants.
2584
2585 NUM_ACTUALS is the number of arguments.
2586
2587 ARGS is an array containing information for each argument; this
2588 routine fills in the INITIAL_VALUE and VALUE fields for each
2589 precomputed argument. */
2590
2591 static void
2592 precompute_arguments (int num_actuals, struct arg_data *args)
2593 {
2594 int i;
2595
2596 /* If this is a libcall, then precompute all arguments so that we do not
2597 get extraneous instructions emitted as part of the libcall sequence. */
2598
2599 /* If we preallocated the stack space, and some arguments must be passed
2600 on the stack, then we must precompute any parameter which contains a
2601 function call which will store arguments on the stack.
2602 Otherwise, evaluating the parameter may clobber previous parameters
2603 which have already been stored into the stack. (we have code to avoid
2604 such case by saving the outgoing stack arguments, but it results in
2605 worse code) */
2606 if (!ACCUMULATE_OUTGOING_ARGS)
2607 return;
2608
2609 for (i = 0; i < num_actuals; i++)
2610 {
2611 tree type;
2612 machine_mode mode;
2613
2614 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2615 continue;
2616
2617 /* If this is an addressable type, we cannot pre-evaluate it. */
2618 type = TREE_TYPE (args[i].tree_value);
2619 gcc_assert (!TREE_ADDRESSABLE (type));
2620
2621 args[i].initial_value = args[i].value
2622 = expand_normal (args[i].tree_value);
2623
2624 mode = TYPE_MODE (type);
2625 if (mode != args[i].mode)
2626 {
2627 int unsignedp = args[i].unsignedp;
2628 args[i].value
2629 = convert_modes (args[i].mode, mode,
2630 args[i].value, args[i].unsignedp);
2631
2632 /* CSE will replace this only if it contains args[i].value
2633 pseudo, so convert it down to the declared mode using
2634 a SUBREG. */
2635 if (REG_P (args[i].value)
2636 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2637 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2638 {
2639 args[i].initial_value
2640 = gen_lowpart_SUBREG (mode, args[i].value);
2641 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2642 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2643 }
2644 }
2645 }
2646 }
2647
2648 /* Given the current state of MUST_PREALLOCATE and information about
2649 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2650 compute and return the final value for MUST_PREALLOCATE. */
2651
2652 static int
2653 finalize_must_preallocate (int must_preallocate, int num_actuals,
2654 struct arg_data *args, struct args_size *args_size)
2655 {
2656 /* See if we have or want to preallocate stack space.
2657
2658 If we would have to push a partially-in-regs parm
2659 before other stack parms, preallocate stack space instead.
2660
2661 If the size of some parm is not a multiple of the required stack
2662 alignment, we must preallocate.
2663
2664 If the total size of arguments that would otherwise create a copy in
2665 a temporary (such as a CALL) is more than half the total argument list
2666 size, preallocation is faster.
2667
2668 Another reason to preallocate is if we have a machine (like the m88k)
2669 where stack alignment is required to be maintained between every
2670 pair of insns, not just when the call is made. However, we assume here
2671 that such machines either do not have push insns (and hence preallocation
2672 would occur anyway) or the problem is taken care of with
2673 PUSH_ROUNDING. */
2674
2675 if (! must_preallocate)
2676 {
2677 int partial_seen = 0;
2678 poly_int64 copy_to_evaluate_size = 0;
2679 int i;
2680
2681 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2682 {
2683 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2684 partial_seen = 1;
2685 else if (partial_seen && args[i].reg == 0)
2686 must_preallocate = 1;
2687
2688 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2689 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2690 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2691 || TREE_CODE (args[i].tree_value) == COND_EXPR
2692 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2693 copy_to_evaluate_size
2694 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2695 }
2696
2697 if (maybe_ne (args_size->constant, 0)
2698 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2699 must_preallocate = 1;
2700 }
2701 return must_preallocate;
2702 }
2703
2704 /* If we preallocated stack space, compute the address of each argument
2705 and store it into the ARGS array.
2706
2707 We need not ensure it is a valid memory address here; it will be
2708 validized when it is used.
2709
2710 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2711
2712 static void
2713 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2714 {
2715 if (argblock)
2716 {
2717 rtx arg_reg = argblock;
2718 int i;
2719 poly_int64 arg_offset = 0;
2720
2721 if (GET_CODE (argblock) == PLUS)
2722 {
2723 arg_reg = XEXP (argblock, 0);
2724 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2725 }
2726
2727 for (i = 0; i < num_actuals; i++)
2728 {
2729 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2730 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2731 rtx addr;
2732 unsigned int align, boundary;
2733 poly_uint64 units_on_stack = 0;
2734 machine_mode partial_mode = VOIDmode;
2735
2736 /* Skip this parm if it will not be passed on the stack. */
2737 if (! args[i].pass_on_stack
2738 && args[i].reg != 0
2739 && args[i].partial == 0)
2740 continue;
2741
2742 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2743 continue;
2744
2745 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2746 addr = plus_constant (Pmode, addr, arg_offset);
2747
2748 if (args[i].partial != 0)
2749 {
2750 /* Only part of the parameter is being passed on the stack.
2751 Generate a simple memory reference of the correct size. */
2752 units_on_stack = args[i].locate.size.constant;
2753 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2754 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2755 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2756 set_mem_size (args[i].stack, units_on_stack);
2757 }
2758 else
2759 {
2760 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2761 set_mem_attributes (args[i].stack,
2762 TREE_TYPE (args[i].tree_value), 1);
2763 }
2764 align = BITS_PER_UNIT;
2765 boundary = args[i].locate.boundary;
2766 poly_int64 offset_val;
2767 if (args[i].locate.where_pad != PAD_DOWNWARD)
2768 align = boundary;
2769 else if (poly_int_rtx_p (offset, &offset_val))
2770 {
2771 align = least_bit_hwi (boundary);
2772 unsigned int offset_align
2773 = known_alignment (offset_val) * BITS_PER_UNIT;
2774 if (offset_align != 0)
2775 align = MIN (align, offset_align);
2776 }
2777 set_mem_align (args[i].stack, align);
2778
2779 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2780 addr = plus_constant (Pmode, addr, arg_offset);
2781
2782 if (args[i].partial != 0)
2783 {
2784 /* Only part of the parameter is being passed on the stack.
2785 Generate a simple memory reference of the correct size.
2786 */
2787 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2788 set_mem_size (args[i].stack_slot, units_on_stack);
2789 }
2790 else
2791 {
2792 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2793 set_mem_attributes (args[i].stack_slot,
2794 TREE_TYPE (args[i].tree_value), 1);
2795 }
2796 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2797
2798 /* Function incoming arguments may overlap with sibling call
2799 outgoing arguments and we cannot allow reordering of reads
2800 from function arguments with stores to outgoing arguments
2801 of sibling calls. */
2802 set_mem_alias_set (args[i].stack, 0);
2803 set_mem_alias_set (args[i].stack_slot, 0);
2804 }
2805 }
2806 }
2807
2808 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2809 in a call instruction.
2810
2811 FNDECL is the tree node for the target function. For an indirect call
2812 FNDECL will be NULL_TREE.
2813
2814 ADDR is the operand 0 of CALL_EXPR for this call. */
2815
2816 static rtx
2817 rtx_for_function_call (tree fndecl, tree addr)
2818 {
2819 rtx funexp;
2820
2821 /* Get the function to call, in the form of RTL. */
2822 if (fndecl)
2823 {
2824 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2825 TREE_USED (fndecl) = 1;
2826
2827 /* Get a SYMBOL_REF rtx for the function address. */
2828 funexp = XEXP (DECL_RTL (fndecl), 0);
2829 }
2830 else
2831 /* Generate an rtx (probably a pseudo-register) for the address. */
2832 {
2833 push_temp_slots ();
2834 funexp = expand_normal (addr);
2835 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2836 }
2837 return funexp;
2838 }
2839
2840 /* Return the static chain for this function, if any. */
2841
2842 rtx
2843 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2844 {
2845 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2846 return NULL;
2847
2848 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2849 }
2850
2851 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2852 static struct
2853 {
2854 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2855 or NULL_RTX if none has been scanned yet. */
2856 rtx_insn *scan_start;
2857 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2858 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2859 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2860 with fixed offset, or PC if this is with variable or unknown offset. */
2861 vec<rtx> cache;
2862 } internal_arg_pointer_exp_state;
2863
2864 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2865
2866 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2867 the tail call sequence, starting with first insn that hasn't been
2868 scanned yet, and note for each pseudo on the LHS whether it is based
2869 on crtl->args.internal_arg_pointer or not, and what offset from
2870 that pointer it has. */
2871
2872 static void
2873 internal_arg_pointer_based_exp_scan (void)
2874 {
2875 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2876
2877 if (scan_start == NULL_RTX)
2878 insn = get_insns ();
2879 else
2880 insn = NEXT_INSN (scan_start);
2881
2882 while (insn)
2883 {
2884 rtx set = single_set (insn);
2885 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2886 {
2887 rtx val = NULL_RTX;
2888 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2889 /* Punt on pseudos set multiple times. */
2890 if (idx < internal_arg_pointer_exp_state.cache.length ()
2891 && (internal_arg_pointer_exp_state.cache[idx]
2892 != NULL_RTX))
2893 val = pc_rtx;
2894 else
2895 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2896 if (val != NULL_RTX)
2897 {
2898 if (idx >= internal_arg_pointer_exp_state.cache.length ())
2899 internal_arg_pointer_exp_state.cache
2900 .safe_grow_cleared (idx + 1);
2901 internal_arg_pointer_exp_state.cache[idx] = val;
2902 }
2903 }
2904 if (NEXT_INSN (insn) == NULL_RTX)
2905 scan_start = insn;
2906 insn = NEXT_INSN (insn);
2907 }
2908
2909 internal_arg_pointer_exp_state.scan_start = scan_start;
2910 }
2911
2912 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2913 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2914 it with fixed offset, or PC if this is with variable or unknown offset.
2915 TOPLEVEL is true if the function is invoked at the topmost level. */
2916
2917 static rtx
2918 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2919 {
2920 if (CONSTANT_P (rtl))
2921 return NULL_RTX;
2922
2923 if (rtl == crtl->args.internal_arg_pointer)
2924 return const0_rtx;
2925
2926 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2927 return NULL_RTX;
2928
2929 poly_int64 offset;
2930 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2931 {
2932 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2933 if (val == NULL_RTX || val == pc_rtx)
2934 return val;
2935 return plus_constant (Pmode, val, offset);
2936 }
2937
2938 /* When called at the topmost level, scan pseudo assignments in between the
2939 last scanned instruction in the tail call sequence and the latest insn
2940 in that sequence. */
2941 if (toplevel)
2942 internal_arg_pointer_based_exp_scan ();
2943
2944 if (REG_P (rtl))
2945 {
2946 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2947 if (idx < internal_arg_pointer_exp_state.cache.length ())
2948 return internal_arg_pointer_exp_state.cache[idx];
2949
2950 return NULL_RTX;
2951 }
2952
2953 subrtx_iterator::array_type array;
2954 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2955 {
2956 const_rtx x = *iter;
2957 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2958 return pc_rtx;
2959 if (MEM_P (x))
2960 iter.skip_subrtxes ();
2961 }
2962
2963 return NULL_RTX;
2964 }
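/* A summary of the possible results (illustrative): for RTL that is
   crtl->args.internal_arg_pointer itself the result is (const_int 0);
   for (plus (that pointer) (const_int 8)) it is (const_int 8); for a
   pseudo set more than once, or one based on the pointer with a
   variable or unknown offset, it is PC; for unrelated RTL it is
   NULL_RTX.  */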
2965
2966 /* Return true if SIZE bytes starting from address ADDR might overlap an
2967 already-clobbered argument area. This function is used to determine
2968 if we should give up a sibcall. */
2969
2970 static bool
2971 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
2972 {
2973 poly_int64 i;
2974 unsigned HOST_WIDE_INT start, end;
2975 rtx val;
2976
2977 if (bitmap_empty_p (stored_args_map)
2978 && stored_args_watermark == HOST_WIDE_INT_M1U)
2979 return false;
2980 val = internal_arg_pointer_based_exp (addr, true);
2981 if (val == NULL_RTX)
2982 return false;
2983 else if (!poly_int_rtx_p (val, &i))
2984 return true;
2985
2986 if (known_eq (size, 0U))
2987 return false;
2988
2989 if (STACK_GROWS_DOWNWARD)
2990 i -= crtl->args.pretend_args_size;
2991 else
2992 i += crtl->args.pretend_args_size;
2993
2994 if (ARGS_GROW_DOWNWARD)
2995 i = -i - size;
2996
2997 /* We can ignore any references to the function's pretend args,
2998 which at this point would manifest as negative values of I. */
2999 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
3000 return false;
3001
3002 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
3003 if (!(i + size).is_constant (&end))
3004 end = HOST_WIDE_INT_M1U;
3005
3006 if (end > stored_args_watermark)
3007 return true;
3008
3009 end = MIN (end, SBITMAP_SIZE (stored_args_map));
3010 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
3011 if (bitmap_bit_p (stored_args_map, k))
3012 return true;
3013
3014 return false;
3015 }
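/* Hedged usage note: this predicate errs on the side of returning
   true.  For instance, an address whose offset from the incoming
   argument pointer cannot be resolved to a compile-time constant (PC
   above) is treated as potentially overlapping, which merely makes the
   caller give up on the sibcall optimization.  */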
3016
3017 /* Do the register loads required for any wholly-register parms or any
3018 parms which are passed both on the stack and in a register. Their
3019 expressions were already evaluated.
3020
3021 Mark all register-parms as living through the call, putting these USE
3022 insns in the CALL_INSN_FUNCTION_USAGE field.
3023
3024 When IS_SIBCALL, perform the check_sibcall_argument_overlap
3025 checking, setting *SIBCALL_FAILURE if appropriate. */
3026
3027 static void
3028 load_register_parameters (struct arg_data *args, int num_actuals,
3029 rtx *call_fusage, int flags, int is_sibcall,
3030 int *sibcall_failure)
3031 {
3032 int i, j;
3033
3034 for (i = 0; i < num_actuals; i++)
3035 {
3036 rtx reg = ((flags & ECF_SIBCALL)
3037 ? args[i].tail_call_reg : args[i].reg);
3038 if (reg)
3039 {
3040 int partial = args[i].partial;
3041 int nregs;
3042 poly_int64 size = 0;
3043 HOST_WIDE_INT const_size = 0;
3044 rtx_insn *before_arg = get_last_insn ();
3045 tree type = TREE_TYPE (args[i].tree_value);
3046 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
3047 type = TREE_TYPE (first_field (type));
3048 /* Set non-negative if we must move a word at a time, even if
3049 just one word (e.g., partial == 4 && mode == DFmode). Set
3050 to -1 if we just use a normal move insn. This value can be
3051 zero if the argument is a zero size structure. */
3052 nregs = -1;
3053 if (GET_CODE (reg) == PARALLEL)
3054 ;
3055 else if (partial)
3056 {
3057 gcc_assert (partial % UNITS_PER_WORD == 0);
3058 nregs = partial / UNITS_PER_WORD;
3059 }
3060 else if (TYPE_MODE (type) == BLKmode)
3061 {
3062 /* Variable-sized parameters should be described by a
3063 PARALLEL instead. */
3064 const_size = int_size_in_bytes (type);
3065 gcc_assert (const_size >= 0);
3066 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3067 size = const_size;
3068 }
3069 else
3070 size = GET_MODE_SIZE (args[i].mode);
3071
3072 /* Handle calls that pass values in multiple non-contiguous
3073 locations. The Irix 6 ABI has examples of this. */
3074
3075 if (GET_CODE (reg) == PARALLEL)
3076 emit_group_move (reg, args[i].parallel_value);
3077
3078 /* If simple case, just do move. If normal partial, store_one_arg
3079 has already loaded the register for us. In all other cases,
3080 load the register(s) from memory. */
3081
3082 else if (nregs == -1)
3083 {
3084 emit_move_insn (reg, args[i].value);
3085 #ifdef BLOCK_REG_PADDING
3086 /* Handle case where we have a value that needs shifting
3087 up to the msb. eg. a QImode value and we're padding
3088 upward on a BYTES_BIG_ENDIAN machine. */
3089 if (args[i].locate.where_pad
3090 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
3091 {
3092 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
3093 if (maybe_lt (size, UNITS_PER_WORD))
3094 {
3095 rtx x;
3096 poly_int64 shift
3097 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3098
3099 /* Assigning REG here rather than a temp makes
3100 CALL_FUSAGE report the whole reg as used.
3101 Strictly speaking, the call only uses SIZE
3102 bytes at the msb end, but it doesn't seem worth
3103 generating rtl to say that. */
3104 reg = gen_rtx_REG (word_mode, REGNO (reg));
3105 x = expand_shift (LSHIFT_EXPR, word_mode,
3106 reg, shift, reg, 1);
3107 if (x != reg)
3108 emit_move_insn (reg, x);
3109 }
3110 }
3111 #endif
3112 }
3113
3114 /* If we have pre-computed the values to put in the registers in
3115 the case of non-aligned structures, copy them in now. */
3116
3117 else if (args[i].n_aligned_regs != 0)
3118 for (j = 0; j < args[i].n_aligned_regs; j++)
3119 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
3120 args[i].aligned_regs[j]);
3121
3122 else if (partial == 0 || args[i].pass_on_stack)
3123 {
3124 /* SIZE and CONST_SIZE are 0 for partial arguments and
3125 the size of a BLKmode type otherwise. */
3126 gcc_checking_assert (known_eq (size, const_size));
3127 rtx mem = validize_mem (copy_rtx (args[i].value));
3128
3129 /* Check for overlap with already clobbered argument area,
3130 providing that this has non-zero size. */
3131 if (is_sibcall
3132 && const_size != 0
3133 && (mem_might_overlap_already_clobbered_arg_p
3134 (XEXP (args[i].value, 0), const_size)))
3135 *sibcall_failure = 1;
3136
3137 if (const_size % UNITS_PER_WORD == 0
3138 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
3139 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
3140 else
3141 {
3142 if (nregs > 1)
3143 move_block_to_reg (REGNO (reg), mem, nregs - 1,
3144 args[i].mode);
3145 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
3146 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
3147 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
3148 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
3149 word_mode, word_mode, false,
3150 NULL);
3151 if (BYTES_BIG_ENDIAN)
3152 x = expand_shift (LSHIFT_EXPR, word_mode, x,
3153 BITS_PER_WORD - bitsize, dest, 1);
3154 if (x != dest)
3155 emit_move_insn (dest, x);
3156 }
3157
3158 /* Handle a BLKmode that needs shifting. */
3159 if (nregs == 1 && const_size < UNITS_PER_WORD
3160 #ifdef BLOCK_REG_PADDING
3161 && args[i].locate.where_pad == PAD_DOWNWARD
3162 #else
3163 && BYTES_BIG_ENDIAN
3164 #endif
3165 )
3166 {
3167 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
3168 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
3169 enum tree_code dir = (BYTES_BIG_ENDIAN
3170 ? RSHIFT_EXPR : LSHIFT_EXPR);
3171 rtx x;
3172
3173 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
3174 if (x != dest)
3175 emit_move_insn (dest, x);
3176 }
3177 }
3178
3179 /* When a parameter is a block, and perhaps in other cases, it is
3180 possible that it did a load from an argument slot that was
3181 already clobbered. */
3182 if (is_sibcall
3183 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
3184 *sibcall_failure = 1;
3185
3186 /* Handle calls that pass values in multiple non-contiguous
3187 locations. The Irix 6 ABI has examples of this. */
3188 if (GET_CODE (reg) == PARALLEL)
3189 use_group_regs (call_fusage, reg);
3190 else if (nregs == -1)
3191 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
3192 else if (nregs > 0)
3193 use_regs (call_fusage, REGNO (reg), nregs);
3194 }
3195 }
3196 }
3197
3198 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
3199 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
3200 bytes, then we would need to push some additional bytes to pad the
3201 arguments. So, we try to compute an adjustment to the stack pointer for an
3202 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
3203 bytes. Then, when the arguments are pushed the stack will be perfectly
3204 aligned.
3205
3206 Return true if this optimization is possible, storing the adjustment
3207 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
3208 bytes that should be popped after the call. */
3209
3210 static bool
3211 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
3212 poly_int64 unadjusted_args_size,
3213 struct args_size *args_size,
3214 unsigned int preferred_unit_stack_boundary)
3215 {
3216 /* The number of bytes to pop so that the stack will be
3217 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
3218 poly_int64 adjustment;
3219 /* The alignment of the stack after the arguments are pushed, if we
3220 just pushed the arguments without adjusting the stack here. */
3221 unsigned HOST_WIDE_INT unadjusted_alignment;
3222
3223 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
3224 preferred_unit_stack_boundary,
3225 &unadjusted_alignment))
3226 return false;
3227
3228 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
3229 as possible -- leaving just enough left to cancel out the
3230 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
3231 PENDING_STACK_ADJUST is non-negative, and congruent to
3232 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
3233
3234 /* Begin by trying to pop all the bytes. */
3235 unsigned HOST_WIDE_INT tmp_misalignment;
3236 if (!known_misalignment (pending_stack_adjust,
3237 preferred_unit_stack_boundary,
3238 &tmp_misalignment))
3239 return false;
3240 unadjusted_alignment -= tmp_misalignment;
3241 adjustment = pending_stack_adjust;
3242 /* Push enough additional bytes that the stack will be aligned
3243 after the arguments are pushed. */
3244 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
3245 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
3246
3247 /* We need to know whether the adjusted argument size
3248 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
3249 or a deallocation. */
3250 if (!ordered_p (adjustment, unadjusted_args_size))
3251 return false;
3252
3253 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
3254 bytes after the call. The right number is the entire
3255 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
3256 by the arguments in the first place. */
3257 args_size->constant
3258 = pending_stack_adjust - adjustment + unadjusted_args_size;
3259
3260 *adjustment_out = adjustment;
3261 return true;
3262 }
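/* A small worked example (illustrative, glossing over the unsigned
   wrap-around details above): with PENDING_STACK_ADJUST == 28,
   UNADJUSTED_ARGS_SIZE == 4, a zero stack_pointer_delta and a 16-byte
   boundary, any ADJUSTMENT congruent to 4 modulo 16 leaves the stack
   aligned once the 4 argument bytes are pushed, and ARGS_SIZE->CONSTANT
   becomes 28 - ADJUSTMENT + 4, the number of bytes to pop after the
   call.  */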
3263
3264 /* Scan expression X to check whether it dereferences any argument slots
3265 already clobbered by tail call arguments (as noted in the stored_args_map
3266 bitmap).
3267 Return nonzero if X dereferences such an argument slot,
3268 zero otherwise. */
3269
3270 static int
3271 check_sibcall_argument_overlap_1 (rtx x)
3272 {
3273 RTX_CODE code;
3274 int i, j;
3275 const char *fmt;
3276
3277 if (x == NULL_RTX)
3278 return 0;
3279
3280 code = GET_CODE (x);
3281
3282 /* We need not check the operands of the CALL expression itself. */
3283 if (code == CALL)
3284 return 0;
3285
3286 if (code == MEM)
3287 return (mem_might_overlap_already_clobbered_arg_p
3288 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
3289
3290 /* Scan all subexpressions. */
3291 fmt = GET_RTX_FORMAT (code);
3292 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3293 {
3294 if (*fmt == 'e')
3295 {
3296 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3297 return 1;
3298 }
3299 else if (*fmt == 'E')
3300 {
3301 for (j = 0; j < XVECLEN (x, i); j++)
3302 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3303 return 1;
3304 }
3305 }
3306 return 0;
3307 }
3308
3309 /* Scan the sequence after INSN to check whether it dereferences any
3310 argument slots already clobbered by tail call arguments (as noted in
3311 the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the stack slots
3312 for ARG to the stored_args_map bitmap afterwards (when ARG is a register,
3313 MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence after
3314 INSN dereferences such argument slots, zero otherwise. */
3315
3316 static int
3317 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3318 int mark_stored_args_map)
3319 {
3320 poly_uint64 low, high;
3321 unsigned HOST_WIDE_INT const_low, const_high;
3322
3323 if (insn == NULL_RTX)
3324 insn = get_insns ();
3325 else
3326 insn = NEXT_INSN (insn);
3327
3328 for (; insn; insn = NEXT_INSN (insn))
3329 if (INSN_P (insn)
3330 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3331 break;
3332
3333 if (mark_stored_args_map)
3334 {
3335 if (ARGS_GROW_DOWNWARD)
3336 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3337 else
3338 low = arg->locate.slot_offset.constant;
3339 high = low + arg->locate.size.constant;
3340
3341 const_low = constant_lower_bound (low);
3342 if (high.is_constant (&const_high))
3343 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3344 bitmap_set_bit (stored_args_map, i);
3345 else
3346 stored_args_watermark = MIN (stored_args_watermark, const_low);
3347 }
3348 return insn != NULL_RTX;
3349 }
3350
3351 /* Given that a function returns a value of mode MODE at the most
3352 significant end of hard register VALUE, shift VALUE left or right
3353 as specified by LEFT_P. Return true if some action was needed. */
3354
3355 bool
3356 shift_return_value (machine_mode mode, bool left_p, rtx value)
3357 {
3358 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3359 machine_mode value_mode = GET_MODE (value);
3360 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3361
3362 if (known_eq (shift, 0))
3363 return false;
3364
3365 /* Use ashr rather than lshr for right shifts. This is for the benefit
3366 of the MIPS port, which requires SImode values to be sign-extended
3367 when stored in 64-bit registers. */
3368 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3369 value, gen_int_shift_amount (value_mode, shift),
3370 value, 1, OPTAB_WIDEN))
3371 gcc_unreachable ();
3372 return true;
3373 }
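/* For example (illustrative): on a 64-bit target that returns a
   32-bit value in the most significant half of a 64-bit hard register,
   MODE is SImode, VALUE has DImode, and a 32-bit shift is emitted
   (left to store such a value, right to extract it).  */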
3374
3375 /* If X is a likely-spilled register value, copy it to a pseudo
3376 register and return that register. Return X otherwise. */
3377
3378 static rtx
3379 avoid_likely_spilled_reg (rtx x)
3380 {
3381 rtx new_rtx;
3382
3383 if (REG_P (x)
3384 && HARD_REGISTER_P (x)
3385 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3386 {
3387 /* Make sure that we generate a REG rather than a CONCAT.
3388 Moves into CONCATs can need nontrivial instructions,
3389 and the whole point of this function is to avoid
3390 using the hard register directly in such a situation. */
3391 generating_concat_p = 0;
3392 new_rtx = gen_reg_rtx (GET_MODE (x));
3393 generating_concat_p = 1;
3394 emit_move_insn (new_rtx, x);
3395 return new_rtx;
3396 }
3397 return x;
3398 }
3399
3400 /* Helper function for expand_call.
3401 Return false if EXP is not implementable as a sibling call. */
3402
3403 static bool
3404 can_implement_as_sibling_call_p (tree exp,
3405 rtx structure_value_addr,
3406 tree funtype,
3407 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3408 tree fndecl,
3409 int flags,
3410 tree addr,
3411 const args_size &args_size)
3412 {
3413 if (!targetm.have_sibcall_epilogue ())
3414 {
3415 maybe_complain_about_tail_call
3416 (exp,
3417 "machine description does not have"
3418 " a sibcall_epilogue instruction pattern");
3419 return false;
3420 }
3421
3422 /* Doing sibling call optimization needs some work, since
3423 structure_value_addr can be allocated on the stack.
3424 It does not seem worth the effort since few optimizable
3425 sibling calls will return a structure. */
3426 if (structure_value_addr != NULL_RTX)
3427 {
3428 maybe_complain_about_tail_call (exp, "callee returns a structure");
3429 return false;
3430 }
3431
3432 #ifdef REG_PARM_STACK_SPACE
3433 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
3434 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3435 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3436 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3437 {
3438 maybe_complain_about_tail_call (exp,
3439 "inconsistent size of stack space"
3440 " allocated for arguments which are"
3441 " passed in registers");
3442 return false;
3443 }
3444 #endif
3445
3446 /* Check whether the target is able to optimize the call
3447 into a sibcall. */
3448 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3449 {
3450 maybe_complain_about_tail_call (exp,
3451 "target is not able to optimize the"
3452 " call into a sibling call");
3453 return false;
3454 }
3455
3456 /* Functions that do not return exactly once may not be sibcall
3457 optimized. */
3458 if (flags & ECF_RETURNS_TWICE)
3459 {
3460 maybe_complain_about_tail_call (exp, "callee returns twice");
3461 return false;
3462 }
3463 if (flags & ECF_NORETURN)
3464 {
3465 maybe_complain_about_tail_call (exp, "callee does not return");
3466 return false;
3467 }
3468
3469 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3470 {
3471 maybe_complain_about_tail_call (exp, "volatile function type");
3472 return false;
3473 }
3474
3475 /* If the called function is nested in the current one, it might access
3476 some of the caller's arguments, but could clobber them beforehand if
3477 the argument areas are shared. */
3478 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3479 {
3480 maybe_complain_about_tail_call (exp, "nested function");
3481 return false;
3482 }
3483
3484 /* If this function requires more stack slots than the current
3485 function, we cannot change it into a sibling call.
3486 crtl->args.pretend_args_size is not part of the
3487 stack allocated by our caller. */
3488 if (maybe_gt (args_size.constant,
3489 crtl->args.size - crtl->args.pretend_args_size))
3490 {
3491 maybe_complain_about_tail_call (exp,
3492 "callee required more stack slots"
3493 " than the caller");
3494 return false;
3495 }
3496
3497 /* If the callee pops its own arguments, then it must pop exactly
3498 the same number of arguments as the current function. */
3499 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3500 args_size.constant),
3501 targetm.calls.return_pops_args (current_function_decl,
3502 TREE_TYPE
3503 (current_function_decl),
3504 crtl->args.size)))
3505 {
3506 maybe_complain_about_tail_call (exp,
3507 "inconsistent number of"
3508 " popped arguments");
3509 return false;
3510 }
3511
3512 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3513 {
3514 maybe_complain_about_tail_call (exp, "frontend does not support"
3515 " sibling call");
3516 return false;
3517 }
3518
3519 /* All checks passed. */
3520 return true;
3521 }
3522
3523 /* Update stack alignment when the parameter is passed on the stack
3524 since the outgoing parameter requires extra alignment on the calling
3525 function side. */
3526
3527 static void
3528 update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3529 {
3530 if (crtl->stack_alignment_needed < locate->boundary)
3531 crtl->stack_alignment_needed = locate->boundary;
3532 if (crtl->preferred_stack_boundary < locate->boundary)
3533 crtl->preferred_stack_boundary = locate->boundary;
3534 }
3535
3536 /* Generate all the code for a CALL_EXPR exp
3537 and return an rtx for its value.
3538 Store the value in TARGET (specified as an rtx) if convenient.
3539 If the value is stored in TARGET then TARGET is returned.
3540 If IGNORE is nonzero, then we ignore the value of the function call. */
3541
3542 rtx
3543 expand_call (tree exp, rtx target, int ignore)
3544 {
3545 /* Nonzero if we are currently expanding a call. */
3546 static int currently_expanding_call = 0;
3547
3548 /* RTX for the function to be called. */
3549 rtx funexp;
3550 /* Sequence of insns to perform a normal "call". */
3551 rtx_insn *normal_call_insns = NULL;
3552 /* Sequence of insns to perform a tail "call". */
3553 rtx_insn *tail_call_insns = NULL;
3554 /* Data type of the function. */
3555 tree funtype;
3556 tree type_arg_types;
3557 tree rettype;
3558 /* Declaration of the function being called,
3559 or 0 if the function is computed (not known by name). */
3560 tree fndecl = 0;
3561 /* The type of the function being called. */
3562 tree fntype;
3563 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3564 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3565 int pass;
3566
3567 /* Register in which non-BLKmode value will be returned,
3568 or 0 if no value or if value is BLKmode. */
3569 rtx valreg;
3570 /* Address where we should return a BLKmode value;
3571 0 if value not BLKmode. */
3572 rtx structure_value_addr = 0;
3573 /* Nonzero if that address is being passed by treating it as
3574 an extra, implicit first parameter. Otherwise,
3575 it is passed by being copied directly into struct_value_rtx. */
3576 int structure_value_addr_parm = 0;
3577 /* Holds the value of implicit argument for the struct value. */
3578 tree structure_value_addr_value = NULL_TREE;
3579 /* Size of aggregate value wanted, or zero if none wanted
3580 or if we are using the non-reentrant PCC calling convention
3581 or expecting the value in registers. */
3582 poly_int64 struct_value_size = 0;
3583 /* Nonzero if called function returns an aggregate in memory PCC style,
3584 by returning the address of where to find it. */
3585 int pcc_struct_value = 0;
3586 rtx struct_value = 0;
3587
3588 /* Number of actual parameters in this call, including struct value addr. */
3589 int num_actuals;
3590 /* Number of named args. Args after this are anonymous ones
3591 and they must all go on the stack. */
3592 int n_named_args;
3593 /* Number of complex actual arguments that need to be split. */
3594 int num_complex_actuals = 0;
3595
3596 /* Vector of information about each argument.
3597 Arguments are numbered in the order they will be pushed,
3598 not the order they are written. */
3599 struct arg_data *args;
3600
3601 /* Total size in bytes of all the stack-parms scanned so far. */
3602 struct args_size args_size;
3603 struct args_size adjusted_args_size;
3604 /* Size of arguments before any adjustments (such as rounding). */
3605 poly_int64 unadjusted_args_size;
3606 /* Data on reg parms scanned so far. */
3607 CUMULATIVE_ARGS args_so_far_v;
3608 cumulative_args_t args_so_far;
3609 /* Nonzero if a reg parm has been scanned. */
3610 int reg_parm_seen;
3611 /* Nonzero if this is an indirect function call. */
3612
3613 /* Nonzero if we must avoid push-insns in the args for this call.
3614 If stack space is allocated for register parameters, but not by the
3615 caller, then it is preallocated in the fixed part of the stack frame.
3616 So the entire argument block must then be preallocated (i.e., we
3617 ignore PUSH_ROUNDING in that case). */
3618
3619 int must_preallocate = !PUSH_ARGS;
3620
3621 /* Size of the stack reserved for parameter registers. */
3622 int reg_parm_stack_space = 0;
3623
3624 /* Address of space preallocated for stack parms
3625 (on machines that lack push insns), or 0 if space not preallocated. */
3626 rtx argblock = 0;
3627
3628 /* Mask of ECF_ and ERF_ flags. */
3629 int flags = 0;
3630 int return_flags = 0;
3631 #ifdef REG_PARM_STACK_SPACE
3632 /* Define the boundary of the register parm stack space that needs to be
3633 saved, if any. */
3634 int low_to_save, high_to_save;
3635 rtx save_area = 0; /* Place that it is saved */
3636 #endif
3637
3638 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3639 char *initial_stack_usage_map = stack_usage_map;
3640 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3641 char *stack_usage_map_buf = NULL;
3642
3643 poly_int64 old_stack_allocated;
3644
3645 /* State variables to track stack modifications. */
3646 rtx old_stack_level = 0;
3647 int old_stack_arg_under_construction = 0;
3648 poly_int64 old_pending_adj = 0;
3649 int old_inhibit_defer_pop = inhibit_defer_pop;
3650
3651 /* Some stack pointer alterations we make are performed via
3652 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3653 which we then also need to save/restore along the way. */
3654 poly_int64 old_stack_pointer_delta = 0;
3655
3656 rtx call_fusage;
3657 tree addr = CALL_EXPR_FN (exp);
3658 int i;
3659 /* The alignment of the stack, in bits. */
3660 unsigned HOST_WIDE_INT preferred_stack_boundary;
3661 /* The alignment of the stack, in bytes. */
3662 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3663 /* The static chain value to use for this call. */
3664 rtx static_chain_value;
3665 /* See if this is a "nothrow" function call. */
3666 if (TREE_NOTHROW (exp))
3667 flags |= ECF_NOTHROW;
3668
3669 /* See if we can find a DECL-node for the actual function, and get the
3670 function attributes (flags) from the function decl or type node. */
3671 fndecl = get_callee_fndecl (exp);
3672 if (fndecl)
3673 {
3674 fntype = TREE_TYPE (fndecl);
3675 flags |= flags_from_decl_or_type (fndecl);
3676 return_flags |= decl_return_flags (fndecl);
3677 }
3678 else
3679 {
3680 fntype = TREE_TYPE (TREE_TYPE (addr));
3681 flags |= flags_from_decl_or_type (fntype);
3682 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3683 flags |= ECF_BY_DESCRIPTOR;
3684 }
3685 rettype = TREE_TYPE (exp);
3686
3687 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3688
3689 /* Warn if this value is an aggregate type,
3690 regardless of which calling convention we are using for it. */
3691 if (AGGREGATE_TYPE_P (rettype))
3692 warning (OPT_Waggregate_return, "function call has aggregate value");
3693
3694 /* If the result of a non-looping pure or const function call is
3695 ignored (or void), and none of its arguments are volatile, we can
3696 avoid expanding the call and just evaluate the arguments for
3697 side-effects. */
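/* For example, given
extern int f (int) __attribute__ ((const));
f (i++);
the call itself is dropped, but i is still incremented. */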
3698 if ((flags & (ECF_CONST | ECF_PURE))
3699 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3700 && (ignore || target == const0_rtx
3701 || TYPE_MODE (rettype) == VOIDmode))
3702 {
3703 bool volatilep = false;
3704 tree arg;
3705 call_expr_arg_iterator iter;
3706
3707 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3708 if (TREE_THIS_VOLATILE (arg))
3709 {
3710 volatilep = true;
3711 break;
3712 }
3713
3714 if (! volatilep)
3715 {
3716 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3717 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3718 return const0_rtx;
3719 }
3720 }
3721
3722 #ifdef REG_PARM_STACK_SPACE
3723 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3724 #endif
3725
3726 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3727 && reg_parm_stack_space > 0 && PUSH_ARGS)
3728 must_preallocate = 1;
3729
3730 /* Set up a place to return a structure. */
3731
3732 /* Cater to broken compilers. */
3733 if (aggregate_value_p (exp, fntype))
3734 {
3735 /* This call returns a big structure. */
3736 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3737
3738 #ifdef PCC_STATIC_STRUCT_RETURN
3739 {
3740 pcc_struct_value = 1;
3741 }
3742 #else /* not PCC_STATIC_STRUCT_RETURN */
3743 {
3744 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3745 struct_value_size = -1;
3746
3747 /* Even if it is semantically safe to use the target as the return
3748 slot, it may not be sufficiently aligned for the return type. */
3749 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3750 && target
3751 && MEM_P (target)
3752 /* If rettype is addressable, we may not create a temporary.
3753 If target is properly aligned at runtime and the compiler
3754 just doesn't know about it, it will work fine, otherwise it
3755 will be UB. */
3756 && (TREE_ADDRESSABLE (rettype)
3757 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3758 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3759 MEM_ALIGN (target)))))
3760 structure_value_addr = XEXP (target, 0);
3761 else
3762 {
3763 /* For variable-sized objects, we must be called with a target
3764 specified. If we were to allocate space on the stack here,
3765 we would have no way of knowing when to free it. */
3766 rtx d = assign_temp (rettype, 1, 1);
3767 structure_value_addr = XEXP (d, 0);
3768 target = 0;
3769 }
3770 }
3771 #endif /* not PCC_STATIC_STRUCT_RETURN */
3772 }
3773
3774 /* Figure out the amount to which the stack should be aligned. */
3775 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3776 if (fndecl)
3777 {
3778 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3779 /* Without automatic stack alignment, we can't increase preferred
3780 stack boundary. With automatic stack alignment, it is
3781 unnecessary: unless we can guarantee that all callers will
3782 align the outgoing stack properly, the callee has to align its
3783 stack anyway. */
3784 if (i
3785 && i->preferred_incoming_stack_boundary
3786 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3787 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3788 }
3789
3790 /* Operand 0 is a pointer-to-function; get the type of the function. */
3791 funtype = TREE_TYPE (addr);
3792 gcc_assert (POINTER_TYPE_P (funtype));
3793 funtype = TREE_TYPE (funtype);
3794
3795 /* Count whether there are actual complex arguments that need to be split
3796 into their real and imaginary parts. Munge the type_arg_types
3797 appropriately here as well. */
3798 if (targetm.calls.split_complex_arg)
3799 {
3800 call_expr_arg_iterator iter;
3801 tree arg;
3802 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3803 {
3804 tree type = TREE_TYPE (arg);
3805 if (type && TREE_CODE (type) == COMPLEX_TYPE
3806 && targetm.calls.split_complex_arg (type))
3807 num_complex_actuals++;
3808 }
3809 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3810 }
3811 else
3812 type_arg_types = TYPE_ARG_TYPES (funtype);
3813
3814 if (flags & ECF_MAY_BE_ALLOCA)
3815 cfun->calls_alloca = 1;
3816
3817 /* If struct_value_rtx is 0, it means pass the address
3818 as if it were an extra parameter. Put the argument expression
3819 in structure_value_addr_value. */
3820 if (structure_value_addr && struct_value == 0)
3821 {
3822 /* If structure_value_addr is a REG other than
3823 virtual_outgoing_args_rtx, we can always use it. If it
3824 is not a REG, we must always copy it into a register.
3825 If it is virtual_outgoing_args_rtx, we must copy it to another
3826 register in some cases. */
3827 rtx temp = (!REG_P (structure_value_addr)
3828 || (ACCUMULATE_OUTGOING_ARGS
3829 && stack_arg_under_construction
3830 && structure_value_addr == virtual_outgoing_args_rtx)
3831 ? copy_addr_to_reg (convert_memory_address
3832 (Pmode, structure_value_addr))
3833 : structure_value_addr);
3834
3835 structure_value_addr_value =
3836 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3837 structure_value_addr_parm = 1;
3838 }
3839
3840 /* Count the arguments and set NUM_ACTUALS. */
3841 num_actuals =
3842 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3843
3844 /* Compute number of named args.
3845 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3846
3847 if (type_arg_types != 0)
3848 n_named_args
3849 = (list_length (type_arg_types)
3850 /* Count the struct value address, if it is passed as a parm. */
3851 + structure_value_addr_parm);
3852 else
3853 /* If we know nothing, treat all args as named. */
3854 n_named_args = num_actuals;
3855
3856 /* Start updating where the next arg would go.
3857
3858 On some machines (such as the PA) indirect calls have a different
3859 calling convention than normal calls. The fourth argument in
3860 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3861 or not. */
3862 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3863 args_so_far = pack_cumulative_args (&args_so_far_v);
3864
3865 /* Now possibly adjust the number of named args.
3866 Normally, don't include the last named arg if anonymous args follow.
3867 We do include the last named arg if
3868 targetm.calls.strict_argument_naming() returns nonzero.
3869 (If no anonymous args follow, the result of list_length is actually
3870 one too large. This is harmless.)
3871
3872 If targetm.calls.pretend_outgoing_varargs_named() returns
3873 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3874 this machine will be able to place unnamed args that were passed
3875 in registers into the stack. So treat all args as named. This
3876 allows the insn emission for a specific argument list to be
3877 independent of the function declaration.
3878
3879 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3880 we do not have any reliable way to pass unnamed args in
3881 registers, so we must force them into memory. */
3882
3883 if (type_arg_types != 0
3884 && targetm.calls.strict_argument_naming (args_so_far))
3885 ;
3886 else if (type_arg_types != 0
3887 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3888 /* Don't include the last named arg. */
3889 --n_named_args;
3890 else
3891 /* Treat all args as named. */
3892 n_named_args = num_actuals;
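/* For example, for a call through `int printf (const char *, ...)',
type_arg_types lists just the format string, so n_named_args starts at 1;
on a target that neither uses strict argument naming nor pretends
outgoing varargs are named, the middle case above then drops it to 0. */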
3893
3894 /* Make a vector to hold all the information about each arg. */
3895 args = XCNEWVEC (struct arg_data, num_actuals);
3896
3897 /* Build up entries in the ARGS array, compute the size of the
3898 arguments into ARGS_SIZE, etc. */
3899 initialize_argument_information (num_actuals, args, &args_size,
3900 n_named_args, exp,
3901 structure_value_addr_value, fndecl, fntype,
3902 args_so_far, reg_parm_stack_space,
3903 &old_stack_level, &old_pending_adj,
3904 &must_preallocate, &flags,
3905 &try_tail_call, CALL_FROM_THUNK_P (exp));
3906
3907 if (args_size.var)
3908 must_preallocate = 1;
3909
3910 /* Now make final decision about preallocating stack space. */
3911 must_preallocate = finalize_must_preallocate (must_preallocate,
3912 num_actuals, args,
3913 &args_size);
3914
3915 /* If the structure value address will reference the stack pointer, we
3916 must stabilize it. We don't need to do this if we know that we are
3917 not going to adjust the stack pointer in processing this call. */
3918
3919 if (structure_value_addr
3920 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3921 || reg_mentioned_p (virtual_outgoing_args_rtx,
3922 structure_value_addr))
3923 && (args_size.var
3924 || (!ACCUMULATE_OUTGOING_ARGS
3925 && maybe_ne (args_size.constant, 0))))
3926 structure_value_addr = copy_to_reg (structure_value_addr);
3927
3928 /* Tail calls can make things harder to debug, and we've traditionally
3929 pushed these optimizations into -O2. Don't try if we're already
3930 expanding a call, as that means we're an argument. Don't try if
3931 there are cleanups, as we know there's code to follow the call. */
3932 if (currently_expanding_call++ != 0
3933 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
3934 || args_size.var
3935 || dbg_cnt (tail_call) == false)
3936 try_tail_call = 0;
3937
3938 /* Workaround buggy C/C++ wrappers around Fortran routines with
3939 character(len=constant) arguments if the hidden string length arguments
3940 are passed on the stack; if the callers forget to pass those arguments,
3941 attempting to tail call in such routines leads to stack corruption.
3942 Avoid tail calls in functions where at least one such hidden string
3943 length argument is passed (partially or fully) on the stack in the
3944 caller and the callee needs to pass any arguments on the stack.
3945 See PR90329. */
3946 if (try_tail_call && maybe_ne (args_size.constant, 0))
3947 for (tree arg = DECL_ARGUMENTS (current_function_decl);
3948 arg; arg = DECL_CHAIN (arg))
3949 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
3950 {
3951 subrtx_iterator::array_type array;
3952 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
3953 if (MEM_P (*iter))
3954 {
3955 try_tail_call = 0;
3956 break;
3957 }
3958 }
3959
3960 /* If the user has marked the function as requiring tail-call
3961 optimization, attempt it. */
3962 if (must_tail_call)
3963 try_tail_call = 1;
3964
3965 /* Check the remaining reasons for the tail call optimization to fail. */
3966 if (try_tail_call)
3967 try_tail_call = can_implement_as_sibling_call_p (exp,
3968 structure_value_addr,
3969 funtype,
3970 reg_parm_stack_space,
3971 fndecl,
3972 flags, addr, args_size);
3973
3974 /* Check if caller and callee disagree in promotion of function
3975 return value. */
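/* For instance, if the caller's promoted mode sign-extends the return
value while the callee's zero-extends it, the callee's return register
cannot simply be passed through to our own caller, so the tail call must
be abandoned. */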
3976 if (try_tail_call)
3977 {
3978 machine_mode caller_mode, caller_promoted_mode;
3979 machine_mode callee_mode, callee_promoted_mode;
3980 int caller_unsignedp, callee_unsignedp;
3981 tree caller_res = DECL_RESULT (current_function_decl);
3982
3983 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3984 caller_mode = DECL_MODE (caller_res);
3985 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3986 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3987 caller_promoted_mode
3988 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3989 &caller_unsignedp,
3990 TREE_TYPE (current_function_decl), 1);
3991 callee_promoted_mode
3992 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3993 &callee_unsignedp,
3994 funtype, 1);
3995 if (caller_mode != VOIDmode
3996 && (caller_promoted_mode != callee_promoted_mode
3997 || ((caller_mode != caller_promoted_mode
3998 || callee_mode != callee_promoted_mode)
3999 && (caller_unsignedp != callee_unsignedp
4000 || partial_subreg_p (caller_mode, callee_mode)))))
4001 {
4002 try_tail_call = 0;
4003 maybe_complain_about_tail_call (exp,
4004 "caller and callee disagree in"
4005 " promotion of function"
4006 " return value");
4007 }
4008 }
4009
4010 /* Ensure current function's preferred stack boundary is at least
4011 what we need. Stack alignment may also increase preferred stack
4012 boundary. */
4013 for (i = 0; i < num_actuals; i++)
4014 if (reg_parm_stack_space > 0
4015 || args[i].reg == 0
4016 || args[i].partial != 0
4017 || args[i].pass_on_stack)
4018 update_stack_alignment_for_call (&args[i].locate);
4019 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
4020 crtl->preferred_stack_boundary = preferred_stack_boundary;
4021 else
4022 preferred_stack_boundary = crtl->preferred_stack_boundary;
4023
4024 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4025
4026 if (flag_callgraph_info)
4027 record_final_call (fndecl, EXPR_LOCATION (exp));
4028
4029 /* We want to make two insn chains; one for a sibling call, the other
4030 for a normal call. We will select one of the two chains after
4031 initial RTL generation is complete. */
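/* Pass 0 builds the sibling call sequence and pass 1 the normal one; if
anything sets sibcall_failure during pass 0, the tail call chain is
discarded at the bottom of the loop and the normal chain is used
instead. */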
4032 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
4033 {
4034 int sibcall_failure = 0;
4035 /* We want to emit any pending stack adjustments before the tail
4036 recursion "call". That way we know any adjustment after the tail
4037 recursion call can be ignored if we indeed use the tail
4038 call expansion. */
4039 saved_pending_stack_adjust save;
4040 rtx_insn *insns, *before_call, *after_args;
4041 rtx next_arg_reg;
4042
4043 if (pass == 0)
4044 {
4045 /* State variables we need to save and restore between
4046 iterations. */
4047 save_pending_stack_adjust (&save);
4048 }
4049 if (pass)
4050 flags &= ~ECF_SIBCALL;
4051 else
4052 flags |= ECF_SIBCALL;
4053
4054 /* Other state variables that we must reinitialize each time
4055 through the loop (that are not initialized by the loop itself). */
4056 argblock = 0;
4057 call_fusage = 0;
4058
4059 /* Start a new sequence for the normal call case.
4060
4061 From this point on, if the sibling call fails, we want to set
4062 sibcall_failure instead of continuing the loop. */
4063 start_sequence ();
4064
4065 /* Don't let pending stack adjusts add up to too much.
4066 Also, do all pending adjustments now if there is any chance
4067 this might be a call to alloca or if we are expanding a sibling
4068 call sequence.
4069 Also do the adjustments before a throwing call, otherwise
4070 exception handling can fail; PR 19225. */
4071 if (maybe_ge (pending_stack_adjust, 32)
4072 || (maybe_ne (pending_stack_adjust, 0)
4073 && (flags & ECF_MAY_BE_ALLOCA))
4074 || (maybe_ne (pending_stack_adjust, 0)
4075 && flag_exceptions && !(flags & ECF_NOTHROW))
4076 || pass == 0)
4077 do_pending_stack_adjust ();
4078
4079 /* Precompute any arguments as needed. */
4080 if (pass)
4081 precompute_arguments (num_actuals, args);
4082
4083 /* Now we are about to start emitting insns that can be deleted
4084 if a libcall is deleted. */
4085 if (pass && (flags & ECF_MALLOC))
4086 start_sequence ();
4087
4088 if (pass == 0
4089 && crtl->stack_protect_guard
4090 && targetm.stack_protect_runtime_enabled_p ())
4091 stack_protect_epilogue ();
4092
4093 adjusted_args_size = args_size;
4094 /* Compute the actual size of the argument block required. The variable
4095 and constant sizes must be combined, the size may have to be rounded,
4096 and there may be a minimum required size. When generating a sibcall
4097 pattern, do not round up, since we'll be re-using whatever space our
4098 caller provided. */
4099 unadjusted_args_size
4100 = compute_argument_block_size (reg_parm_stack_space,
4101 &adjusted_args_size,
4102 fndecl, fntype,
4103 (pass == 0 ? 0
4104 : preferred_stack_boundary));
4105
4106 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4107
4108 /* The argument block when performing a sibling call is the
4109 incoming argument block. */
4110 if (pass == 0)
4111 {
4112 argblock = crtl->args.internal_arg_pointer;
4113 if (STACK_GROWS_DOWNWARD)
4114 argblock
4115 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4116 else
4117 argblock
4118 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4119
4120 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4121 stored_args_map = sbitmap_alloc (map_size);
4122 bitmap_clear (stored_args_map);
4123 stored_args_watermark = HOST_WIDE_INT_M1U;
4124 }
4125
4126 /* If we have no actual push instructions, or shouldn't use them,
4127 make space for all args right now. */
4128 else if (adjusted_args_size.var != 0)
4129 {
4130 if (old_stack_level == 0)
4131 {
4132 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4133 old_stack_pointer_delta = stack_pointer_delta;
4134 old_pending_adj = pending_stack_adjust;
4135 pending_stack_adjust = 0;
4136 /* stack_arg_under_construction says whether a stack arg is
4137 being constructed at the old stack level. Pushing the stack
4138 gets a clean outgoing argument block. */
4139 old_stack_arg_under_construction = stack_arg_under_construction;
4140 stack_arg_under_construction = 0;
4141 }
4142 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
4143 if (flag_stack_usage_info)
4144 current_function_has_unbounded_dynamic_stack_size = 1;
4145 }
4146 else
4147 {
4148 /* Note that we must go through the motions of allocating an argument
4149 block even if the size is zero because we may be storing args
4150 in the area reserved for register arguments, which may be part of
4151 the stack frame. */
4152
4153 poly_int64 needed = adjusted_args_size.constant;
4154
4155 /* Store the maximum argument space used. It will be pushed by
4156 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4157 checking). */
4158
4159 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4160 needed);
4161
4162 if (must_preallocate)
4163 {
4164 if (ACCUMULATE_OUTGOING_ARGS)
4165 {
4166 /* Since the stack pointer will never be pushed, it is
4167 possible for the evaluation of a parm to clobber
4168 something we have already written to the stack.
4169 Since most function calls on RISC machines do not use
4170 the stack, this is uncommon, but must work correctly.
4171
4172 Therefore, we save any area of the stack that was already
4173 written and that we are using. Here we set up to do this
4174 by making a new stack usage map from the old one. The
4175 actual save will be done by store_one_arg.
4176
4177 Another approach might be to try to reorder the argument
4178 evaluations to avoid this conflicting stack usage. */
4179
4180 /* Since we will be writing into the entire argument area,
4181 the map must be allocated for its entire size, not just
4182 the part that is the responsibility of the caller. */
4183 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4184 needed += reg_parm_stack_space;
4185
4186 poly_int64 limit = needed;
4187 if (ARGS_GROW_DOWNWARD)
4188 limit += 1;
4189
4190 /* For polynomial sizes, this is the maximum possible
4191 size needed for arguments with a constant size
4192 and offset. */
4193 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4194 highest_outgoing_arg_in_use
4195 = MAX (initial_highest_arg_in_use, const_limit);
4196
4197 free (stack_usage_map_buf);
4198 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4199 stack_usage_map = stack_usage_map_buf;
4200
4201 if (initial_highest_arg_in_use)
4202 memcpy (stack_usage_map, initial_stack_usage_map,
4203 initial_highest_arg_in_use);
4204
4205 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4206 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4207 (highest_outgoing_arg_in_use
4208 - initial_highest_arg_in_use));
4209 needed = 0;
4210
4211 /* The address of the outgoing argument list must not be
4212 copied to a register here, because argblock would be left
4213 pointing to the wrong place after the call to
4214 allocate_dynamic_stack_space below. */
4215
4216 argblock = virtual_outgoing_args_rtx;
4217 }
4218 else
4219 {
4220 /* Try to reuse some or all of the pending_stack_adjust
4221 to get this space. */
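/* For instance, when this caller still owes a stack pop from a
previous call, that pending adjustment can be folded into the present
allocation so the stack pointer moves once instead of twice. */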
4222 if (inhibit_defer_pop == 0
4223 && (combine_pending_stack_adjustment_and_call
4224 (&needed,
4225 unadjusted_args_size,
4226 &adjusted_args_size,
4227 preferred_unit_stack_boundary)))
4228 {
4229 /* combine_pending_stack_adjustment_and_call computes
4230 an adjustment before the arguments are allocated.
4231 Account for them and see whether or not the stack
4232 needs to go up or down. */
4233 needed = unadjusted_args_size - needed;
4234
4235 /* Checked by
4236 combine_pending_stack_adjustment_and_call. */
4237 gcc_checking_assert (ordered_p (needed, 0));
4238 if (maybe_lt (needed, 0))
4239 {
4240 /* We're releasing stack space. */
4241 /* ??? We can avoid any adjustment at all if we're
4242 already aligned. FIXME. */
4243 pending_stack_adjust = -needed;
4244 do_pending_stack_adjust ();
4245 needed = 0;
4246 }
4247 else
4248 /* We need to allocate space. We'll do that in
4249 push_block below. */
4250 pending_stack_adjust = 0;
4251 }
4252
4253 /* Special case this because overhead of `push_block' in
4254 this case is non-trivial. */
4255 if (known_eq (needed, 0))
4256 argblock = virtual_outgoing_args_rtx;
4257 else
4258 {
4259 rtx needed_rtx = gen_int_mode (needed, Pmode);
4260 argblock = push_block (needed_rtx, 0, 0);
4261 if (ARGS_GROW_DOWNWARD)
4262 argblock = plus_constant (Pmode, argblock, needed);
4263 }
4264
4265 /* We only really need to call `copy_to_reg' in the case
4266 where push insns are going to be used to pass ARGBLOCK
4267 to a function call in ARGS. In that case, the stack
4268 pointer changes value from the allocation point to the
4269 call point, and hence the value of
4270 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4271 as well always do it. */
4272 argblock = copy_to_reg (argblock);
4273 }
4274 }
4275 }
4276
4277 if (ACCUMULATE_OUTGOING_ARGS)
4278 {
4279 /* The save/restore code in store_one_arg handles all
4280 cases except one: a constructor call (including a C
4281 function returning a BLKmode struct) to initialize
4282 an argument. */
4283 if (stack_arg_under_construction)
4284 {
4285 rtx push_size
4286 = (gen_int_mode
4287 (adjusted_args_size.constant
4288 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4289 : TREE_TYPE (fndecl))
4290 ? 0 : reg_parm_stack_space), Pmode));
4291 if (old_stack_level == 0)
4292 {
4293 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4294 old_stack_pointer_delta = stack_pointer_delta;
4295 old_pending_adj = pending_stack_adjust;
4296 pending_stack_adjust = 0;
4297 /* stack_arg_under_construction says whether a stack
4298 arg is being constructed at the old stack level.
4299 Pushing the stack gets a clean outgoing argument
4300 block. */
4301 old_stack_arg_under_construction
4302 = stack_arg_under_construction;
4303 stack_arg_under_construction = 0;
4304 /* Make a new map for the new argument list. */
4305 free (stack_usage_map_buf);
4306 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
4307 stack_usage_map = stack_usage_map_buf;
4308 highest_outgoing_arg_in_use = 0;
4309 stack_usage_watermark = HOST_WIDE_INT_M1U;
4310 }
4311 /* We can pass TRUE as the last argument because we just
4312 saved the stack pointer and will restore it right after
4313 the call. */
4314 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4315 -1, true);
4316 }
4317
4318 /* If argument evaluation might modify the stack pointer,
4319 copy the address of the argument list to a register. */
4320 for (i = 0; i < num_actuals; i++)
4321 if (args[i].pass_on_stack)
4322 {
4323 argblock = copy_addr_to_reg (argblock);
4324 break;
4325 }
4326 }
4327
4328 compute_argument_addresses (args, argblock, num_actuals);
4329
4330 /* Stack is properly aligned, pops can't safely be deferred during
4331 the evaluation of the arguments. */
4332 NO_DEFER_POP;
4333
4334 /* Precompute all register parameters. It isn't safe to compute
4335 anything once we have started filling any specific hard regs.
4336 TLS symbols sometimes need a call to resolve. Precompute
4337 register parameters before any stack pointer manipulation
4338 to avoid unaligned stack in the called function. */
4339 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4340
4341 OK_DEFER_POP;
4342
4343 /* Perform stack alignment before the first push (the last arg). */
4344 if (argblock == 0
4345 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4346 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4347 {
4348 /* When the stack adjustment is pending, we get better code
4349 by combining the adjustments. */
4350 if (maybe_ne (pending_stack_adjust, 0)
4351 && ! inhibit_defer_pop
4352 && (combine_pending_stack_adjustment_and_call
4353 (&pending_stack_adjust,
4354 unadjusted_args_size,
4355 &adjusted_args_size,
4356 preferred_unit_stack_boundary)))
4357 do_pending_stack_adjust ();
4358 else if (argblock == 0)
4359 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4360 - unadjusted_args_size,
4361 Pmode));
4362 }
4363 /* Now that the stack is properly aligned, pops can't safely
4364 be deferred during the evaluation of the arguments. */
4365 NO_DEFER_POP;
4366
4367 /* Record the maximum pushed stack space size. We need to delay
4368 doing it this far to take into account the optimization done
4369 by combine_pending_stack_adjustment_and_call. */
4370 if (flag_stack_usage_info
4371 && !ACCUMULATE_OUTGOING_ARGS
4372 && pass
4373 && adjusted_args_size.var == 0)
4374 {
4375 poly_int64 pushed = (adjusted_args_size.constant
4376 + pending_stack_adjust);
4377 current_function_pushed_stack_size
4378 = upper_bound (current_function_pushed_stack_size, pushed);
4379 }
4380
4381 funexp = rtx_for_function_call (fndecl, addr);
4382
4383 if (CALL_EXPR_STATIC_CHAIN (exp))
4384 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4385 else
4386 static_chain_value = 0;
4387
4388 #ifdef REG_PARM_STACK_SPACE
4389 /* Save the fixed argument area if it's part of the caller's frame and
4390 is clobbered by argument setup for this call. */
4391 if (ACCUMULATE_OUTGOING_ARGS && pass)
4392 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4393 &low_to_save, &high_to_save);
4394 #endif
4395
4396 /* Now store (and compute if necessary) all non-register parms.
4397 These come before register parms, since they can require block-moves,
4398 which could clobber the registers used for register parms.
4399 Parms which have partial registers are not stored here,
4400 but we do preallocate space here if they want that. */
4401
4402 for (i = 0; i < num_actuals; i++)
4403 {
4404 if (args[i].reg == 0 || args[i].pass_on_stack)
4405 {
4406 rtx_insn *before_arg = get_last_insn ();
4407
4408 /* We don't allow passing huge (> 2^30 B) arguments
4409 by value. It would cause an overflow later on. */
4410 if (constant_lower_bound (adjusted_args_size.constant)
4411 >= (1 << (HOST_BITS_PER_INT - 2)))
4412 {
4413 sorry ("passing too large argument on stack");
4414 continue;
4415 }
4416
4417 if (store_one_arg (&args[i], argblock, flags,
4418 adjusted_args_size.var != 0,
4419 reg_parm_stack_space)
4420 || (pass == 0
4421 && check_sibcall_argument_overlap (before_arg,
4422 &args[i], 1)))
4423 sibcall_failure = 1;
4424 }
4425
4426 if (args[i].stack)
4427 call_fusage
4428 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4429 gen_rtx_USE (VOIDmode, args[i].stack),
4430 call_fusage);
4431 }
4432
4433 /* If we have a parm that is passed in registers but not in memory
4434 and whose alignment does not permit a direct copy into registers,
4435 make a group of pseudos that correspond to each register that we
4436 will later fill. */
4437 if (STRICT_ALIGNMENT)
4438 store_unaligned_arguments_into_pseudos (args, num_actuals);
4439
4440 /* Now store any partially-in-registers parm.
4441 This is the last place a block-move can happen. */
4442 if (reg_parm_seen)
4443 for (i = 0; i < num_actuals; i++)
4444 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4445 {
4446 rtx_insn *before_arg = get_last_insn ();
4447
4448 /* On targets with weird calling conventions (e.g. PA) it's
4449 hard to ensure that all cases of argument overlap between
4450 stack and registers work. Play it safe and bail out. */
4451 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4452 {
4453 sibcall_failure = 1;
4454 break;
4455 }
4456
4457 if (store_one_arg (&args[i], argblock, flags,
4458 adjusted_args_size.var != 0,
4459 reg_parm_stack_space)
4460 || (pass == 0
4461 && check_sibcall_argument_overlap (before_arg,
4462 &args[i], 1)))
4463 sibcall_failure = 1;
4464 }
4465
4466 bool any_regs = false;
4467 for (i = 0; i < num_actuals; i++)
4468 if (args[i].reg != NULL_RTX)
4469 {
4470 any_regs = true;
4471 targetm.calls.call_args (args[i].reg, funtype);
4472 }
4473 if (!any_regs)
4474 targetm.calls.call_args (pc_rtx, funtype);
4475
4476 /* Figure out the register where the value, if any, will come back. */
4477 valreg = 0;
4478 if (TYPE_MODE (rettype) != VOIDmode
4479 && ! structure_value_addr)
4480 {
4481 if (pcc_struct_value)
4482 valreg = hard_function_value (build_pointer_type (rettype),
4483 fndecl, NULL, (pass == 0));
4484 else
4485 valreg = hard_function_value (rettype, fndecl, fntype,
4486 (pass == 0));
4487
4488 /* If VALREG is a PARALLEL whose first member has a zero
4489 offset, use that. This is for targets such as m68k that
4490 return the same value in multiple places. */
4491 if (GET_CODE (valreg) == PARALLEL)
4492 {
4493 rtx elem = XVECEXP (valreg, 0, 0);
4494 rtx where = XEXP (elem, 0);
4495 rtx offset = XEXP (elem, 1);
4496 if (offset == const0_rtx
4497 && GET_MODE (where) == GET_MODE (valreg))
4498 valreg = where;
4499 }
4500 }
4501
4502 /* If register arguments require space on the stack and stack space
4503 was not preallocated, allocate stack space here for arguments
4504 passed in registers. */
4505 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4506 && !ACCUMULATE_OUTGOING_ARGS
4507 && must_preallocate == 0 && reg_parm_stack_space > 0)
4508 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4509
4510 /* Pass the function the address in which to return a
4511 structure value. */
4512 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4513 {
4514 structure_value_addr
4515 = convert_memory_address (Pmode, structure_value_addr);
4516 emit_move_insn (struct_value,
4517 force_reg (Pmode,
4518 force_operand (structure_value_addr,
4519 NULL_RTX)));
4520
4521 if (REG_P (struct_value))
4522 use_reg (&call_fusage, struct_value);
4523 }
4524
4525 after_args = get_last_insn ();
4526 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4527 static_chain_value, &call_fusage,
4528 reg_parm_seen, flags);
4529
4530 load_register_parameters (args, num_actuals, &call_fusage, flags,
4531 pass == 0, &sibcall_failure);
4532
4533 /* Save a pointer to the last insn before the call, so that we can
4534 later safely search backwards to find the CALL_INSN. */
4535 before_call = get_last_insn ();
4536
4537 /* Set up next argument register. For sibling calls on machines
4538 with register windows this should be the incoming register. */
4539 if (pass == 0)
4540 next_arg_reg = targetm.calls.function_incoming_arg
4541 (args_so_far, function_arg_info::end_marker ());
4542 else
4543 next_arg_reg = targetm.calls.function_arg
4544 (args_so_far, function_arg_info::end_marker ());
4545
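/* If the callee is known to return one of its arguments unchanged
(ERF_RETURNS_ARG, e.g. a memcpy-style function returning its first
argument), record a SET of VALREG from that argument's register in
CALL_FUSAGE. The index is flipped below because args[] is kept in push
order, not source order. */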
4546 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4547 {
4548 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4549 arg_nr = num_actuals - arg_nr - 1;
4550 if (arg_nr >= 0
4551 && arg_nr < num_actuals
4552 && args[arg_nr].reg
4553 && valreg
4554 && REG_P (valreg)
4555 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4556 call_fusage
4557 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4558 gen_rtx_SET (valreg, args[arg_nr].reg),
4559 call_fusage);
4560 }
4561 /* All arguments and registers used for the call must be set up by
4562 now! */
4563
4564 /* Stack must be properly aligned now. */
4565 gcc_assert (!pass
4566 || multiple_p (stack_pointer_delta,
4567 preferred_unit_stack_boundary));
4568
4569 /* Generate the actual call instruction. */
4570 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4571 adjusted_args_size.constant, struct_value_size,
4572 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4573 flags, args_so_far);
4574
4575 if (flag_ipa_ra)
4576 {
4577 rtx_call_insn *last;
4578 rtx datum = NULL_RTX;
4579 if (fndecl != NULL_TREE)
4580 {
4581 datum = XEXP (DECL_RTL (fndecl), 0);
4582 gcc_assert (datum != NULL_RTX
4583 && GET_CODE (datum) == SYMBOL_REF);
4584 }
4585 last = last_call_insn ();
4586 add_reg_note (last, REG_CALL_DECL, datum);
4587 }
4588
4589 /* If the call setup or the call itself overlaps with anything
4590 of the argument setup we probably clobbered our call address.
4591 In that case we can't do sibcalls. */
4592 if (pass == 0
4593 && check_sibcall_argument_overlap (after_args, 0, 0))
4594 sibcall_failure = 1;
4595
4596 /* If a non-BLKmode value is returned at the most significant end
4597 of a register, shift the register right by the appropriate amount
4598 and update VALREG accordingly. BLKmode values are handled by the
4599 group load/store machinery below. */
4600 if (!structure_value_addr
4601 && !pcc_struct_value
4602 && TYPE_MODE (rettype) != VOIDmode
4603 && TYPE_MODE (rettype) != BLKmode
4604 && REG_P (valreg)
4605 && targetm.calls.return_in_msb (rettype))
4606 {
4607 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4608 sibcall_failure = 1;
4609 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4610 }
4611
4612 if (pass && (flags & ECF_MALLOC))
4613 {
4614 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4615 rtx_insn *last, *insns;
4616
4617 /* The return value from a malloc-like function is a pointer. */
4618 if (TREE_CODE (rettype) == POINTER_TYPE)
4619 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4620
4621 emit_move_insn (temp, valreg);
4622
4623 /* The return value from a malloc-like function cannot alias
4624 anything else. */
4625 last = get_last_insn ();
4626 add_reg_note (last, REG_NOALIAS, temp);
4627
4628 /* Write out the sequence. */
4629 insns = get_insns ();
4630 end_sequence ();
4631 emit_insn (insns);
4632 valreg = temp;
4633 }
4634
4635 /* For calls to `setjmp', etc., inform
4636 function.c:setjmp_warnings that it should complain if
4637 nonvolatile values are live. For functions that cannot
4638 return, inform flow that control does not fall through. */
4639
4640 if ((flags & ECF_NORETURN) || pass == 0)
4641 {
4642 /* The barrier must be emitted
4643 immediately after the CALL_INSN. Some ports emit more
4644 than just a CALL_INSN above, so we must search for it here. */
4645
4646 rtx_insn *last = get_last_insn ();
4647 while (!CALL_P (last))
4648 {
4649 last = PREV_INSN (last);
4650 /* There was no CALL_INSN? */
4651 gcc_assert (last != before_call);
4652 }
4653
4654 emit_barrier_after (last);
4655
4656 /* Stack adjustments after a noreturn call are dead code.
4657 However when NO_DEFER_POP is in effect, we must preserve
4658 stack_pointer_delta. */
4659 if (inhibit_defer_pop == 0)
4660 {
4661 stack_pointer_delta = old_stack_allocated;
4662 pending_stack_adjust = 0;
4663 }
4664 }
4665
4666 /* If value type not void, return an rtx for the value. */
4667
4668 if (TYPE_MODE (rettype) == VOIDmode
4669 || ignore)
4670 target = const0_rtx;
4671 else if (structure_value_addr)
4672 {
4673 if (target == 0 || !MEM_P (target))
4674 {
4675 target
4676 = gen_rtx_MEM (TYPE_MODE (rettype),
4677 memory_address (TYPE_MODE (rettype),
4678 structure_value_addr));
4679 set_mem_attributes (target, rettype, 1);
4680 }
4681 }
4682 else if (pcc_struct_value)
4683 {
4684 /* This is the special C++ case where we need to
4685 know what the true target was. We take care to
4686 never use this value more than once in one expression. */
4687 target = gen_rtx_MEM (TYPE_MODE (rettype),
4688 copy_to_reg (valreg));
4689 set_mem_attributes (target, rettype, 1);
4690 }
4691 /* Handle calls that return values in multiple non-contiguous locations.
4692 The Irix 6 ABI has examples of this. */
4693 else if (GET_CODE (valreg) == PARALLEL)
4694 {
4695 if (target == 0)
4696 target = emit_group_move_into_temps (valreg);
4697 else if (rtx_equal_p (target, valreg))
4698 ;
4699 else if (GET_CODE (target) == PARALLEL)
4700 /* Handle the result of an emit_group_move_into_temps
4701 call in the previous pass. */
4702 emit_group_move (target, valreg);
4703 else
4704 emit_group_store (target, valreg, rettype,
4705 int_size_in_bytes (rettype));
4706 }
4707 else if (target
4708 && GET_MODE (target) == TYPE_MODE (rettype)
4709 && GET_MODE (target) == GET_MODE (valreg))
4710 {
4711 bool may_overlap = false;
4712
4713 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4714 reg to a plain register. */
4715 if (!REG_P (target) || HARD_REGISTER_P (target))
4716 valreg = avoid_likely_spilled_reg (valreg);
4717
4718 /* If TARGET is a MEM in the argument area, and we have
4719 saved part of the argument area, then we can't store
4720 directly into TARGET as it may get overwritten when we
4721 restore the argument save area below. Don't work too
4722 hard though and simply force TARGET to a register if it
4723 is a MEM; the optimizer is quite likely to sort it out. */
4724 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4725 for (i = 0; i < num_actuals; i++)
4726 if (args[i].save_area)
4727 {
4728 may_overlap = true;
4729 break;
4730 }
4731
4732 if (may_overlap)
4733 target = copy_to_reg (valreg);
4734 else
4735 {
4736 /* TARGET and VALREG cannot be equal at this point
4737 because the latter would not have
4738 REG_FUNCTION_VALUE_P true, while the former would if
4739 it were referring to the same register.
4740
4741 If they refer to the same register, this move will be
4742 a no-op, except when function inlining is being
4743 done. */
4744 emit_move_insn (target, valreg);
4745
4746 /* If we are setting a MEM, this code must be executed.
4747 Since it is emitted after the call insn, sibcall
4748 optimization cannot be performed in that case. */
4749 if (MEM_P (target))
4750 sibcall_failure = 1;
4751 }
4752 }
4753 else
4754 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4755
4756 /* If we promoted this return value, make the proper SUBREG.
4757 TARGET might be const0_rtx here, so be careful. */
4758 if (REG_P (target)
4759 && TYPE_MODE (rettype) != BLKmode
4760 && GET_MODE (target) != TYPE_MODE (rettype))
4761 {
4762 tree type = rettype;
4763 int unsignedp = TYPE_UNSIGNED (type);
4764 machine_mode pmode;
4765
4766 /* Ensure we promote as expected, and get the new unsignedness. */
4767 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4768 funtype, 1);
4769 gcc_assert (GET_MODE (target) == pmode);
4770
4771 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4772 GET_MODE (target));
4773 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4774 SUBREG_PROMOTED_VAR_P (target) = 1;
4775 SUBREG_PROMOTED_SET (target, unsignedp);
4776 }
4777
4778 /* If size of args is variable or this was a constructor call for a stack
4779 argument, restore saved stack-pointer value. */
4780
4781 if (old_stack_level)
4782 {
4783 rtx_insn *prev = get_last_insn ();
4784
4785 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4786 stack_pointer_delta = old_stack_pointer_delta;
4787
4788 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4789
4790 pending_stack_adjust = old_pending_adj;
4791 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4792 stack_arg_under_construction = old_stack_arg_under_construction;
4793 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4794 stack_usage_map = initial_stack_usage_map;
4795 stack_usage_watermark = initial_stack_usage_watermark;
4796 sibcall_failure = 1;
4797 }
4798 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4799 {
4800 #ifdef REG_PARM_STACK_SPACE
4801 if (save_area)
4802 restore_fixed_argument_area (save_area, argblock,
4803 high_to_save, low_to_save);
4804 #endif
4805
4806 /* If we saved any argument areas, restore them. */
4807 for (i = 0; i < num_actuals; i++)
4808 if (args[i].save_area)
4809 {
4810 machine_mode save_mode = GET_MODE (args[i].save_area);
4811 rtx stack_area
4812 = gen_rtx_MEM (save_mode,
4813 memory_address (save_mode,
4814 XEXP (args[i].stack_slot, 0)));
4815
4816 if (save_mode != BLKmode)
4817 emit_move_insn (stack_area, args[i].save_area);
4818 else
4819 emit_block_move (stack_area, args[i].save_area,
4820 (gen_int_mode
4821 (args[i].locate.size.constant, Pmode)),
4822 BLOCK_OP_CALL_PARM);
4823 }
4824
4825 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4826 stack_usage_map = initial_stack_usage_map;
4827 stack_usage_watermark = initial_stack_usage_watermark;
4828 }
4829
4830 /* If this was alloca, record the new stack level. */
4831 if (flags & ECF_MAY_BE_ALLOCA)
4832 record_new_stack_level ();
4833
4834 /* Free up storage we no longer need. */
4835 for (i = 0; i < num_actuals; ++i)
4836 free (args[i].aligned_regs);
4837
4838 targetm.calls.end_call_args ();
4839
4840 insns = get_insns ();
4841 end_sequence ();
4842
4843 if (pass == 0)
4844 {
4845 tail_call_insns = insns;
4846
4847 /* Restore the pending stack adjustment now that we have
4848 finished generating the sibling call sequence. */
4849
4850 restore_pending_stack_adjust (&save);
4851
4852 /* Prepare arg structure for next iteration. */
4853 for (i = 0; i < num_actuals; i++)
4854 {
4855 args[i].value = 0;
4856 args[i].aligned_regs = 0;
4857 args[i].stack = 0;
4858 }
4859
4860 sbitmap_free (stored_args_map);
4861 internal_arg_pointer_exp_state.scan_start = NULL;
4862 internal_arg_pointer_exp_state.cache.release ();
4863 }
4864 else
4865 {
4866 normal_call_insns = insns;
4867
4868 /* Verify that we've deallocated all the stack we used. */
4869 gcc_assert ((flags & ECF_NORETURN)
4870 || known_eq (old_stack_allocated,
4871 stack_pointer_delta
4872 - pending_stack_adjust));
4873 }
4874
4875 /* If something prevents making this a sibling call,
4876 zero out the sequence. */
4877 if (sibcall_failure)
4878 tail_call_insns = NULL;
4879 else
4880 break;
4881 }
4882
4883 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4884 arguments too, as the argument area is now clobbered by the call. */
4885 if (tail_call_insns)
4886 {
4887 emit_insn (tail_call_insns);
4888 crtl->tail_call_emit = true;
4889 }
4890 else
4891 {
4892 emit_insn (normal_call_insns);
4893 if (try_tail_call)
4894 /* Ideally we'd emit a message for all of the ways that it could
4895 have failed. */
4896 maybe_complain_about_tail_call (exp, "tail call production failed");
4897 }
4898
4899 currently_expanding_call--;
4900
4901 free (stack_usage_map_buf);
4902 free (args);
4903 return target;
4904 }
4905
4906 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4907 this function's incoming arguments.
4908
4909 At the start of RTL generation we know the only REG_EQUIV notes
4910 in the rtl chain are those for incoming arguments, so we can look
4911 for REG_EQUIV notes between the start of the function and the
4912 NOTE_INSN_FUNCTION_BEG.
4913
4914 This is (slight) overkill. We could keep track of the highest
4915 argument we clobber and be more selective in removing notes, but it
4916 does not seem to be worth the effort. */
4917
4918 void
4919 fixup_tail_calls (void)
4920 {
4921 rtx_insn *insn;
4922
4923 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4924 {
4925 rtx note;
4926
4927 /* There are never REG_EQUIV notes for the incoming arguments
4928 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4929 if (NOTE_P (insn)
4930 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4931 break;
4932
4933 note = find_reg_note (insn, REG_EQUIV, 0);
4934 if (note)
4935 remove_note (insn, note);
4936 note = find_reg_note (insn, REG_EQUIV, 0);
4937 gcc_assert (!note);
4938 }
4939 }
4940
4941 /* Traverse a list of TYPES and expand all complex types into their
4942 components. */
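/* For example, given the prototype
void f (_Complex double c, int i);
the TYPE_ARG_TYPES list (complex double, int) becomes
(double, double, int) on targets whose split_complex_arg hook accepts
complex double. */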
4943 static tree
4944 split_complex_types (tree types)
4945 {
4946 tree p;
4947
4948 /* Before allocating memory, check for the common case of no complex types. */
4949 for (p = types; p; p = TREE_CHAIN (p))
4950 {
4951 tree type = TREE_VALUE (p);
4952 if (TREE_CODE (type) == COMPLEX_TYPE
4953 && targetm.calls.split_complex_arg (type))
4954 goto found;
4955 }
4956 return types;
4957
4958 found:
4959 types = copy_list (types);
4960
4961 for (p = types; p; p = TREE_CHAIN (p))
4962 {
4963 tree complex_type = TREE_VALUE (p);
4964
4965 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4966 && targetm.calls.split_complex_arg (complex_type))
4967 {
4968 tree next, imag;
4969
4970 /* Rewrite complex type with component type. */
4971 TREE_VALUE (p) = TREE_TYPE (complex_type);
4972 next = TREE_CHAIN (p);
4973
4974 /* Add another component type for the imaginary part. */
4975 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4976 TREE_CHAIN (p) = imag;
4977 TREE_CHAIN (imag) = next;
4978
4979 /* Skip the newly created node. */
4980 p = TREE_CHAIN (p);
4981 }
4982 }
4983
4984 return types;
4985 }
4986 \f
4987 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4988 for a value of mode OUTMODE,
4989 with NARGS different arguments, passed as ARGS.
4990 Store the return value if RETVAL is nonzero: store it in VALUE if
4991 VALUE is nonnull, otherwise pick a convenient location. In either
4992 case return the location of the stored value.
4993
4994 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4995 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4996 other types of library calls. */
4997
4998 rtx
4999 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
5000 enum libcall_type fn_type,
5001 machine_mode outmode, int nargs, rtx_mode_t *args)
5002 {
5003 /* Total size in bytes of all the stack-parms scanned so far. */
5004 struct args_size args_size;
5005 /* Size of arguments before any adjustments (such as rounding). */
5006 struct args_size original_args_size;
5007 int argnum;
5008 rtx fun;
5009 /* TODO: choose the correct decl type of orgfun. Sadly this information
5010 isn't present here, so we default to the native calling ABI. */
5011 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls presumably use the host calling ABI. */
5012 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls presumably use the host calling ABI. */
5013 int count;
5014 rtx argblock = 0;
5015 CUMULATIVE_ARGS args_so_far_v;
5016 cumulative_args_t args_so_far;
5017 struct arg
5018 {
5019 rtx value;
5020 machine_mode mode;
5021 rtx reg;
5022 int partial;
5023 struct locate_and_pad_arg_data locate;
5024 rtx save_area;
5025 };
5026 struct arg *argvec;
5027 int old_inhibit_defer_pop = inhibit_defer_pop;
5028 rtx call_fusage = 0;
5029 rtx mem_value = 0;
5030 rtx valreg;
5031 int pcc_struct_value = 0;
5032 poly_int64 struct_value_size = 0;
5033 int flags;
5034 int reg_parm_stack_space = 0;
5035 poly_int64 needed;
5036 rtx_insn *before_call;
5037 bool have_push_fusage;
5038 tree tfom; /* type_for_mode (outmode, 0) */
5039
5040 #ifdef REG_PARM_STACK_SPACE
5041 /* Define the boundary of the register parm stack space that needs to be
5042 saved, if any. */
5043 int low_to_save = 0, high_to_save = 0;
5044 rtx save_area = 0; /* Place that it is saved. */
5045 #endif
5046
5047 /* Size of the stack reserved for parameter registers. */
5048 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
5049 char *initial_stack_usage_map = stack_usage_map;
5050 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
5051 char *stack_usage_map_buf = NULL;
5052
5053 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
5054
5055 #ifdef REG_PARM_STACK_SPACE
5056 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
5057 #endif
5058
5059 /* By default, library functions cannot throw. */
5060 flags = ECF_NOTHROW;
5061
5062 switch (fn_type)
5063 {
5064 case LCT_NORMAL:
5065 break;
5066 case LCT_CONST:
5067 flags |= ECF_CONST;
5068 break;
5069 case LCT_PURE:
5070 flags |= ECF_PURE;
5071 break;
5072 case LCT_NORETURN:
5073 flags |= ECF_NORETURN;
5074 break;
5075 case LCT_THROW:
5076 flags &= ~ECF_NOTHROW;
5077 break;
5078 case LCT_RETURNS_TWICE:
5079 flags = ECF_RETURNS_TWICE;
5080 break;
5081 }
5082 fun = orgfun;
5083
5084 /* Ensure current function's preferred stack boundary is at least
5085 what we need. */
5086 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
5087 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5088
5089 /* If this kind of value comes back in memory,
5090 decide where in memory it should come back. */
5091 if (outmode != VOIDmode)
5092 {
5093 tfom = lang_hooks.types.type_for_mode (outmode, 0);
5094 if (aggregate_value_p (tfom, 0))
5095 {
5096 #ifdef PCC_STATIC_STRUCT_RETURN
5097 rtx pointer_reg
5098 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
5099 mem_value = gen_rtx_MEM (outmode, pointer_reg);
5100 pcc_struct_value = 1;
5101 if (value == 0)
5102 value = gen_reg_rtx (outmode);
5103 #else /* not PCC_STATIC_STRUCT_RETURN */
5104 struct_value_size = GET_MODE_SIZE (outmode);
5105 if (value != 0 && MEM_P (value))
5106 mem_value = value;
5107 else
5108 mem_value = assign_temp (tfom, 1, 1);
5109 #endif
5110 /* This call returns a big structure. */
5111 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
5112 }
5113 }
5114 else
5115 tfom = void_type_node;
5116
5117 /* ??? Unfinished: must pass the memory address as an argument. */
5118
5119 /* Copy all the libcall-arguments out of the varargs data
5120 and into a vector ARGVEC.
5121
5122 Compute how to pass each argument. We only support a very small subset
5123 of the full argument passing conventions to limit complexity here since
5124 library functions shouldn't have many args. */
5125
5126 argvec = XALLOCAVEC (struct arg, nargs + 1);
5127 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
5128
5129 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
5130 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
5131 #else
5132 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
5133 #endif
5134 args_so_far = pack_cumulative_args (&args_so_far_v);
5135
5136 args_size.constant = 0;
5137 args_size.var = 0;
5138
5139 count = 0;
5140
5141 push_temp_slots ();
5142
5143 /* If there's a structure value address to be passed,
5144 either pass it in the special place, or pass it as an extra argument. */
5145 if (mem_value && struct_value == 0 && ! pcc_struct_value)
5146 {
5147 rtx addr = XEXP (mem_value, 0);
5148
5149 nargs++;
5150
5151 /* Make sure it is a reasonable operand for a move or push insn. */
5152 if (!REG_P (addr) && !MEM_P (addr)
5153 && !(CONSTANT_P (addr)
5154 && targetm.legitimate_constant_p (Pmode, addr)))
5155 addr = force_operand (addr, NULL_RTX);
5156
5157 argvec[count].value = addr;
5158 argvec[count].mode = Pmode;
5159 argvec[count].partial = 0;
5160
5161 function_arg_info ptr_arg (Pmode, /*named=*/true);
5162 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
5163 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
5164
5165 locate_and_pad_parm (Pmode, NULL_TREE,
5166 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5167 1,
5168 #else
5169 argvec[count].reg != 0,
5170 #endif
5171 reg_parm_stack_space, 0,
5172 NULL_TREE, &args_size, &argvec[count].locate);
5173
5174 if (argvec[count].reg == 0 || argvec[count].partial != 0
5175 || reg_parm_stack_space > 0)
5176 args_size.constant += argvec[count].locate.size.constant;
5177
5178 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
5179
5180 count++;
5181 }
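
/* In other words, on targets with no dedicated struct-value register,
   the address of MEM_VALUE has just been prepended as an implicit
   Pmode argument (hence the NARGS++ above), and the loop below starts
   at COUNT == 1.  */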
5182
5183 for (unsigned int i = 0; count < nargs; i++, count++)
5184 {
5185 rtx val = args[i].first;
5186 function_arg_info arg (args[i].second, /*named=*/true);
5187 int unsigned_p = 0;
5188
5189 /* We cannot convert the arg value to the mode the library wants here;
5190 must do it earlier where we know the signedness of the arg. */
5191 gcc_assert (arg.mode != BLKmode
5192 && (GET_MODE (val) == arg.mode
5193 || GET_MODE (val) == VOIDmode));
5194
5195 /* Make sure it is a reasonable operand for a move or push insn. */
5196 if (!REG_P (val) && !MEM_P (val)
5197 && !(CONSTANT_P (val)
5198 && targetm.legitimate_constant_p (arg.mode, val)))
5199 val = force_operand (val, NULL_RTX);
5200
5201 if (pass_by_reference (&args_so_far_v, arg))
5202 {
5203 rtx slot;
5204 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
5205
5206 /* If this was a CONST function, it is now PURE since it now
5207 reads memory. */
5208 if (flags & ECF_CONST)
5209 {
5210 flags &= ~ECF_CONST;
5211 flags |= ECF_PURE;
5212 }
5213
5214 if (MEM_P (val) && !must_copy)
5215 {
5216 tree val_expr = MEM_EXPR (val);
5217 if (val_expr)
5218 mark_addressable (val_expr);
5219 slot = val;
5220 }
5221 else
5222 {
5223 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
5224 1, 1);
5225 emit_move_insn (slot, val);
5226 }
5227
5228 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5229 gen_rtx_USE (VOIDmode, slot),
5230 call_fusage);
5231 if (must_copy)
5232 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5233 gen_rtx_CLOBBER (VOIDmode,
5234 slot),
5235 call_fusage);
5236
5237 arg.mode = Pmode;
5238 arg.pass_by_reference = true;
5239 val = force_operand (XEXP (slot, 0), NULL_RTX);
5240 }
5241
5242 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5243 NULL_TREE, 0);
5244 argvec[count].mode = arg.mode;
5245 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5246 unsigned_p);
5247 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
5248
5249 argvec[count].partial
5250 = targetm.calls.arg_partial_bytes (args_so_far, arg);
5251
5252 if (argvec[count].reg == 0
5253 || argvec[count].partial != 0
5254 || reg_parm_stack_space > 0)
5255 {
5256 locate_and_pad_parm (arg.mode, NULL_TREE,
5257 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5258 1,
5259 #else
5260 argvec[count].reg != 0,
5261 #endif
5262 reg_parm_stack_space, argvec[count].partial,
5263 NULL_TREE, &args_size, &argvec[count].locate);
5264 args_size.constant += argvec[count].locate.size.constant;
5265 gcc_assert (!argvec[count].locate.size.var);
5266 }
5267 #ifdef BLOCK_REG_PADDING
5268 else
5269 /* The argument is passed entirely in registers. See at which
5270 end it should be padded. */
5271 argvec[count].locate.where_pad =
5272 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5273 known_le (GET_MODE_SIZE (arg.mode),
5274 UNITS_PER_WORD));
5275 #endif
5276
5277 targetm.calls.function_arg_advance (args_so_far, arg);
5278 }
5279
5280 for (int i = 0; i < nargs; i++)
5281 if (reg_parm_stack_space > 0
5282 || argvec[i].reg == 0
5283 || argvec[i].partial != 0)
5284 update_stack_alignment_for_call (&argvec[i].locate);
5285
5286 /* If this machine requires an external definition for library
5287 functions, write one out. */
5288 assemble_external_libcall (fun);
5289
5290 original_args_size = args_size;
5291 args_size.constant = (aligned_upper_bound (args_size.constant
5292 + stack_pointer_delta,
5293 STACK_BYTES)
5294 - stack_pointer_delta);
5295
5296 args_size.constant = upper_bound (args_size.constant,
5297 reg_parm_stack_space);
5298
5299 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5300 args_size.constant -= reg_parm_stack_space;
5301
5302 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5303 args_size.constant);
5304
5305 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
5306 {
5307 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5308 current_function_pushed_stack_size
5309 = upper_bound (current_function_pushed_stack_size, pushed);
5310 }
5311
5312 if (ACCUMULATE_OUTGOING_ARGS)
5313 {
5314 /* Since the stack pointer will never be pushed, it is possible for
5315 the evaluation of a parm to clobber something we have already
5316 written to the stack. Since most function calls on RISC machines
5317 do not use the stack, this is uncommon, but must work correctly.
5318
5319 Therefore, we save any area of the stack that was already written
5320 and that we are using. Here we set up to do this by making a new
5321 stack usage map from the old one.
5322
5323 Another approach might be to try to reorder the argument
5324 evaluations to avoid this conflicting stack usage. */
5325
5326 needed = args_size.constant;
5327
5328 /* Since we will be writing into the entire argument area, the
5329 map must be allocated for its entire size, not just the part that
5330 is the responsibility of the caller. */
5331 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5332 needed += reg_parm_stack_space;
5333
5334 poly_int64 limit = needed;
5335 if (ARGS_GROW_DOWNWARD)
5336 limit += 1;
5337
5338 /* For polynomial sizes, this is the maximum possible size needed
5339 for arguments with a constant size and offset. */
5340 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5341 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5342 const_limit);
5343
5344 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5345 stack_usage_map = stack_usage_map_buf;
5346
5347 if (initial_highest_arg_in_use)
5348 memcpy (stack_usage_map, initial_stack_usage_map,
5349 initial_highest_arg_in_use);
5350
5351 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5352 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5353 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5354 needed = 0;
5355
5356 /* We must be careful to use virtual regs before they're instantiated,
5357 and real regs afterwards. Loop optimization, for example, can create
5358 new libcalls after we've instantiated the virtual regs, and if we
5359 use virtuals anyway, they won't match the rtl patterns. */
5360
5361 if (virtuals_instantiated)
5362 argblock = plus_constant (Pmode, stack_pointer_rtx,
5363 STACK_POINTER_OFFSET);
5364 else
5365 argblock = virtual_outgoing_args_rtx;
5366 }
5367 else
5368 {
5369 if (!PUSH_ARGS)
5370 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5371 }
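
/* At this point ARGBLOCK is either the base of a preallocated argument
   block (ACCUMULATE_OUTGOING_ARGS, or !PUSH_ARGS via push_block) or 0,
   in which case each argument is pushed individually below.  */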
5372
5373 /* We push args individually in reverse order, so perform any stack
5374 alignment before the first push (the last arg). */
5375 if (argblock == 0)
5376 anti_adjust_stack (gen_int_mode (args_size.constant
5377 - original_args_size.constant,
5378 Pmode));
5379
5380 argnum = nargs - 1;
5381
5382 #ifdef REG_PARM_STACK_SPACE
5383 if (ACCUMULATE_OUTGOING_ARGS)
5384 {
5385 /* The argument list is the property of the called routine and it
5386 may clobber it. If the fixed area has been used for previous
5387 parameters, we must save and restore it. */
5388 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5389 &low_to_save, &high_to_save);
5390 }
5391 #endif
5392
5393 /* When expanding a normal call, args are stored in push order,
5394 which is the reverse of what we have here. */
5395 bool any_regs = false;
5396 for (int i = nargs; i-- > 0; )
5397 if (argvec[i].reg != NULL_RTX)
5398 {
5399 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5400 any_regs = true;
5401 }
5402 if (!any_regs)
5403 targetm.calls.call_args (pc_rtx, NULL_TREE);
5404
5405 /* Push the args that need to be pushed. */
5406
5407 have_push_fusage = false;
5408
5409 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5410 are to be pushed. */
5411 for (count = 0; count < nargs; count++, argnum--)
5412 {
5413 machine_mode mode = argvec[argnum].mode;
5414 rtx val = argvec[argnum].value;
5415 rtx reg = argvec[argnum].reg;
5416 int partial = argvec[argnum].partial;
5417 unsigned int parm_align = argvec[argnum].locate.boundary;
5418 poly_int64 lower_bound = 0, upper_bound = 0;
5419
5420 if (! (reg != 0 && partial == 0))
5421 {
5422 rtx use;
5423
5424 if (ACCUMULATE_OUTGOING_ARGS)
5425 {
5426 /* If this is being stored into a pre-allocated, fixed-size,
5427 stack area, save any previous data at that location. */
5428
5429 if (ARGS_GROW_DOWNWARD)
5430 {
5431 /* stack_slot is negative, but we want to index stack_usage_map
5432 with positive values. */
5433 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5434 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5435 }
5436 else
5437 {
5438 lower_bound = argvec[argnum].locate.slot_offset.constant;
5439 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5440 }
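/* A small worked case, assuming args grow downward: with
   slot_offset.constant == -8 and size.constant == 8, UPPER_BOUND
   becomes 9 and LOWER_BOUND 1, i.e. bytes 1..8 of stack_usage_map
   are the ones checked below.  */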
5441
5442 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5443 reg_parm_stack_space))
5444 {
5445 /* We need to make a save area. */
5446 poly_uint64 size
5447 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5448 machine_mode save_mode
5449 = int_mode_for_size (size, 1).else_blk ();
5450 rtx adr
5451 = plus_constant (Pmode, argblock,
5452 argvec[argnum].locate.offset.constant);
5453 rtx stack_area
5454 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5455
5456 if (save_mode == BLKmode)
5457 {
5458 argvec[argnum].save_area
5459 = assign_stack_temp (BLKmode,
5460 argvec[argnum].locate.size.constant
5461 );
5462
5463 emit_block_move (validize_mem
5464 (copy_rtx (argvec[argnum].save_area)),
5465 stack_area,
5466 (gen_int_mode
5467 (argvec[argnum].locate.size.constant,
5468 Pmode)),
5469 BLOCK_OP_CALL_PARM);
5470 }
5471 else
5472 {
5473 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5474
5475 emit_move_insn (argvec[argnum].save_area, stack_area);
5476 }
5477 }
5478 }
5479
5480 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5481 partial, reg, 0, argblock,
5482 (gen_int_mode
5483 (argvec[argnum].locate.offset.constant, Pmode)),
5484 reg_parm_stack_space,
5485 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5486
5487 /* Now mark the segment we just used. */
5488 if (ACCUMULATE_OUTGOING_ARGS)
5489 mark_stack_region_used (lower_bound, upper_bound);
5490
5491 NO_DEFER_POP;
5492
5493 /* Indicate argument access so that alias.c knows that these
5494 values are live. */
5495 if (argblock)
5496 use = plus_constant (Pmode, argblock,
5497 argvec[argnum].locate.offset.constant);
5498 else if (have_push_fusage)
5499 continue;
5500 else
5501 {
5502 /* When arguments are pushed, trying to tell alias.c where
5503 exactly this argument is won't work, because the
5504 auto-increment causes confusion. So we merely indicate
5505 that we access something with a known mode somewhere on
5506 the stack. */
5507 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5508 gen_rtx_SCRATCH (Pmode));
5509 have_push_fusage = true;
5510 }
5511 use = gen_rtx_MEM (argvec[argnum].mode, use);
5512 use = gen_rtx_USE (VOIDmode, use);
5513 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5514 }
5515 }
5516
5517 argnum = nargs - 1;
5518
5519 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5520
5521 /* Now load any reg parms into their regs. */
5522
5523 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5524 are to be pushed. */
5525 for (count = 0; count < nargs; count++, argnum--)
5526 {
5527 machine_mode mode = argvec[argnum].mode;
5528 rtx val = argvec[argnum].value;
5529 rtx reg = argvec[argnum].reg;
5530 int partial = argvec[argnum].partial;
5531
5532 /* Handle calls that pass values in multiple non-contiguous
5533 locations. The PA64 has examples of this for library calls. */
5534 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5535 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5536 else if (reg != 0 && partial == 0)
5537 {
5538 emit_move_insn (reg, val);
5539 #ifdef BLOCK_REG_PADDING
5540 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5541
5542 /* Copied from load_register_parameters. */
5543
5544 /* Handle case where we have a value that needs shifting
5545 up to the msb, e.g. a QImode value when we're padding
5546 upward on a BYTES_BIG_ENDIAN machine. */
5547 if (known_lt (size, UNITS_PER_WORD)
5548 && (argvec[argnum].locate.where_pad
5549 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5550 {
5551 rtx x;
5552 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5553
5554 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5555 report the whole reg as used. Strictly speaking, the
5556 call only uses SIZE bytes at the msb end, but it doesn't
5557 seem worth generating rtl to say that. */
5558 reg = gen_rtx_REG (word_mode, REGNO (reg));
5559 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5560 if (x != reg)
5561 emit_move_insn (reg, x);
5562 }
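/* For instance, a QImode value on a 64-bit BYTES_BIG_ENDIAN target
   padding upward: SIZE == 1, so SHIFT == 56 and the byte lands in
   the most significant byte of the word-sized register.  */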
5563 #endif
5564 }
5565
5566 NO_DEFER_POP;
5567 }
5568
5569 /* Any regs containing parms remain in use through the call. */
5570 for (count = 0; count < nargs; count++)
5571 {
5572 rtx reg = argvec[count].reg;
5573 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5574 use_group_regs (&call_fusage, reg);
5575 else if (reg != 0)
5576 {
5577 int partial = argvec[count].partial;
5578 if (partial)
5579 {
5580 int nregs;
5581 gcc_assert (partial % UNITS_PER_WORD == 0);
5582 nregs = partial / UNITS_PER_WORD;
5583 use_regs (&call_fusage, REGNO (reg), nregs);
5584 }
5585 else
5586 use_reg (&call_fusage, reg);
5587 }
5588 }
5589
5590 /* Pass the function the address in which to return a structure value. */
5591 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5592 {
5593 emit_move_insn (struct_value,
5594 force_reg (Pmode,
5595 force_operand (XEXP (mem_value, 0),
5596 NULL_RTX)));
5597 if (REG_P (struct_value))
5598 use_reg (&call_fusage, struct_value);
5599 }
5600
5601 /* Don't allow popping to be deferred, since then
5602 cse'ing of library calls could delete a call and leave the pop. */
5603 NO_DEFER_POP;
5604 valreg = (mem_value == 0 && outmode != VOIDmode
5605 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5606
5607 /* Stack must be properly aligned now. */
5608 gcc_assert (multiple_p (stack_pointer_delta,
5609 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5610
5611 before_call = get_last_insn ();
5612
5613 if (flag_callgraph_info)
5614 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5615
5616 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5617 will set inhibit_defer_pop to that value. */
5618 /* The return type is needed to decide how many bytes the function pops.
5619 Signedness plays no role in that, so for simplicity, we pretend it's
5620 always signed. We also assume that the list of arguments passed has
5621 no impact, so we pretend it is unknown. */
5622
5623 emit_call_1 (fun, NULL,
5624 get_identifier (XSTR (orgfun, 0)),
5625 build_function_type (tfom, NULL_TREE),
5626 original_args_size.constant, args_size.constant,
5627 struct_value_size,
5628 targetm.calls.function_arg (args_so_far,
5629 function_arg_info::end_marker ()),
5630 valreg,
5631 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5632
5633 if (flag_ipa_ra)
5634 {
5635 rtx datum = orgfun;
5636 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5637 rtx_call_insn *last = last_call_insn ();
5638 add_reg_note (last, REG_CALL_DECL, datum);
5639 }
5640
5641 /* Right-shift returned value if necessary. */
5642 if (!pcc_struct_value
5643 && TYPE_MODE (tfom) != BLKmode
5644 && targetm.calls.return_in_msb (tfom))
5645 {
5646 shift_return_value (TYPE_MODE (tfom), false, valreg);
5647 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5648 }
5649
5650 targetm.calls.end_call_args ();
5651
5652 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5653 that it should complain if nonvolatile values are live. For
5654 functions that cannot return, inform flow that control does not
5655 fall through. */
5656 if (flags & ECF_NORETURN)
5657 {
5658 /* The barrier note must be emitted
5659 immediately after the CALL_INSN. Some ports emit more than
5660 just a CALL_INSN above, so we must search for it here. */
5661 rtx_insn *last = get_last_insn ();
5662 while (!CALL_P (last))
5663 {
5664 last = PREV_INSN (last);
5665 /* There was no CALL_INSN? */
5666 gcc_assert (last != before_call);
5667 }
5668
5669 emit_barrier_after (last);
5670 }
5671
5672 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5673 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5674 if (flags & ECF_NOTHROW)
5675 {
5676 rtx_insn *last = get_last_insn ();
5677 while (!CALL_P (last))
5678 {
5679 last = PREV_INSN (last);
5680 /* There was no CALL_INSN? */
5681 gcc_assert (last != before_call);
5682 }
5683
5684 make_reg_eh_region_note_nothrow_nononlocal (last);
5685 }
5686
5687 /* Now restore inhibit_defer_pop to its actual original value. */
5688 OK_DEFER_POP;
5689
5690 pop_temp_slots ();
5691
5692 /* Copy the value to the right place. */
5693 if (outmode != VOIDmode && retval)
5694 {
5695 if (mem_value)
5696 {
5697 if (value == 0)
5698 value = mem_value;
5699 if (value != mem_value)
5700 emit_move_insn (value, mem_value);
5701 }
5702 else if (GET_CODE (valreg) == PARALLEL)
5703 {
5704 if (value == 0)
5705 value = gen_reg_rtx (outmode);
5706 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5707 }
5708 else
5709 {
5710 /* Convert to the proper mode if a promotion has been active. */
5711 if (GET_MODE (valreg) != outmode)
5712 {
5713 int unsignedp = TYPE_UNSIGNED (tfom);
5714
5715 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5716 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5717 == GET_MODE (valreg));
5718 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5719 }
5720
5721 if (value != 0)
5722 emit_move_insn (value, valreg);
5723 else
5724 value = valreg;
5725 }
5726 }
5727
5728 if (ACCUMULATE_OUTGOING_ARGS)
5729 {
5730 #ifdef REG_PARM_STACK_SPACE
5731 if (save_area)
5732 restore_fixed_argument_area (save_area, argblock,
5733 high_to_save, low_to_save);
5734 #endif
5735
5736 /* If we saved any argument areas, restore them. */
5737 for (count = 0; count < nargs; count++)
5738 if (argvec[count].save_area)
5739 {
5740 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5741 rtx adr = plus_constant (Pmode, argblock,
5742 argvec[count].locate.offset.constant);
5743 rtx stack_area = gen_rtx_MEM (save_mode,
5744 memory_address (save_mode, adr));
5745
5746 if (save_mode == BLKmode)
5747 emit_block_move (stack_area,
5748 validize_mem
5749 (copy_rtx (argvec[count].save_area)),
5750 (gen_int_mode
5751 (argvec[count].locate.size.constant, Pmode)),
5752 BLOCK_OP_CALL_PARM);
5753 else
5754 emit_move_insn (stack_area, argvec[count].save_area);
5755 }
5756
5757 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5758 stack_usage_map = initial_stack_usage_map;
5759 stack_usage_watermark = initial_stack_usage_watermark;
5760 }
5761
5762 free (stack_usage_map_buf);
5763
5764 return value;
5765
5766 }
5767 \f
5768
5769 /* Store a single argument for a function call
5770 into the register or memory area where it must be passed.
5771 *ARG describes the argument value and where to pass it.
5772
5773 ARGBLOCK is the address of the stack-block for all the arguments,
5774 or 0 on a machine where arguments are pushed individually.
5775
5776 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
5777 so must be careful about how the stack is used.
5778
5779 VARIABLE_SIZE nonzero says that the outgoing argument area is
5780 variable-sized. Under ACCUMULATE_OUTGOING_ARGS it indicates that we
5781 need not worry about saving and restoring the stack.
5782
5783 FNDECL is the declaration of the function we are calling.
5784
5785 Return nonzero if this arg should cause sibcall failure,
5786 zero otherwise. */
5787
5788 static int
5789 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5790 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5791 {
5792 tree pval = arg->tree_value;
5793 rtx reg = 0;
5794 int partial = 0;
5795 poly_int64 used = 0;
5796 poly_int64 lower_bound = 0, upper_bound = 0;
5797 int sibcall_failure = 0;
5798
5799 if (TREE_CODE (pval) == ERROR_MARK)
5800 return 1;
5801
5802 /* Push a new temporary level for any temporaries we make for
5803 this argument. */
5804 push_temp_slots ();
5805
5806 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5807 {
5808 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5809 save any previous data at that location. */
5810 if (argblock && ! variable_size && arg->stack)
5811 {
5812 if (ARGS_GROW_DOWNWARD)
5813 {
5814 /* stack_slot is negative, but we want to index stack_usage_map
5815 with positive values. */
5816 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5817 {
5818 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5819 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5820 }
5821 else
5822 upper_bound = 0;
5823
5824 lower_bound = upper_bound - arg->locate.size.constant;
5825 }
5826 else
5827 {
5828 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5829 {
5830 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5831 lower_bound = rtx_to_poly_int64 (offset);
5832 }
5833 else
5834 lower_bound = 0;
5835
5836 upper_bound = lower_bound + arg->locate.size.constant;
5837 }
5838
5839 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5840 reg_parm_stack_space))
5841 {
5842 /* We need to make a save area. */
5843 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5844 machine_mode save_mode
5845 = int_mode_for_size (size, 1).else_blk ();
5846 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5847 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5848
5849 if (save_mode == BLKmode)
5850 {
5851 arg->save_area
5852 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5853 preserve_temp_slots (arg->save_area);
5854 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5855 stack_area,
5856 (gen_int_mode
5857 (arg->locate.size.constant, Pmode)),
5858 BLOCK_OP_CALL_PARM);
5859 }
5860 else
5861 {
5862 arg->save_area = gen_reg_rtx (save_mode);
5863 emit_move_insn (arg->save_area, stack_area);
5864 }
5865 }
5866 }
5867 }
5868
5869 /* If this isn't going to be placed on both the stack and in registers,
5870 set up the register and number of words. */
5871 if (! arg->pass_on_stack)
5872 {
5873 if (flags & ECF_SIBCALL)
5874 reg = arg->tail_call_reg;
5875 else
5876 reg = arg->reg;
5877 partial = arg->partial;
5878 }
5879
5880 /* Being passed entirely in a register. We shouldn't be called in
5881 this case. */
5882 gcc_assert (reg == 0 || partial != 0);
5883
5884 /* If this arg needs special alignment, don't load the registers
5885 here. */
5886 if (arg->n_aligned_regs != 0)
5887 reg = 0;
5888
5889 /* If this is being passed partially in a register, we can't evaluate
5890 it directly into its stack slot. Otherwise, we can. */
5891 if (arg->value == 0)
5892 {
5893 /* stack_arg_under_construction is nonzero if a function argument is
5894 being evaluated directly into the outgoing argument list and
5895 expand_call must take special action to preserve the argument list
5896 if it is called recursively.
5897
5898 For scalar function arguments stack_usage_map is sufficient to
5899 determine which stack slots must be saved and restored. Scalar
5900 arguments in general have pass_on_stack == 0.
5901
5902 If this argument is initialized by a function which takes the
5903 address of the argument (a C++ constructor or a C function
5904 returning a BLKmode structure), then stack_usage_map is
5905 insufficient and expand_call must push the stack around the
5906 function call. Such arguments have pass_on_stack == 1.
5907
5908 Note that it is always safe to set stack_arg_under_construction,
5909 but this generates suboptimal code if set when not needed. */
5910
5911 if (arg->pass_on_stack)
5912 stack_arg_under_construction++;
5913
5914 arg->value = expand_expr (pval,
5915 (partial
5916 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5917 ? NULL_RTX : arg->stack,
5918 VOIDmode, EXPAND_STACK_PARM);
5919
5920 /* If we are promoting the object (or if for any other reason the
5921 mode doesn't agree), convert the mode. */
5922
5923 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5924 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5925 arg->value, arg->unsignedp);
5926
5927 if (arg->pass_on_stack)
5928 stack_arg_under_construction--;
5929 }
5930
5931 /* Check for overlap with already clobbered argument area. */
5932 if ((flags & ECF_SIBCALL)
5933 && MEM_P (arg->value)
5934 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5935 arg->locate.size.constant))
5936 sibcall_failure = 1;
5937
5938 /* Don't allow anything left on stack from computation
5939 of argument to alloca. */
5940 if (flags & ECF_MAY_BE_ALLOCA)
5941 do_pending_stack_adjust ();
5942
5943 if (arg->value == arg->stack)
5944 /* If the value is already in the stack slot, we are done. */
5945 ;
5946 else if (arg->mode != BLKmode)
5947 {
5948 unsigned int parm_align;
5949
5950 /* Argument is a scalar, not entirely passed in registers.
5951 (If part is passed in registers, arg->partial says how much
5952 and emit_push_insn will take care of putting it there.)
5953
5954 Push it, and if its size is less than the
5955 amount of space allocated to it,
5956 also bump stack pointer by the additional space.
5957 Note that in C the default argument promotions
5958 will prevent such mismatches. */
5959
5960 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5961 ? 0 : GET_MODE_SIZE (arg->mode));
5962
5963 /* Compute how much space the push instruction will push.
5964 On many machines, pushing a byte will advance the stack
5965 pointer by a halfword. */
5966 #ifdef PUSH_ROUNDING
5967 size = PUSH_ROUNDING (size);
5968 #endif
5969 used = size;
5970
5971 /* Compute how much space the argument should get:
5972 round up to a multiple of the alignment for arguments. */
5973 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5974 != PAD_NONE)
5975 /* At the moment we don't (need to) support ABIs for which the
5976 padding isn't known at compile time. In principle it should
5977 be easy to add though. */
5978 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
5979
5980 /* Compute the alignment of the pushed argument. */
5981 parm_align = arg->locate.boundary;
5982 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5983 == PAD_DOWNWARD)
5984 {
5985 poly_int64 pad = used - size;
5986 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5987 if (pad_align != 0)
5988 parm_align = MIN (parm_align, pad_align);
5989 }
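/* A worked case, assuming PUSH_ROUNDING leaves a 2-byte size alone
   and PARM_BOUNDARY == 32: USED == 4, so PAD == 2, PAD_ALIGN == 16
   bits, and PARM_ALIGN is capped at 16.  */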
5990
5991 /* This isn't already where we want it on the stack, so put it there.
5992 This can either be done with push or copy insns. */
5993 if (maybe_ne (used, 0)
5994 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5995 NULL_RTX, parm_align, partial, reg, used - size,
5996 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5997 reg_parm_stack_space,
5998 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5999 sibcall_failure = 1;
6000
6001 /* Unless this is a partially-in-register argument, the argument is now
6002 in the stack. */
6003 if (partial == 0)
6004 arg->value = arg->stack;
6005 }
6006 else
6007 {
6008 /* BLKmode, at least partly to be pushed. */
6009
6010 unsigned int parm_align;
6011 poly_int64 excess;
6012 rtx size_rtx;
6013
6014 /* Pushing a nonscalar.
6015 If part is passed in registers, PARTIAL says how much
6016 and emit_push_insn will take care of putting it there. */
6017
6018 /* Round its size up to a multiple
6019 of the allocation unit for arguments. */
6020
6021 if (arg->locate.size.var != 0)
6022 {
6023 excess = 0;
6024 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
6025 }
6026 else
6027 {
6028 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
6029 for BLKmode is careful to avoid it. */
6030 excess = (arg->locate.size.constant
6031 - arg_int_size_in_bytes (TREE_TYPE (pval))
6032 + partial);
6033 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
6034 NULL_RTX, TYPE_MODE (sizetype),
6035 EXPAND_NORMAL);
6036 }
6037
6038 parm_align = arg->locate.boundary;
6039
6040 /* When an argument is padded down, the block is aligned to
6041 PARM_BOUNDARY, but the actual argument isn't. */
6042 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6043 == PAD_DOWNWARD)
6044 {
6045 if (arg->locate.size.var)
6046 parm_align = BITS_PER_UNIT;
6047 else
6048 {
6049 unsigned int excess_align
6050 = known_alignment (excess) * BITS_PER_UNIT;
6051 if (excess_align != 0)
6052 parm_align = MIN (parm_align, excess_align);
6053 }
6054 }
6055
6056 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
6057 {
6058 /* emit_push_insn might not work properly if arg->value and
6059 argblock + arg->locate.offset areas overlap. */
6060 rtx x = arg->value;
6061 poly_int64 i = 0;
6062
6063 if (strip_offset (XEXP (x, 0), &i)
6064 == crtl->args.internal_arg_pointer)
6065 {
6066 /* arg.locate doesn't contain the pretend_args_size offset;
6067 it's part of argblock. Ensure we don't count it in I. */
6068 if (STACK_GROWS_DOWNWARD)
6069 i -= crtl->args.pretend_args_size;
6070 else
6071 i += crtl->args.pretend_args_size;
6072
6073 /* expand_call should ensure this. */
6074 gcc_assert (!arg->locate.offset.var
6075 && arg->locate.size.var == 0);
6076 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
6077
6078 if (known_eq (arg->locate.offset.constant, i))
6079 {
6080 /* Even though they appear to be at the same location,
6081 if part of the outgoing argument is in registers,
6082 they aren't really at the same location. Check for
6083 this by making sure that the incoming size is the
6084 same as the outgoing size. */
6085 if (maybe_ne (arg->locate.size.constant, size_val))
6086 sibcall_failure = 1;
6087 }
6088 else if (maybe_in_range_p (arg->locate.offset.constant,
6089 i, size_val))
6090 sibcall_failure = 1;
6091 /* Use arg->locate.size.constant instead of size_rtx
6092 because we only care about the part of the argument
6093 on the stack. */
6094 else if (maybe_in_range_p (i, arg->locate.offset.constant,
6095 arg->locate.size.constant))
6096 sibcall_failure = 1;
6097 }
6098 }
6099
6100 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
6101 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
6102 parm_align, partial, reg, excess, argblock,
6103 ARGS_SIZE_RTX (arg->locate.offset),
6104 reg_parm_stack_space,
6105 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
6106
6107 /* Unless this is a partially-in-register argument, the argument is now
6108 in the stack.
6109
6110 ??? Unlike the case above, in which we want the actual
6111 address of the data, so that we can load it directly into a
6112 register, here we want the address of the stack slot, so that
6113 it's properly aligned for word-by-word copying or something
6114 like that. It's not clear that this is always correct. */
6115 if (partial == 0)
6116 arg->value = arg->stack_slot;
6117 }
6118
6119 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
6120 {
6121 tree type = TREE_TYPE (arg->tree_value);
6122 arg->parallel_value
6123 = emit_group_load_into_temps (arg->reg, arg->value, type,
6124 int_size_in_bytes (type));
6125 }
6126
6127 /* Mark all slots this store used. */
6128 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
6129 && argblock && ! variable_size && arg->stack)
6130 mark_stack_region_used (lower_bound, upper_bound);
6131
6132 /* Once we have pushed something, pops can't safely
6133 be deferred during the rest of the arguments. */
6134 NO_DEFER_POP;
6135
6136 /* Free any temporary slots made in processing this argument. */
6137 pop_temp_slots ();
6138
6139 return sibcall_failure;
6140 }
6141
6142 /* True if we do not know how to pass ARG solely in registers. */
6143
6144 bool
6145 must_pass_in_stack_var_size (const function_arg_info &arg)
6146 {
6147 if (!arg.type)
6148 return false;
6149
6150 /* If the type has variable size... */
6151 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
6152 return true;
6153
6154 /* If the type is marked as addressable (it is required
6155 to be constructed into the stack)... */
6156 if (TREE_ADDRESSABLE (arg.type))
6157 return true;
6158
6159 return false;
6160 }
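
/* For example, a C++ class with a non-trivial copy constructor or
   destructor is normally marked TREE_ADDRESSABLE by the front end,
   so it is caught by the second test above.  */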
6161
6162 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
6163 takes trailing padding of a structure into account. */
6164 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
6165
6166 bool
6167 must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
6168 {
6169 if (!arg.type)
6170 return false;
6171
6172 /* If the type has variable size... */
6173 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
6174 return true;
6175
6176 /* If the type is marked as addressable (it is required
6177 to be constructed into the stack)... */
6178 if (TREE_ADDRESSABLE (arg.type))
6179 return true;
6180
6181 if (TYPE_EMPTY_P (arg.type))
6182 return false;
6183
6184 /* If the padding and mode of the type is such that a copy into
6185 a register would put it into the wrong part of the register. */
6186 if (arg.mode == BLKmode
6187 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
6188 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
6189 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
6190 return true;
6191
6192 return false;
6193 }
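
/* An illustrative case, assuming PARM_BOUNDARY == 32 and a
   BYTES_BIG_ENDIAN target that pads such values upward: a packed
   3-byte BLKmode structure has 3 % 4 != 0, so copying it into a
   register would leave its bytes at the wrong end, and it must be
   passed on the stack.  */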
6194
6195 /* Return true if TYPE must be passed on the stack when passed to
6196 the "..." arguments of a function. */
6197
6198 bool
6199 must_pass_va_arg_in_stack (tree type)
6200 {
6201 function_arg_info arg (type, /*named=*/false);
6202 return targetm.calls.must_pass_in_stack (arg);
6203 }
6204
6205 /* Return true if FIELD is the C++17 empty base field that should
6206 be ignored for ABI calling convention decisions in order to
6207 maintain ABI compatibility between C++14 and earlier, which doesn't
6208 add this FIELD to classes with empty bases, and C++17 and later
6209 which does. */
6210
6211 bool
6212 cxx17_empty_base_field_p (const_tree field)
6213 {
6214 return (DECL_FIELD_ABI_IGNORED (field)
6215 && DECL_ARTIFICIAL (field)
6216 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
6217 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
6218 }
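
/* For instance, given

     struct empty {};
     struct derived : empty { int i; };

   C++17 and later lay out an artificial field for the `empty' base
   inside `derived'; this predicate lets the ABI code ignore that
   field so `derived' is passed the same way as in C++14.  */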
6219
6220 /* Tell the garbage collector about GTY markers in this source file. */
6221 #include "gt-calls.h"