1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define INCLUDE_STRING
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "expmed.h"
34 #include "optabs.h"
35 #include "emit-rtl.h"
36 #include "cgraph.h"
37 #include "diagnostic-core.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "internal-fn.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "expr.h"
46 #include "output.h"
47 #include "langhooks.h"
48 #include "except.h"
49 #include "dbgcnt.h"
50 #include "rtl-iter.h"
51 #include "tree-vrp.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-strlen.h"
54 #include "intl.h"
55 #include "stringpool.h"
56 #include "hash-map.h"
57 #include "hash-traits.h"
58 #include "attribs.h"
59 #include "builtins.h"
60 #include "gimple-fold.h"
61 #include "attr-fnspec.h"
62 #include "value-query.h"
63
64 #include "tree-pretty-print.h"
65
66 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
67 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
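/* For instance, on a target where PREFERRED_STACK_BOUNDARY is 128 and
   BITS_PER_UNIT is 8 (illustrative values; both are target-defined),
   STACK_BYTES evaluates to 16.  */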
68
69 /* Data structure and subroutines used within expand_call. */
70
71 struct arg_data
72 {
73 /* Tree node for this argument. */
74 tree tree_value;
75 /* Mode for value; TYPE_MODE unless promoted. */
76 machine_mode mode;
77 /* Current RTL value for argument, or 0 if it isn't precomputed. */
78 rtx value;
79 /* Initially-computed RTL value for argument; only for const functions. */
80 rtx initial_value;
81 /* Register to pass this argument in, 0 if passed on stack, or a
82 PARALLEL if the arg is to be copied into multiple non-contiguous
83 registers. */
84 rtx reg;
85 /* Register to pass this argument in when generating a tail call sequence.
86 This is not the same register as for normal calls on machines with
87 register windows. */
88 rtx tail_call_reg;
89 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
90 form for emit_group_move. */
91 rtx parallel_value;
92 /* If REG was promoted from the actual mode of the argument expression,
93 indicates whether the promotion is sign- or zero-extended. */
94 int unsignedp;
95 /* Number of bytes to put in registers. 0 means put the whole arg
96 in registers. Also 0 if not passed in registers. */
97 int partial;
98 /* Nonzero if argument must be passed on stack.
99 Note that some arguments may be passed on the stack
100 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
101 pass_on_stack identifies arguments that *cannot* go in registers. */
102 int pass_on_stack;
103 /* Some fields packaged up for locate_and_pad_parm. */
104 struct locate_and_pad_arg_data locate;
105 /* Location on the stack at which parameter should be stored. The store
106 has already been done if STACK == VALUE. */
107 rtx stack;
108 /* Location on the stack of the start of this argument slot. This can
109 differ from STACK if this arg pads downward. This location is known
110 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
111 rtx stack_slot;
112 /* Place that this stack area has been saved, if needed. */
113 rtx save_area;
114 /* If an argument's alignment does not permit direct copying into registers,
115 copy in smaller-sized pieces into pseudos. These are stored in a
116 block pointed to by this field. The next field says how many
117 word-sized pseudos we made. */
118 rtx *aligned_regs;
119 int n_aligned_regs;
120 };
121
122 /* A vector of one char per byte of stack space. A byte is nonzero if
123 the corresponding stack location has been used.
124 This vector is used to prevent a function call within an argument from
125 clobbering any stack already set up. */
126 static char *stack_usage_map;
127
128 /* Size of STACK_USAGE_MAP. */
129 static unsigned int highest_outgoing_arg_in_use;
130
131 /* Assume that any stack location at or above this byte index is used,
132 without checking the contents of stack_usage_map. */
133 static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
134
135 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
136 stack location's tail call argument has already been stored into the stack.
137 This bitmap is used to prevent sibling call optimization if the function tries
138 to use the parent's incoming argument slots when they have already been
139 overwritten with tail call arguments. */
140 static sbitmap stored_args_map;
141
142 /* Assume that any virtual-incoming location at or above this byte index has been
143 stored, without checking the contents of stored_args_map. */
144 static unsigned HOST_WIDE_INT stored_args_watermark;
145
146 /* stack_arg_under_construction is nonzero when an argument may be
147 initialized with a constructor call (including a C function that
148 returns a BLKmode struct) and expand_call must take special action
149 to make sure the object being constructed does not overlap the
150 argument list for the constructor call. */
151 static int stack_arg_under_construction;
152
153 static void precompute_register_parameters (int, struct arg_data *, int *);
154 static int store_one_arg (struct arg_data *, rtx, int, int, int);
155 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
156 static int finalize_must_preallocate (int, int, struct arg_data *,
157 struct args_size *);
158 static void precompute_arguments (int, struct arg_data *);
159 static void compute_argument_addresses (struct arg_data *, rtx, int);
160 static rtx rtx_for_function_call (tree, tree);
161 static void load_register_parameters (struct arg_data *, int, rtx *, int,
162 int, int *);
163 static int special_function_p (const_tree, int);
164 static int check_sibcall_argument_overlap_1 (rtx);
165 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
166
167 static tree split_complex_types (tree);
168
169 #ifdef REG_PARM_STACK_SPACE
170 static rtx save_fixed_argument_area (int, rtx, int *, int *);
171 static void restore_fixed_argument_area (rtx, rtx, int, int);
172 #endif
173 \f
174 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
175 stack region might already be in use. */
176
177 static bool
178 stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
179 unsigned int reg_parm_stack_space)
180 {
181 unsigned HOST_WIDE_INT const_lower, const_upper;
182 const_lower = constant_lower_bound (lower_bound);
183 if (!upper_bound.is_constant (&const_upper))
184 const_upper = HOST_WIDE_INT_M1U;
185
186 if (const_upper > stack_usage_watermark)
187 return true;
188
189 /* Don't worry about things in the fixed argument area;
190 it has already been saved. */
191 const_lower = MAX (const_lower, reg_parm_stack_space);
192 const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
193 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
194 if (stack_usage_map[i])
195 return true;
196 return false;
197 }
198
199 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
200 stack region are now in use. */
201
202 static void
203 mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
204 {
205 unsigned HOST_WIDE_INT const_lower, const_upper;
206 const_lower = constant_lower_bound (lower_bound);
207 if (upper_bound.is_constant (&const_upper))
208 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
209 stack_usage_map[i] = 1;
210 else
211 stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
212 }
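
/* A minimal usage sketch (hypothetical LOWER/UPPER bounds, not code from
   this file): before emitting a store into bytes [LOWER, UPPER) of the
   outgoing argument area, a caller would typically do

     if (stack_region_maybe_used_p (lower, upper, reg_parm_stack_space))
       ... save the old contents or pick another slot ...
     ... emit the store ...
     mark_stack_region_used (lower, upper);

   so that later arguments see the slot as live.  */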
213
214 /* Force FUNEXP into a form suitable for the address of a CALL,
215 and return that as an rtx. Also load the static chain register
216 if FNDECL is a nested function.
217
218 CALL_FUSAGE points to a variable holding the prospective
219 CALL_INSN_FUNCTION_USAGE information. */
220
221 rtx
222 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
223 rtx *call_fusage, int reg_parm_seen, int flags)
224 {
225 /* Make a valid memory address and copy constants through pseudo-regs,
226 but not for a constant address if -fno-function-cse. */
227 if (GET_CODE (funexp) != SYMBOL_REF)
228 {
229 /* If it's an indirect call by descriptor, generate code to perform
230 runtime identification of the pointer and load the descriptor. */
231 if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
232 {
233 const int bit_val = targetm.calls.custom_function_descriptors;
234 rtx call_lab = gen_label_rtx ();
235
236 gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
237 fndecl_or_type
238 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
239 fndecl_or_type);
240 DECL_STATIC_CHAIN (fndecl_or_type) = 1;
241 rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
242
243 if (GET_MODE (funexp) != Pmode)
244 funexp = convert_memory_address (Pmode, funexp);
245
246 /* Avoid long live ranges around function calls. */
247 funexp = copy_to_mode_reg (Pmode, funexp);
248
249 if (REG_P (chain))
250 emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
251
252 /* Emit the runtime identification pattern. */
253 rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
254 emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
255 call_lab);
256
257 /* Statically predict the branch as very likely taken. */
258 rtx_insn *insn = get_last_insn ();
259 if (JUMP_P (insn))
260 predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
261
262 /* Load the descriptor. */
263 rtx mem = gen_rtx_MEM (ptr_mode,
264 plus_constant (Pmode, funexp, - bit_val));
265 MEM_NOTRAP_P (mem) = 1;
266 mem = convert_memory_address (Pmode, mem);
267 emit_move_insn (chain, mem);
268
269 mem = gen_rtx_MEM (ptr_mode,
270 plus_constant (Pmode, funexp,
271 POINTER_SIZE / BITS_PER_UNIT
272 - bit_val));
273 MEM_NOTRAP_P (mem) = 1;
274 mem = convert_memory_address (Pmode, mem);
275 emit_move_insn (funexp, mem);
276
277 emit_label (call_lab);
278
279 if (REG_P (chain))
280 {
281 use_reg (call_fusage, chain);
282 STATIC_CHAIN_REG_P (chain) = 1;
283 }
284
285 /* Make sure we're not going to be overwritten below. */
286 gcc_assert (!static_chain_value);
287 }
288
289 /* If we are using registers for parameters, force the
290 function address into a register now. */
291 funexp = ((reg_parm_seen
292 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
293 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
294 : memory_address (FUNCTION_MODE, funexp));
295 }
296 else
297 {
298 /* funexp could be a SYMBOL_REF that represents a function pointer which is
299 of ptr_mode. In this case, it should be converted into address mode
300 to be a valid address for a memory rtx pattern. See PR 64971. */
301 if (GET_MODE (funexp) != Pmode)
302 funexp = convert_memory_address (Pmode, funexp);
303
304 if (!(flags & ECF_SIBCALL))
305 {
306 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
307 funexp = force_reg (Pmode, funexp);
308 }
309 }
310
311 if (static_chain_value != 0
312 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
313 || DECL_STATIC_CHAIN (fndecl_or_type)))
314 {
315 rtx chain;
316
317 chain = targetm.calls.static_chain (fndecl_or_type, false);
318 static_chain_value = convert_memory_address (Pmode, static_chain_value);
319
320 emit_move_insn (chain, static_chain_value);
321 if (REG_P (chain))
322 {
323 use_reg (call_fusage, chain);
324 STATIC_CHAIN_REG_P (chain) = 1;
325 }
326 }
327
328 return funexp;
329 }
330
331 /* Generate instructions to call function FUNEXP,
332 and optionally pop the results.
333 The CALL_INSN is the first insn generated.
334
335 FNDECL is the declaration node of the function. This is given to the
336 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
337 its own args.
338
339 FUNTYPE is the data type of the function. This is given to the hook
340 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
341 own args. We used to allow an identifier for library functions, but
342 that doesn't work when the return type is an aggregate type and the
343 calling convention says that the pointer to this aggregate is to be
344 popped by the callee.
345
346 STACK_SIZE is the number of bytes of arguments on the stack,
347 ROUNDED_STACK_SIZE is that number rounded up to
348 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
349 both to put into the call insn and to generate explicit popping
350 code if necessary.
351
352 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
353 It is zero if this call doesn't want a structure value.
354
355 NEXT_ARG_REG is the rtx that results from executing
356 targetm.calls.function_arg (&args_so_far,
357 function_arg_info::end_marker ());
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
363
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
366
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
370
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function. */
373
374 static void
375 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
376 tree funtype ATTRIBUTE_UNUSED,
377 poly_int64 stack_size ATTRIBUTE_UNUSED,
378 poly_int64 rounded_stack_size,
379 poly_int64 struct_value_size ATTRIBUTE_UNUSED,
380 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
381 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
382 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
383 {
384 rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
385 rtx call, funmem, pat;
386 int already_popped = 0;
387 poly_int64 n_popped = 0;
388
389 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
390 patterns exist). Any popping that the callee does on return will
391 be from our caller's frame rather than ours. */
392 if (!(ecf_flags & ECF_SIBCALL))
393 {
394 n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
395
396 #ifdef CALL_POPS_ARGS
397 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
398 #endif
399 }
400
401 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
402 no need, and we don't want to load it into a register as an optimization,
403 because prepare_call_address already did that if it should be done. */
404 if (GET_CODE (funexp) != SYMBOL_REF)
405 funexp = memory_address (FUNCTION_MODE, funexp);
406
407 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
408 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
409 {
410 tree t = fndecl;
411
412 /* Although a built-in FUNCTION_DECL and its non-__builtin
413 counterpart compare equal and get a shared mem_attrs, they
414 produce different dump output in compare-debug compilations:
415 if an entry gets garbage collected in one compilation, that
416 compilation then adds a different (but equivalent) entry, while
417 the other doesn't run the garbage collector at the same spot
418 and then shares the mem_attr with the equivalent entry. */
419 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
420 {
421 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
422 if (t2)
423 t = t2;
424 }
425
426 set_mem_expr (funmem, t);
427 }
428 else if (fntree)
429 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
430
431 if (ecf_flags & ECF_SIBCALL)
432 {
433 if (valreg)
434 pat = targetm.gen_sibcall_value (valreg, funmem,
435 rounded_stack_size_rtx,
436 next_arg_reg, NULL_RTX);
437 else
438 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
439 next_arg_reg,
440 gen_int_mode (struct_value_size, Pmode));
441 }
442 /* If the target has "call" or "call_value" insns, then prefer them
443 if no arguments are actually popped. If the target does not have
444 "call" or "call_value" insns, then we must use the popping versions
445 even if the call has no arguments to pop. */
446 else if (maybe_ne (n_popped, 0)
447 || !(valreg
448 ? targetm.have_call_value ()
449 : targetm.have_call ()))
450 {
451 rtx n_pop = gen_int_mode (n_popped, Pmode);
452
453 /* If this subroutine pops its own args, record that in the call insn
454 if possible, for the sake of frame pointer elimination. */
455
456 if (valreg)
457 pat = targetm.gen_call_value_pop (valreg, funmem,
458 rounded_stack_size_rtx,
459 next_arg_reg, n_pop);
460 else
461 pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
462 next_arg_reg, n_pop);
463
464 already_popped = 1;
465 }
466 else
467 {
468 if (valreg)
469 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
470 next_arg_reg, NULL_RTX);
471 else
472 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
473 gen_int_mode (struct_value_size, Pmode));
474 }
475 emit_insn (pat);
476
477 /* Find the call we just emitted. */
478 rtx_call_insn *call_insn = last_call_insn ();
479
480 /* Some targets create a fresh MEM instead of reusing the one provided
481 above. Set its MEM_EXPR. */
482 call = get_call_rtx_from (call_insn);
483 if (call
484 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
485 && MEM_EXPR (funmem) != NULL_TREE)
486 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
487
488 /* Put the register usage information there. */
489 add_function_usage_to (call_insn, call_fusage);
490
491 /* If this is a const call, then set the insn's unchanging bit. */
492 if (ecf_flags & ECF_CONST)
493 RTL_CONST_CALL_P (call_insn) = 1;
494
495 /* If this is a pure call, then mark the insn as a pure call. */
496 if (ecf_flags & ECF_PURE)
497 RTL_PURE_CALL_P (call_insn) = 1;
498
499 /* If this is a looping const or pure call, then mark the insn accordingly. */
500 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
501 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
502
503 /* Create a nothrow REG_EH_REGION note, if needed. */
504 make_reg_eh_region_note (call_insn, ecf_flags, 0);
505
506 if (ecf_flags & ECF_NORETURN)
507 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
508
509 if (ecf_flags & ECF_RETURNS_TWICE)
510 {
511 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
512 cfun->calls_setjmp = 1;
513 }
514
515 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
516
517 /* Restore this now, so that we do defer pops for this call's args
518 if the context of the call as a whole permits. */
519 inhibit_defer_pop = old_inhibit_defer_pop;
520
521 if (maybe_ne (n_popped, 0))
522 {
523 if (!already_popped)
524 CALL_INSN_FUNCTION_USAGE (call_insn)
525 = gen_rtx_EXPR_LIST (VOIDmode,
526 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
527 CALL_INSN_FUNCTION_USAGE (call_insn));
528 rounded_stack_size -= n_popped;
529 rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
530 stack_pointer_delta -= n_popped;
531
532 add_args_size_note (call_insn, stack_pointer_delta);
533
534 /* If popping is needed, stack realignment must use DRAP. */
535 if (SUPPORTS_STACK_ALIGNMENT)
536 crtl->need_drap = true;
537 }
538 /* For noreturn calls, when not accumulating outgoing args, force a
539 REG_ARGS_SIZE note to prevent crossjumping of calls with different
540 arg sizes. */
541 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
542 add_args_size_note (call_insn, stack_pointer_delta);
543
544 if (!ACCUMULATE_OUTGOING_ARGS)
545 {
546 /* If returning from the subroutine does not automatically pop the args,
547 we need an instruction to pop them sooner or later.
548 Perhaps do it now; perhaps just record how much space to pop later.
549
550 If returning from the subroutine does pop the args, indicate that the
551 stack pointer will be changed. */
552
553 if (maybe_ne (rounded_stack_size, 0))
554 {
555 if (ecf_flags & ECF_NORETURN)
556 /* Just pretend we did the pop. */
557 stack_pointer_delta -= rounded_stack_size;
558 else if (flag_defer_pop && inhibit_defer_pop == 0
559 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
560 pending_stack_adjust += rounded_stack_size;
561 else
562 adjust_stack (rounded_stack_size_rtx);
563 }
564 }
565 /* When we accumulate outgoing args, we must avoid any stack manipulations.
566 Restore the stack pointer to its original value now. Usually
567 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
568 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
569 popping variants of functions exist as well.
570
571 ??? We may optimize similarly to defer_pop above, but it is
572 probably not worthwhile.
573
574 ??? It will be worthwhile to enable combine_stack_adjustments even for
575 such machines. */
576 else if (maybe_ne (n_popped, 0))
577 anti_adjust_stack (gen_int_mode (n_popped, Pmode));
578 }
579
580 /* Determine if the function identified by FNDECL is one with
581 special properties we wish to know about. Modify FLAGS accordingly.
582
583 For example, if the function might return more than one time (setjmp), then
584 set ECF_RETURNS_TWICE.
585
586 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
587 space from the stack such as alloca. */
588
589 static int
590 special_function_p (const_tree fndecl, int flags)
591 {
592 tree name_decl = DECL_NAME (fndecl);
593
594 if (maybe_special_function_p (fndecl)
595 && IDENTIFIER_LENGTH (name_decl) <= 11)
596 {
597 const char *name = IDENTIFIER_POINTER (name_decl);
598 const char *tname = name;
599
600 /* We assume that alloca will always be called by name. It
601 makes no sense to pass it as a pointer-to-function to
602 anything that does not understand its behavior. */
603 if (IDENTIFIER_LENGTH (name_decl) == 6
604 && name[0] == 'a'
605 && ! strcmp (name, "alloca"))
606 flags |= ECF_MAY_BE_ALLOCA;
607
608 /* Disregard prefix _ or __. */
609 if (name[0] == '_')
610 {
611 if (name[1] == '_')
612 tname += 2;
613 else
614 tname += 1;
615 }
616
617 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
618 if (! strcmp (tname, "setjmp")
619 || ! strcmp (tname, "sigsetjmp")
620 || ! strcmp (name, "savectx")
621 || ! strcmp (name, "vfork")
622 || ! strcmp (name, "getcontext"))
623 flags |= ECF_RETURNS_TWICE;
624 }
625
626 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
627 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
628 flags |= ECF_MAY_BE_ALLOCA;
629
630 return flags;
631 }
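
/* Illustrative example (hypothetical declaration, not code from this file):
   a call to a function declared as

     extern int vfork (void);

   is recognized purely by name above, so special_function_p adds
   ECF_RETURNS_TWICE to FLAGS for it.  */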
632
633 /* Return the fnspec for FNDECL. */
634
635 static attr_fnspec
636 decl_fnspec (tree fndecl)
637 {
638 tree attr;
639 tree type = TREE_TYPE (fndecl);
640 if (type)
641 {
642 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
643 if (attr)
644 {
645 return TREE_VALUE (TREE_VALUE (attr));
646 }
647 }
648 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
649 return builtin_fnspec (fndecl);
650 return "";
651 }
652
653 /* Similar to special_function_p; return a set of ERF_ flags for the
654 function FNDECL. */
655 static int
656 decl_return_flags (tree fndecl)
657 {
658 attr_fnspec fnspec = decl_fnspec (fndecl);
659
660 unsigned int arg;
661 if (fnspec.returns_arg (&arg))
662 return ERF_RETURNS_ARG | arg;
663
664 if (fnspec.returns_noalias_p ())
665 return ERF_NOALIAS;
666 return 0;
667 }
668
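/* Illustrative decoding sketch (not code from this file): a caller of
   decl_return_flags might use the result as

     int rf = decl_return_flags (fndecl);
     if (rf & ERF_RETURNS_ARG)
       argno = rf & ERF_RETURN_ARG_MASK;

   where the low bits hold the index of the returned argument, assuming the
   usual ERF_RETURN_ARG_MASK layout; for a fnspec that neither returns an
   argument nor is noalias the result is simply 0.  */
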
669 /* Return nonzero when FNDECL represents a call to setjmp. */
670
671 int
672 setjmp_call_p (const_tree fndecl)
673 {
674 if (DECL_IS_RETURNS_TWICE (fndecl))
675 return ECF_RETURNS_TWICE;
676 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
677 }
678
679
680 /* Return true if STMT may be an alloca call. */
681
682 bool
683 gimple_maybe_alloca_call_p (const gimple *stmt)
684 {
685 tree fndecl;
686
687 if (!is_gimple_call (stmt))
688 return false;
689
690 fndecl = gimple_call_fndecl (stmt);
691 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
692 return true;
693
694 return false;
695 }
696
697 /* Return true if STMT is a builtin alloca call. */
698
699 bool
700 gimple_alloca_call_p (const gimple *stmt)
701 {
702 tree fndecl;
703
704 if (!is_gimple_call (stmt))
705 return false;
706
707 fndecl = gimple_call_fndecl (stmt);
708 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
709 switch (DECL_FUNCTION_CODE (fndecl))
710 {
711 CASE_BUILT_IN_ALLOCA:
712 return gimple_call_num_args (stmt) > 0;
713 default:
714 break;
715 }
716
717 return false;
718 }
719
720 /* Return true when exp contains a builtin alloca call. */
721
722 bool
723 alloca_call_p (const_tree exp)
724 {
725 tree fndecl;
726 if (TREE_CODE (exp) == CALL_EXPR
727 && (fndecl = get_callee_fndecl (exp))
728 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
729 switch (DECL_FUNCTION_CODE (fndecl))
730 {
731 CASE_BUILT_IN_ALLOCA:
732 return true;
733 default:
734 break;
735 }
736
737 return false;
738 }
739
740 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
741 function. Return FALSE otherwise. */
742
743 static bool
744 is_tm_builtin (const_tree fndecl)
745 {
746 if (fndecl == NULL)
747 return false;
748
749 if (decl_is_tm_clone (fndecl))
750 return true;
751
752 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
753 {
754 switch (DECL_FUNCTION_CODE (fndecl))
755 {
756 case BUILT_IN_TM_COMMIT:
757 case BUILT_IN_TM_COMMIT_EH:
758 case BUILT_IN_TM_ABORT:
759 case BUILT_IN_TM_IRREVOCABLE:
760 case BUILT_IN_TM_GETTMCLONE_IRR:
761 case BUILT_IN_TM_MEMCPY:
762 case BUILT_IN_TM_MEMMOVE:
763 case BUILT_IN_TM_MEMSET:
764 CASE_BUILT_IN_TM_STORE (1):
765 CASE_BUILT_IN_TM_STORE (2):
766 CASE_BUILT_IN_TM_STORE (4):
767 CASE_BUILT_IN_TM_STORE (8):
768 CASE_BUILT_IN_TM_STORE (FLOAT):
769 CASE_BUILT_IN_TM_STORE (DOUBLE):
770 CASE_BUILT_IN_TM_STORE (LDOUBLE):
771 CASE_BUILT_IN_TM_STORE (M64):
772 CASE_BUILT_IN_TM_STORE (M128):
773 CASE_BUILT_IN_TM_STORE (M256):
774 CASE_BUILT_IN_TM_LOAD (1):
775 CASE_BUILT_IN_TM_LOAD (2):
776 CASE_BUILT_IN_TM_LOAD (4):
777 CASE_BUILT_IN_TM_LOAD (8):
778 CASE_BUILT_IN_TM_LOAD (FLOAT):
779 CASE_BUILT_IN_TM_LOAD (DOUBLE):
780 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
781 CASE_BUILT_IN_TM_LOAD (M64):
782 CASE_BUILT_IN_TM_LOAD (M128):
783 CASE_BUILT_IN_TM_LOAD (M256):
784 case BUILT_IN_TM_LOG:
785 case BUILT_IN_TM_LOG_1:
786 case BUILT_IN_TM_LOG_2:
787 case BUILT_IN_TM_LOG_4:
788 case BUILT_IN_TM_LOG_8:
789 case BUILT_IN_TM_LOG_FLOAT:
790 case BUILT_IN_TM_LOG_DOUBLE:
791 case BUILT_IN_TM_LOG_LDOUBLE:
792 case BUILT_IN_TM_LOG_M64:
793 case BUILT_IN_TM_LOG_M128:
794 case BUILT_IN_TM_LOG_M256:
795 return true;
796 default:
797 break;
798 }
799 }
800 return false;
801 }
802
803 /* Detect flags (function attributes) from the function decl or type node. */
804
805 int
806 flags_from_decl_or_type (const_tree exp)
807 {
808 int flags = 0;
809
810 if (DECL_P (exp))
811 {
812 /* The function exp may have the `malloc' attribute. */
813 if (DECL_IS_MALLOC (exp))
814 flags |= ECF_MALLOC;
815
816 /* The function exp may have the `returns_twice' attribute. */
817 if (DECL_IS_RETURNS_TWICE (exp))
818 flags |= ECF_RETURNS_TWICE;
819
820 /* Process the pure and const attributes. */
821 if (TREE_READONLY (exp))
822 flags |= ECF_CONST;
823 if (DECL_PURE_P (exp))
824 flags |= ECF_PURE;
825 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
826 flags |= ECF_LOOPING_CONST_OR_PURE;
827
828 if (DECL_IS_NOVOPS (exp))
829 flags |= ECF_NOVOPS;
830 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
831 flags |= ECF_LEAF;
832 if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
833 flags |= ECF_COLD;
834
835 if (TREE_NOTHROW (exp))
836 flags |= ECF_NOTHROW;
837
838 if (flag_tm)
839 {
840 if (is_tm_builtin (exp))
841 flags |= ECF_TM_BUILTIN;
842 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
843 || lookup_attribute ("transaction_pure",
844 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
845 flags |= ECF_TM_PURE;
846 }
847
848 flags = special_function_p (exp, flags);
849 }
850 else if (TYPE_P (exp))
851 {
852 if (TYPE_READONLY (exp))
853 flags |= ECF_CONST;
854
855 if (flag_tm
856 && ((flags & ECF_CONST) != 0
857 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
858 flags |= ECF_TM_PURE;
859 }
860 else
861 gcc_unreachable ();
862
863 if (TREE_THIS_VOLATILE (exp))
864 {
865 flags |= ECF_NORETURN;
866 if (flags & (ECF_CONST|ECF_PURE))
867 flags |= ECF_LOOPING_CONST_OR_PURE;
868 }
869
870 return flags;
871 }
872
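/* Illustrative example (hypothetical declaration, not code from this file):
   for a declaration such as

     extern int f (int) __attribute__ ((pure, nothrow, leaf));

   flags_from_decl_or_type returns a mask containing at least
   ECF_PURE | ECF_NOTHROW | ECF_LEAF.  */
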
873 /* Detect flags from a CALL_EXPR. */
874
875 int
876 call_expr_flags (const_tree t)
877 {
878 int flags;
879 tree decl = get_callee_fndecl (t);
880
881 if (decl)
882 flags = flags_from_decl_or_type (decl);
883 else if (CALL_EXPR_FN (t) == NULL_TREE)
884 flags = internal_fn_flags (CALL_EXPR_IFN (t));
885 else
886 {
887 tree type = TREE_TYPE (CALL_EXPR_FN (t));
888 if (type && TREE_CODE (type) == POINTER_TYPE)
889 flags = flags_from_decl_or_type (TREE_TYPE (type));
890 else
891 flags = 0;
892 if (CALL_EXPR_BY_DESCRIPTOR (t))
893 flags |= ECF_BY_DESCRIPTOR;
894 }
895
896 return flags;
897 }
898
899 /* Return true if ARG should be passed by invisible reference. */
900
901 bool
902 pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
903 {
904 if (tree type = arg.type)
905 {
906 /* If this type contains non-trivial constructors, then it is
907 forbidden for the middle-end to create any new copies. */
908 if (TREE_ADDRESSABLE (type))
909 return true;
910
911 /* GCC post 3.4 passes *all* variable sized types by reference. */
912 if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
913 return true;
914
915 /* If a record type should be passed the same as its first (and only)
916 member, use the type and mode of that member. */
917 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
918 {
919 arg.type = TREE_TYPE (first_field (type));
920 arg.mode = TYPE_MODE (arg.type);
921 }
922 }
923
924 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
925 }
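
/* Illustrative example (hypothetical C++ type, not code from this file):
   for a parameter whose type has a non-trivial copy constructor,

     struct S { S (const S &); int i; };
     void f (S s);

   S is TREE_ADDRESSABLE, so the argument is passed by invisible reference
   on every target and the target hook is never consulted.  */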
926
927 /* Return true if TYPE should be passed by reference when passed to
928 the "..." arguments of a function. */
929
930 bool
931 pass_va_arg_by_reference (tree type)
932 {
933 return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
934 }
935
936 /* Decide whether ARG, which occurs in the state described by CA,
937 should be passed by reference. Return true if so and update
938 ARG accordingly. */
939
940 bool
941 apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
942 {
943 if (pass_by_reference (ca, arg))
944 {
945 arg.type = build_pointer_type (arg.type);
946 arg.mode = TYPE_MODE (arg.type);
947 arg.pass_by_reference = true;
948 return true;
949 }
950 return false;
951 }
952
953 /* Return true if ARG, which is passed by reference, should be callee
954 copied instead of caller copied. */
955
956 bool
957 reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
958 {
959 if (arg.type && TREE_ADDRESSABLE (arg.type))
960 return false;
961 return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
962 }
963
964
965 /* Precompute all register parameters as described by ARGS, storing values
966 into fields within the ARGS array.
967
968 NUM_ACTUALS indicates the total number of elements in the ARGS array.
969
970 Set REG_PARM_SEEN if we encounter a register parameter. */
971
972 static void
973 precompute_register_parameters (int num_actuals, struct arg_data *args,
974 int *reg_parm_seen)
975 {
976 int i;
977
978 *reg_parm_seen = 0;
979
980 for (i = 0; i < num_actuals; i++)
981 if (args[i].reg != 0 && ! args[i].pass_on_stack)
982 {
983 *reg_parm_seen = 1;
984
985 if (args[i].value == 0)
986 {
987 push_temp_slots ();
988 args[i].value = expand_normal (args[i].tree_value);
989 preserve_temp_slots (args[i].value);
990 pop_temp_slots ();
991 }
992
993 /* If we are to promote the function arg to a wider mode,
994 do it now. */
995
996 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
997 args[i].value
998 = convert_modes (args[i].mode,
999 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1000 args[i].value, args[i].unsignedp);
1001
1002 /* If the value is a non-legitimate constant, force it into a
1003 pseudo now. TLS symbols sometimes need a call to resolve. */
1004 if (CONSTANT_P (args[i].value)
1005 && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
1006 || targetm.precompute_tls_p (args[i].mode, args[i].value)))
1007 args[i].value = force_reg (args[i].mode, args[i].value);
1008
1009 /* If we're going to have to load the value by parts, pull the
1010 parts into pseudos. The part extraction process can involve
1011 non-trivial computation. */
1012 if (GET_CODE (args[i].reg) == PARALLEL)
1013 {
1014 tree type = TREE_TYPE (args[i].tree_value);
1015 args[i].parallel_value
1016 = emit_group_load_into_temps (args[i].reg, args[i].value,
1017 type, int_size_in_bytes (type));
1018 }
1019
1020 /* If the value is expensive, and we are inside an appropriately
1021 short loop, put the value into a pseudo and then put the pseudo
1022 into the hard reg.
1023
1024 For small register classes, also do this if this call uses
1025 register parameters. This is to avoid reload conflicts while
1026 loading the parameter registers. */
1027
1028 else if ((! (REG_P (args[i].value)
1029 || (GET_CODE (args[i].value) == SUBREG
1030 && REG_P (SUBREG_REG (args[i].value)))))
1031 && args[i].mode != BLKmode
1032 && (set_src_cost (args[i].value, args[i].mode,
1033 optimize_insn_for_speed_p ())
1034 > COSTS_N_INSNS (1))
1035 && ((*reg_parm_seen
1036 && targetm.small_register_classes_for_mode_p (args[i].mode))
1037 || optimize))
1038 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1039 }
1040 }
1041
1042 #ifdef REG_PARM_STACK_SPACE
1043
1044 /* The argument list is the property of the called routine and it
1045 may clobber it. If the fixed area has been used for previous
1046 parameters, we must save and restore it. */
1047
1048 static rtx
1049 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1050 {
1051 unsigned int low;
1052 unsigned int high;
1053
1054 /* Compute the boundary of the area that needs to be saved, if any. */
1055 high = reg_parm_stack_space;
1056 if (ARGS_GROW_DOWNWARD)
1057 high += 1;
1058
1059 if (high > highest_outgoing_arg_in_use)
1060 high = highest_outgoing_arg_in_use;
1061
1062 for (low = 0; low < high; low++)
1063 if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
1064 {
1065 int num_to_save;
1066 machine_mode save_mode;
1067 int delta;
1068 rtx addr;
1069 rtx stack_area;
1070 rtx save_area;
1071
1072 while (stack_usage_map[--high] == 0)
1073 ;
1074
1075 *low_to_save = low;
1076 *high_to_save = high;
1077
1078 num_to_save = high - low + 1;
1079
1080 /* If we don't have the required alignment, we must do this
1081 in BLKmode. */
1082 scalar_int_mode imode;
1083 if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1084 && (low & (MIN (GET_MODE_SIZE (imode),
1085 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1086 save_mode = imode;
1087 else
1088 save_mode = BLKmode;
1089
1090 if (ARGS_GROW_DOWNWARD)
1091 delta = -high;
1092 else
1093 delta = low;
1094
1095 addr = plus_constant (Pmode, argblock, delta);
1096 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1097
1098 set_mem_align (stack_area, PARM_BOUNDARY);
1099 if (save_mode == BLKmode)
1100 {
1101 save_area = assign_stack_temp (BLKmode, num_to_save);
1102 emit_block_move (validize_mem (save_area), stack_area,
1103 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1104 }
1105 else
1106 {
1107 save_area = gen_reg_rtx (save_mode);
1108 emit_move_insn (save_area, stack_area);
1109 }
1110
1111 return save_area;
1112 }
1113
1114 return NULL_RTX;
1115 }
1116
1117 static void
1118 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1119 {
1120 machine_mode save_mode = GET_MODE (save_area);
1121 int delta;
1122 rtx addr, stack_area;
1123
1124 if (ARGS_GROW_DOWNWARD)
1125 delta = -high_to_save;
1126 else
1127 delta = low_to_save;
1128
1129 addr = plus_constant (Pmode, argblock, delta);
1130 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1131 set_mem_align (stack_area, PARM_BOUNDARY);
1132
1133 if (save_mode != BLKmode)
1134 emit_move_insn (stack_area, save_area);
1135 else
1136 emit_block_move (stack_area, validize_mem (save_area),
1137 GEN_INT (high_to_save - low_to_save + 1),
1138 BLOCK_OP_CALL_PARM);
1139 }
1140 #endif /* REG_PARM_STACK_SPACE */
1141
1142 /* If any elements in ARGS refer to parameters that are to be passed in
1143 registers, but not in memory, and whose alignment does not permit a
1144 direct copy into registers, copy the values into a group of pseudos
1145 which we will later copy into the appropriate hard registers.
1146
1147 Pseudos for each unaligned argument will be stored into the array
1148 args[argnum].aligned_regs. The caller is responsible for deallocating
1149 the aligned_regs array if it is nonzero. */
1150
1151 static void
1152 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1153 {
1154 int i, j;
1155
1156 for (i = 0; i < num_actuals; i++)
1157 if (args[i].reg != 0 && ! args[i].pass_on_stack
1158 && GET_CODE (args[i].reg) != PARALLEL
1159 && args[i].mode == BLKmode
1160 && MEM_P (args[i].value)
1161 && (MEM_ALIGN (args[i].value)
1162 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1163 {
1164 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1165 int endian_correction = 0;
1166
1167 if (args[i].partial)
1168 {
1169 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1170 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1171 }
1172 else
1173 {
1174 args[i].n_aligned_regs
1175 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1176 }
1177
1178 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1179
1180 /* Structures smaller than a word are normally aligned to the
1181 least significant byte. On a BYTES_BIG_ENDIAN machine,
1182 this means we must skip the empty high order bytes when
1183 calculating the bit offset. */
1184 if (bytes < UNITS_PER_WORD
1185 #ifdef BLOCK_REG_PADDING
1186 && (BLOCK_REG_PADDING (args[i].mode,
1187 TREE_TYPE (args[i].tree_value), 1)
1188 == PAD_DOWNWARD)
1189 #else
1190 && BYTES_BIG_ENDIAN
1191 #endif
1192 )
1193 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1194
1195 for (j = 0; j < args[i].n_aligned_regs; j++)
1196 {
1197 rtx reg = gen_reg_rtx (word_mode);
1198 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1199 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1200
1201 args[i].aligned_regs[j] = reg;
1202 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1203 word_mode, word_mode, false, NULL);
1204
1205 /* There is no need to restrict this code to loading items
1206 in TYPE_ALIGN sized hunks. The bitfield instructions can
1207 load up entire word sized registers efficiently.
1208
1209 ??? This may not be needed anymore.
1210 We used to emit a clobber here but that doesn't let later
1211 passes optimize the instructions we emit. By storing 0 into
1212 the register, later passes know that the first AND to zero out
1213 the bitfield being set in the register is unnecessary. The
1214 store of 0 will be deleted, as will at least the first AND. */
1215
1216 emit_move_insn (reg, const0_rtx);
1217
1218 bytes -= bitsize / BITS_PER_UNIT;
1219 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1220 word_mode, word, false);
1221 }
1222 }
1223 }
1224
1225 /* The limit set by -Walloc-size-larger-than=. */
1226 static GTY(()) tree alloc_object_size_limit;
1227
1228 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1229 setting if the option is specified, or to the maximum object size if it
1230 is not. Return the initialized value. */
1231
1232 static tree
1233 alloc_max_size (void)
1234 {
1235 if (alloc_object_size_limit)
1236 return alloc_object_size_limit;
1237
1238 HOST_WIDE_INT limit = warn_alloc_size_limit;
1239 if (limit == HOST_WIDE_INT_MAX)
1240 limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1241
1242 alloc_object_size_limit = build_int_cst (size_type_node, limit);
1243
1244 return alloc_object_size_limit;
1245 }
1246
1247 /* Return true when EXP's range can be determined and set RANGE[] to it
1248 after adjusting it if necessary to make EXP represent a valid size
1249 of an object, or a valid size argument to an allocation function declared
1250 with attribute alloc_size (whose argument may be signed), or to a string
1251 manipulation function like memset.
1252 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
1253 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
1254 a (nearly) invalid argument to allocation functions like malloc but it
1255 is a valid argument to functions like memset.
1256 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
1257 in a multi-range, otherwise to the smallest valid subrange. */
1258
1259 bool
1260 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
1261 int flags /* = 0 */)
1262 {
1263 if (!exp)
1264 return false;
1265
1266 if (tree_fits_uhwi_p (exp))
1267 {
1268 /* EXP is a constant. */
1269 range[0] = range[1] = exp;
1270 return true;
1271 }
1272
1273 tree exptype = TREE_TYPE (exp);
1274 bool integral = INTEGRAL_TYPE_P (exptype);
1275
1276 wide_int min, max;
1277 enum value_range_kind range_type;
1278
1279 if (integral)
1280 {
1281 value_range vr;
1282 if (query && query->range_of_expr (vr, exp, stmt))
1283 {
1284 if (vr.undefined_p ())
1285 vr.set_varying (TREE_TYPE (exp));
1286 range_type = vr.kind ();
1287 min = wi::to_wide (vr.min ());
1288 max = wi::to_wide (vr.max ());
1289 }
1290 else
1291 range_type = determine_value_range (exp, &min, &max);
1292 }
1293 else
1294 range_type = VR_VARYING;
1295
1296 if (range_type == VR_VARYING)
1297 {
1298 if (integral)
1299 {
1300 /* Use the full range of the type of the expression when
1301 no value range information is available. */
1302 range[0] = TYPE_MIN_VALUE (exptype);
1303 range[1] = TYPE_MAX_VALUE (exptype);
1304 return true;
1305 }
1306
1307 range[0] = NULL_TREE;
1308 range[1] = NULL_TREE;
1309 return false;
1310 }
1311
1312 unsigned expprec = TYPE_PRECISION (exptype);
1313
1314 bool signed_p = !TYPE_UNSIGNED (exptype);
1315
1316 if (range_type == VR_ANTI_RANGE)
1317 {
1318 if (signed_p)
1319 {
1320 if (wi::les_p (max, 0))
1321 {
1322 /* EXP is not in a strictly negative range. That means
1323 it must be in some (not necessarily strictly) positive
1324 range which includes zero. Since in signed to unsigned
1325 conversions negative values end up converted to large
1326 positive values, and otherwise they are not valid sizes,
1327 the resulting range is in both cases [0, TYPE_MAX]. */
1328 min = wi::zero (expprec);
1329 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1330 }
1331 else if (wi::les_p (min - 1, 0))
1332 {
1333 /* EXP is not in a negative-positive range. That means EXP
1334 is either negative, or greater than max. Since negative
1335 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
1336 min = max + 1;
1337 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1338 }
1339 else
1340 {
1341 max = min - 1;
1342 min = wi::zero (expprec);
1343 }
1344 }
1345 else
1346 {
1347 wide_int maxsize = wi::to_wide (max_object_size ());
1348 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
1349 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
1350 if (wi::eq_p (0, min - 1))
1351 {
1352 /* EXP is unsigned and not in the range [1, MAX]. That means
1353 it's either zero or greater than MAX. Even though 0 would
1354 normally be detected by -Walloc-zero, unless ALLOW_ZERO
1355 is set, set the range to [MAX, TYPE_MAX] so that when MAX
1356 is greater than the limit the whole range is diagnosed. */
1357 wide_int maxsize = wi::to_wide (max_object_size ());
1358 if (flags & SR_ALLOW_ZERO)
1359 {
1360 if (wi::leu_p (maxsize, max + 1)
1361 || !(flags & SR_USE_LARGEST))
1362 min = max = wi::zero (expprec);
1363 else
1364 {
1365 min = max + 1;
1366 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1367 }
1368 }
1369 else
1370 {
1371 min = max + 1;
1372 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1373 }
1374 }
1375 else if ((flags & SR_USE_LARGEST)
1376 && wi::ltu_p (max + 1, maxsize))
1377 {
1378 /* When USE_LARGEST is set and the larger of the two subranges
1379 is a valid size, use it... */
1380 min = max + 1;
1381 max = maxsize;
1382 }
1383 else
1384 {
1385 /* ...otherwise use the smaller subrange. */
1386 max = min - 1;
1387 min = wi::zero (expprec);
1388 }
1389 }
1390 }
1391
1392 range[0] = wide_int_to_tree (exptype, min);
1393 range[1] = wide_int_to_tree (exptype, max);
1394
1395 return true;
1396 }
1397
1398 bool
1399 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
1400 {
1401 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
1402 }
1403
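/* Usage sketch (hypothetical values, not code from this file):

     tree rng[2];
     if (get_size_range (size_int (32), rng))
       ... rng[0] and rng[1] are both 32 ...

   For an SSA name with no usable range information, an integral argument
   instead yields the full range of its type, and a non-integral argument
   makes the function return false.  */
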
1404 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1405 whose argument numbers given by IDX with values given by ARGS exceed
1406 the maximum object size or cause an unsigned overflow (wrapping) when
1407 multiplied. FN is null when EXP is a call via a function pointer.
1408 When ARGS[0] is null the function does nothing. ARGS[1] may be null
1409 for functions like malloc, and non-null for those like calloc that
1410 are decorated with a two-argument attribute alloc_size. */
1411
1412 void
1413 maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1414 {
1415 /* The range each of the (up to) two arguments is known to be in. */
1416 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1417
1418 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1419 tree maxobjsize = alloc_max_size ();
1420
1421 location_t loc = EXPR_LOCATION (exp);
1422
1423 tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
1424 bool warned = false;
1425
1426 /* Validate each argument individually. */
1427 for (unsigned i = 0; i != 2 && args[i]; ++i)
1428 {
1429 if (TREE_CODE (args[i]) == INTEGER_CST)
1430 {
1431 argrange[i][0] = args[i];
1432 argrange[i][1] = args[i];
1433
1434 if (tree_int_cst_lt (args[i], integer_zero_node))
1435 {
1436 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1437 "%Kargument %i value %qE is negative",
1438 exp, idx[i] + 1, args[i]);
1439 }
1440 else if (integer_zerop (args[i]))
1441 {
1442 /* Avoid issuing -Walloc-zero for allocation functions other
1443 than __builtin_alloca that are declared with attribute
1444 returns_nonnull because there's no portability risk. This
1445 avoids warning for such calls to libiberty's xmalloc and
1446 friends.
1447 Also avoid issuing the warning for calls to a function named
1448 "alloca". */
1449 if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
1450 ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
1451 : !lookup_attribute ("returns_nonnull",
1452 TYPE_ATTRIBUTES (fntype)))
1453 warned = warning_at (loc, OPT_Walloc_zero,
1454 "%Kargument %i value is zero",
1455 exp, idx[i] + 1);
1456 }
1457 else if (tree_int_cst_lt (maxobjsize, args[i]))
1458 {
1459 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1460 mode and with -fno-exceptions as a way to indicate array
1461 size overflow. There's no good way to detect C++98 here
1462 so avoid diagnosing these calls for all C++ modes. */
1463 if (i == 0
1464 && fn
1465 && !args[1]
1466 && lang_GNU_CXX ()
1467 && DECL_IS_OPERATOR_NEW_P (fn)
1468 && integer_all_onesp (args[i]))
1469 continue;
1470
1471 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1472 "%Kargument %i value %qE exceeds "
1473 "maximum object size %E",
1474 exp, idx[i] + 1, args[i], maxobjsize);
1475 }
1476 }
1477 else if (TREE_CODE (args[i]) == SSA_NAME
1478 && get_size_range (args[i], argrange[i]))
1479 {
1480 /* Verify that the argument's range is not negative (including
1481 upper bound of zero). */
1482 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1483 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1484 {
1485 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1486 "%Kargument %i range [%E, %E] is negative",
1487 exp, idx[i] + 1,
1488 argrange[i][0], argrange[i][1]);
1489 }
1490 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1491 {
1492 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1493 "%Kargument %i range [%E, %E] exceeds "
1494 "maximum object size %E",
1495 exp, idx[i] + 1,
1496 argrange[i][0], argrange[i][1],
1497 maxobjsize);
1498 }
1499 }
1500 }
1501
1502 if (!argrange[0])
1503 return;
1504
1505 /* For a two-argument alloc_size, validate the product of the two
1506 arguments if both of their values or ranges are known. */
1507 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1508 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1509 && !integer_onep (argrange[0][0])
1510 && !integer_onep (argrange[1][0]))
1511 {
1512 /* Check for overflow in the product of a function decorated with
1513 attribute alloc_size (X, Y). */
1514 unsigned szprec = TYPE_PRECISION (size_type_node);
1515 wide_int x = wi::to_wide (argrange[0][0], szprec);
1516 wide_int y = wi::to_wide (argrange[1][0], szprec);
1517
1518 wi::overflow_type vflow;
1519 wide_int prod = wi::umul (x, y, &vflow);
1520
1521 if (vflow)
1522 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1523 "%Kproduct %<%E * %E%> of arguments %i and %i "
1524 "exceeds %<SIZE_MAX%>",
1525 exp, argrange[0][0], argrange[1][0],
1526 idx[0] + 1, idx[1] + 1);
1527 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1528 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1529 "%Kproduct %<%E * %E%> of arguments %i and %i "
1530 "exceeds maximum object size %E",
1531 exp, argrange[0][0], argrange[1][0],
1532 idx[0] + 1, idx[1] + 1,
1533 maxobjsize);
1534
1535 if (warned)
1536 {
1537 /* Print the full range of each of the two arguments to make
1538 it clear when it is, in fact, in a range and not constant. */
1539 if (argrange[0][0] != argrange [0][1])
1540 inform (loc, "argument %i in the range [%E, %E]",
1541 idx[0] + 1, argrange[0][0], argrange[0][1]);
1542 if (argrange[1][0] != argrange [1][1])
1543 inform (loc, "argument %i in the range [%E, %E]",
1544 idx[1] + 1, argrange[1][0], argrange[1][1]);
1545 }
1546 }
1547
1548 if (warned && fn)
1549 {
1550 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1551
1552 if (DECL_IS_UNDECLARED_BUILTIN (fn))
1553 inform (loc,
1554 "in a call to built-in allocation function %qD", fn);
1555 else
1556 inform (fnloc,
1557 "in a call to allocation function %qD declared here", fn);
1558 }
1559 }
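
/* Illustrative example (hypothetical source and an LP64 target assumed, not
   code from this file): for a call such as

     void *p = calloc ((size_t) 1 << 32, (size_t) 1 << 32);

   each argument is individually valid, but the product check above detects
   the unsigned wraparound and issues the "exceeds SIZE_MAX" form of
   -Walloc-size-larger-than=.  */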
1560
1561 /* If EXPR refers to a character array or pointer declared with attribute
1562 nonstring, return a decl for that array or pointer and set *REF to
1563 the referenced enclosing object or pointer. Otherwise return
1564 null. */
1565
1566 tree
1567 get_attr_nonstring_decl (tree expr, tree *ref)
1568 {
1569 tree decl = expr;
1570 tree var = NULL_TREE;
1571 if (TREE_CODE (decl) == SSA_NAME)
1572 {
1573 gimple *def = SSA_NAME_DEF_STMT (decl);
1574
1575 if (is_gimple_assign (def))
1576 {
1577 tree_code code = gimple_assign_rhs_code (def);
1578 if (code == ADDR_EXPR
1579 || code == COMPONENT_REF
1580 || code == VAR_DECL)
1581 decl = gimple_assign_rhs1 (def);
1582 }
1583 else
1584 var = SSA_NAME_VAR (decl);
1585 }
1586
1587 if (TREE_CODE (decl) == ADDR_EXPR)
1588 decl = TREE_OPERAND (decl, 0);
1589
1590 /* To simplify calling code, store the referenced DECL regardless of
1591 the attribute determined below, but avoid storing the SSA_NAME_VAR
1592 obtained above (it's not useful for dataflow purposes). */
1593 if (ref)
1594 *ref = decl;
1595
1596 /* Use the SSA_NAME_VAR that was determined above to see if it's
1597 declared nonstring. Otherwise drill down into the referenced
1598 DECL. */
1599 if (var)
1600 decl = var;
1601 else if (TREE_CODE (decl) == ARRAY_REF)
1602 decl = TREE_OPERAND (decl, 0);
1603 else if (TREE_CODE (decl) == COMPONENT_REF)
1604 decl = TREE_OPERAND (decl, 1);
1605 else if (TREE_CODE (decl) == MEM_REF)
1606 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
1607
1608 if (DECL_P (decl)
1609 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1610 return decl;
1611
1612 return NULL_TREE;
1613 }
1614
1615 /* Warn about passing a non-string array/pointer to a built-in function
1616 that expects a nul-terminated string argument. Returns true if
1617 a warning has been issued. */
1618
1619 bool
1620 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1621 {
1622 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1623 return false;
1624
1625 if (TREE_NO_WARNING (exp) || !warn_stringop_overread)
1626 return false;
1627
1628 /* Avoid clearly invalid calls (more checking done below). */
1629 unsigned nargs = call_expr_nargs (exp);
1630 if (!nargs)
1631 return false;
1632
1633 /* The bound argument to a bounded string function like strncpy. */
1634 tree bound = NULL_TREE;
1635
1636 /* The longest known or possible string argument to one of the comparison
1637 functions. If the length is less than the bound it is used instead.
1638 Since the length is only used for warnings and not for code generation,
1639 disable strict mode in the calls to get_range_strlen below. */
1640 tree maxlen = NULL_TREE;
1641
1642 /* It's safe to call "bounded" string functions with a non-string
1643 argument since the functions provide an explicit bound for this
1644 purpose. The exception is strncat where the bound may refer to
1645 either the destination or the source. */
1646 int fncode = DECL_FUNCTION_CODE (fndecl);
1647 switch (fncode)
1648 {
1649 case BUILT_IN_STRCMP:
1650 case BUILT_IN_STRNCMP:
1651 case BUILT_IN_STRNCASECMP:
1652 {
1653 /* For these, if one argument refers to one or more of a set
1654 of string constants or arrays of known size, determine
1655 the range of their known or possible lengths and use it
1656 conservatively as the bound for the unbounded function,
1657 and to adjust the range of the bound of the bounded ones. */
1658 for (unsigned argno = 0;
1659 argno < MIN (nargs, 2)
1660 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
1661 {
1662 tree arg = CALL_EXPR_ARG (exp, argno);
1663 if (!get_attr_nonstring_decl (arg))
1664 {
1665 c_strlen_data lendata = { };
1666 /* Set MAXBOUND to an arbitrary non-null non-integer
1667 node as a request to have it set to the length of
1668 the longest string in a PHI. */
1669 lendata.maxbound = arg;
1670 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1671 maxlen = lendata.maxbound;
1672 }
1673 }
1674 }
1675 /* Fall through. */
1676
1677 case BUILT_IN_STRNCAT:
1678 case BUILT_IN_STPNCPY:
1679 case BUILT_IN_STRNCPY:
1680 if (nargs > 2)
1681 bound = CALL_EXPR_ARG (exp, 2);
1682 break;
1683
1684 case BUILT_IN_STRNDUP:
1685 if (nargs > 1)
1686 bound = CALL_EXPR_ARG (exp, 1);
1687 break;
1688
1689 case BUILT_IN_STRNLEN:
1690 {
1691 tree arg = CALL_EXPR_ARG (exp, 0);
1692 if (!get_attr_nonstring_decl (arg))
1693 {
1694 c_strlen_data lendata = { };
1695 /* Set MAXBOUND to an arbitrary non-null non-integer
1696 node as a request to have it set to the length of
1697 the longest string in a PHI. */
1698 lendata.maxbound = arg;
1699 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1700 maxlen = lendata.maxbound;
1701 }
1702 if (nargs > 1)
1703 bound = CALL_EXPR_ARG (exp, 1);
1704 break;
1705 }
1706
1707 default:
1708 break;
1709 }
1710
1711 /* Determine the range of the bound argument (if specified). */
1712 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1713 if (bound)
1714 {
1715 STRIP_NOPS (bound);
1716 get_size_range (bound, bndrng);
1717 }
1718
1719 location_t loc = EXPR_LOCATION (exp);
1720
1721 if (bndrng[0])
1722 {
1723 /* Diagnose excessive bound prior to the adjustment below and
1724 regardless of attribute nonstring. */
1725 tree maxobjsize = max_object_size ();
1726 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1727 {
1728 bool warned = false;
1729 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1730 warned = warning_at (loc, OPT_Wstringop_overread,
1731 "%K%qD specified bound %E "
1732 "exceeds maximum object size %E",
1733 exp, fndecl, bndrng[0], maxobjsize);
1734 else
1735 warned = warning_at (loc, OPT_Wstringop_overread,
1736 "%K%qD specified bound [%E, %E] "
1737 "exceeds maximum object size %E",
1738 exp, fndecl, bndrng[0], bndrng[1],
1739 maxobjsize);
1740 if (warned)
1741 TREE_NO_WARNING (exp) = true;
1742
1743 return warned;
1744 }
1745 }
1746
1747 if (maxlen && !integer_all_onesp (maxlen))
1748 {
1749 /* Add one for the nul. */
1750 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1751 size_one_node);
1752
1753 if (!bndrng[0])
1754 {
1755 /* Conservatively use the upper bound of the lengths for
1756 both the lower and the upper bound of the operation. */
1757 bndrng[0] = maxlen;
1758 bndrng[1] = maxlen;
1759 bound = void_type_node;
1760 }
1761 else if (maxlen)
1762 {
1763 /* Replace the bound on the operation with the upper bound
1764 of the length of the string if the latter is smaller. */
1765 if (tree_int_cst_lt (maxlen, bndrng[0]))
1766 bndrng[0] = maxlen;
1767 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1768 bndrng[1] = maxlen;
1769 }
1770 }
1771
1772 bool any_arg_warned = false;
1773 /* Iterate over the built-in function's formal arguments and check
1774 each const char* against the actual argument. If the actual
1775 argument is declared attribute non-string issue a warning unless
1776 the argument's maximum length is bounded. */
1777 function_args_iterator it;
1778 function_args_iter_init (&it, TREE_TYPE (fndecl));
1779
1780 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1781 {
1782 /* Avoid iterating past the declared argument in a call
1783 to function declared without a prototype. */
1784 if (argno >= nargs)
1785 break;
1786
1787 tree argtype = function_args_iter_cond (&it);
1788 if (!argtype)
1789 break;
1790
1791 if (TREE_CODE (argtype) != POINTER_TYPE)
1792 continue;
1793
1794 argtype = TREE_TYPE (argtype);
1795
1796 if (TREE_CODE (argtype) != INTEGER_TYPE
1797 || !TYPE_READONLY (argtype))
1798 continue;
1799
1800 argtype = TYPE_MAIN_VARIANT (argtype);
1801 if (argtype != char_type_node)
1802 continue;
1803
1804 tree callarg = CALL_EXPR_ARG (exp, argno);
1805 if (TREE_CODE (callarg) == ADDR_EXPR)
1806 callarg = TREE_OPERAND (callarg, 0);
1807
1808 /* See if the destination is declared with attribute "nonstring". */
1809 tree decl = get_attr_nonstring_decl (callarg);
1810 if (!decl)
1811 continue;
1812
1813 /* The maximum number of array elements accessed. */
1814 offset_int wibnd = 0;
1815
1816 if (argno && fncode == BUILT_IN_STRNCAT)
1817 {
1818 /* See if the bound in strncat is derived from the length
1819 (strlen) of the destination (as it's expected to be).
1820 If so, reset BOUND and FNCODE to trigger a warning. */
1821 tree dstarg = CALL_EXPR_ARG (exp, 0);
1822 if (is_strlen_related_p (dstarg, bound))
1823 {
1824 /* The bound applies to the destination, not to the source,
1825 so reset these to trigger a warning without mentioning
1826 the bound. */
1827 bound = NULL;
1828 fncode = 0;
1829 }
1830 else if (bndrng[1])
1831 /* Use the upper bound of the range for strncat. */
1832 wibnd = wi::to_offset (bndrng[1]);
1833 }
1834 else if (bndrng[0])
1835 /* Use the lower bound of the range for functions other than
1836 strncat. */
1837 wibnd = wi::to_offset (bndrng[0]);
1838
1839 /* Determine the size of the argument array if it is one. */
1840 offset_int asize = wibnd;
1841 bool known_size = false;
1842 tree type = TREE_TYPE (decl);
1843
1844 /* Determine the array size. For arrays of unknown bound and
1845 pointers reset BOUND to trigger the appropriate warning. */
1846 if (TREE_CODE (type) == ARRAY_TYPE)
1847 {
1848 if (tree arrbnd = TYPE_DOMAIN (type))
1849 {
1850 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1851 {
1852 asize = wi::to_offset (arrbnd) + 1;
1853 known_size = true;
1854 }
1855 }
1856 else if (bound == void_type_node)
1857 bound = NULL_TREE;
1858 }
1859 else if (bound == void_type_node)
1860 bound = NULL_TREE;
1861
1862 /* In a call to strncat with a bound in a range whose lower but
1863 not upper bound is less than the array size, reset ASIZE to
1864 be the same as the bound and the other variable to trigger
1865 the appropriate warning below. */
1866 if (fncode == BUILT_IN_STRNCAT
1867 && bndrng[0] != bndrng[1]
1868 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1869 && (!known_size
1870 || wi::ltu_p (asize, wibnd)))
1871 {
1872 asize = wibnd;
1873 bound = NULL_TREE;
1874 fncode = 0;
1875 }
1876
1877 bool warned = false;
1878
1879 auto_diagnostic_group d;
1880 if (wi::ltu_p (asize, wibnd))
1881 {
1882 if (bndrng[0] == bndrng[1])
1883 warned = warning_at (loc, OPT_Wstringop_overread,
1884 "%qD argument %i declared attribute "
1885 "%<nonstring%> is smaller than the specified "
1886 "bound %wu",
1887 fndecl, argno + 1, wibnd.to_uhwi ());
1888 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1889 warned = warning_at (loc, OPT_Wstringop_overread,
1890 "%qD argument %i declared attribute "
1891 "%<nonstring%> is smaller than "
1892 "the specified bound [%E, %E]",
1893 fndecl, argno + 1, bndrng[0], bndrng[1]);
1894 else
1895 warned = warning_at (loc, OPT_Wstringop_overread,
1896 "%qD argument %i declared attribute "
1897 "%<nonstring%> may be smaller than "
1898 "the specified bound [%E, %E]",
1899 fndecl, argno + 1, bndrng[0], bndrng[1]);
1900 }
1901 else if (fncode == BUILT_IN_STRNCAT)
1902 ; /* Avoid warning for calls to strncat() when the bound
1903 is equal to the size of the non-string argument. */
1904 else if (!bound)
1905 warned = warning_at (loc, OPT_Wstringop_overread,
1906 "%qD argument %i declared attribute %<nonstring%>",
1907 fndecl, argno + 1);
1908
1909 if (warned)
1910 {
1911 inform (DECL_SOURCE_LOCATION (decl),
1912 "argument %qD declared here", decl);
1913 any_arg_warned = true;
1914 }
1915 }
1916
1917 if (any_arg_warned)
1918 TREE_NO_WARNING (exp) = true;
1919
1920 return any_arg_warned;
1921 }
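
/* Illustrative sketch (not part of the original source): user code of the
   kind the function above diagnoses under -Wstringop-overread.  The array
   name and bound are made up for the example.

     __attribute__ ((nonstring)) char id[4];   // no terminating nul expected

     size_t f (void)
     {
       return strnlen (id, 8);   // bound 8 exceeds the size of ID (4)
     }

   Here ID is declared with attribute nonstring and the specified bound
   exceeds the array size, so argument 1 is reported as smaller than the
   specified bound, with a note pointing at the declaration of ID.  */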
1922
1923 /* Issue an error if CALL_EXPR was flagged as requiring
1924 tail-call optimization. */
1925
1926 void
1927 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1928 {
1929 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1930 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1931 return;
1932
1933 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1934 }
1935
1936 /* Returns the type of the argument ARGNO to a function with type FNTYPE
1937 or null when the type cannot be determined or no such argument exists. */
1938
1939 static tree
1940 fntype_argno_type (tree fntype, unsigned argno)
1941 {
1942 if (!prototype_p (fntype))
1943 return NULL_TREE;
1944
1945 tree argtype;
1946 function_args_iterator it;
1947 FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1948 if (argno-- == 0)
1949 return argtype;
1950
1951 return NULL_TREE;
1952 }
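
/* For illustration only (not from the original source): given a prototyped
   declaration such as

     void f (int, char *, size_t);

   fntype_argno_type applied to f's function type with ARGNO 1 yields the
   type node for "char *", while an out-of-range ARGNO, or an unprototyped
   FNTYPE, yields NULL_TREE.  */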
1953
1954 /* Helper to append the "human readable" attribute access specification
1955 described by ACCESS to the array ATTRSTR with size STRSIZE. Used in
1956 diagnostics. */
1957
1958 static inline void
1959 append_attrname (const std::pair<int, attr_access> &access,
1960 char *attrstr, size_t strsize)
1961 {
1962 if (access.second.internal_p)
1963 return;
1964
1965 tree str = access.second.to_external_string ();
1966 gcc_assert (strsize >= (size_t) TREE_STRING_LENGTH (str));
1967 strcpy (attrstr, TREE_STRING_POINTER (str));
1968 }
1969
1970 /* Iterate over attribute access read-only, read-write, and write-only
1971 arguments and diagnose past-the-end accesses and related problems
1972 in the function call EXP. */
1973
1974 static void
1975 maybe_warn_rdwr_sizes (rdwr_map *rwm, tree fndecl, tree fntype, tree exp)
1976 {
1977 auto_diagnostic_group adg;
1978
1979 /* Set if a warning has been issued for any argument (used to decide
1980 whether to emit an informational note at the end). */
1981 bool any_warned = false;
1982
1983 /* A string describing the attributes that the warnings issued by this
1984 function apply to. Used to print one informational note per function
1985 call, rather than one per warning. That reduces clutter. */
1986 char attrstr[80];
1987 attrstr[0] = 0;
1988
1989 for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
1990 {
1991 std::pair<int, attr_access> access = *it;
1992
1993 /* Get the function call arguments corresponding to the attribute's
1994 positional arguments. When both arguments have been specified
1995 there will be two entries in *RWM, one for each. They are
1996 cross-referenced by their respective argument numbers in
1997 ACCESS.PTRARG and ACCESS.SIZARG. */
1998 const int ptridx = access.second.ptrarg;
1999 const int sizidx = access.second.sizarg;
2000
2001 gcc_assert (ptridx != -1);
2002 gcc_assert (access.first == ptridx || access.first == sizidx);
2003
2004 /* The pointer is set to null for the entry corresponding to
2005 the size argument. Skip it. It's handled when the entry
2006 corresponding to the pointer argument comes up. */
2007 if (!access.second.ptr)
2008 continue;
2009
2010 tree ptrtype = fntype_argno_type (fntype, ptridx);
2011 tree argtype = TREE_TYPE (ptrtype);
2012
2013 /* The size of the access by the call. */
2014 tree access_size;
2015 if (sizidx == -1)
2016 {
2017 /* If only the pointer attribute operand was specified and
2018 not the size, set SIZE to the greater of MINSIZE or the size
2019 of one element of the pointed-to type to detect smaller
2020 objects (null pointers are diagnosed in this case only
2021 if the pointer is also declared with attribute nonnull). */
2022 if (access.second.minsize
2023 && access.second.minsize != HOST_WIDE_INT_M1U)
2024 access_size = build_int_cstu (sizetype, access.second.minsize);
2025 else
2026 access_size = size_one_node;
2027 }
2028 else
2029 access_size = rwm->get (sizidx)->size;
2030
2031 /* Format the value or range to avoid an explosion of messages. */
2032 char sizstr[80];
2033 tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
2034 if (get_size_range (access_size, sizrng, true))
2035 {
2036 char *s0 = print_generic_expr_to_str (sizrng[0]);
2037 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2038 {
2039 gcc_checking_assert (strlen (s0) < sizeof sizstr);
2040 strcpy (sizstr, s0);
2041 }
2042 else
2043 {
2044 char *s1 = print_generic_expr_to_str (sizrng[1]);
2045 gcc_checking_assert (strlen (s0) + strlen (s1)
2046 < sizeof sizstr - 4);
2047 sprintf (sizstr, "[%s, %s]", s0, s1);
2048 free (s1);
2049 }
2050 free (s0);
2051 }
2052 else
2053 *sizstr = '\0';
2054
2055 /* Set if a warning has been issued for the current argument. */
2056 bool arg_warned = false;
2057 location_t loc = EXPR_LOCATION (exp);
2058 tree ptr = access.second.ptr;
2059 if (*sizstr
2060 && tree_int_cst_sgn (sizrng[0]) < 0
2061 && tree_int_cst_sgn (sizrng[1]) < 0)
2062 {
2063 /* Warn about negative sizes. */
2064 if (access.second.internal_p)
2065 {
2066 const std::string argtypestr
2067 = access.second.array_as_string (ptrtype);
2068
2069 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2070 "%Kbound argument %i value %s is "
2071 "negative for a variable length array "
2072 "argument %i of type %s",
2073 exp, sizidx + 1, sizstr,
2074 ptridx + 1, argtypestr.c_str ());
2075 }
2076 else
2077 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2078 "%Kargument %i value %s is negative",
2079 exp, sizidx + 1, sizstr);
2080
2081 if (arg_warned)
2082 {
2083 append_attrname (access, attrstr, sizeof attrstr);
2084 /* Remember a warning has been issued and avoid warning
2085 again below for the same attribute. */
2086 any_warned = true;
2087 continue;
2088 }
2089 }
2090
2091 if (tree_int_cst_sgn (sizrng[0]) >= 0)
2092 {
2093 if (COMPLETE_TYPE_P (argtype))
2094 {
2095 /* Multiply ACCESS_SIZE by the size of the type the pointer
2096 argument points to. If it's incomplete the size is used
2097 as is. */
2098 if (tree argsize = TYPE_SIZE_UNIT (argtype))
2099 if (TREE_CODE (argsize) == INTEGER_CST)
2100 {
2101 const int prec = TYPE_PRECISION (sizetype);
2102 wide_int minsize = wi::to_wide (sizrng[0], prec);
2103 minsize *= wi::to_wide (argsize, prec);
2104 access_size = wide_int_to_tree (sizetype, minsize);
2105 }
2106 }
2107 }
2108 else
2109 access_size = NULL_TREE;
2110
2111 if (integer_zerop (ptr))
2112 {
2113 if (sizidx >= 0 && tree_int_cst_sgn (sizrng[0]) > 0)
2114 {
2115 /* Warn about null pointers with positive sizes. This is
2116 different from also declaring the pointer argument with
2117 attribute nonnull when the function accepts null pointers
2118 only when the corresponding size is zero. */
2119 if (access.second.internal_p)
2120 {
2121 const std::string argtypestr
2122 = access.second.array_as_string (ptrtype);
2123
2124 arg_warned = warning_at (loc, OPT_Wnonnull,
2125 "%Kargument %i of variable length "
2126 "array %s is null but "
2127 "the corresponding bound argument "
2128 "%i value is %s",
2129 exp, sizidx + 1, argtypestr.c_str (),
2130 ptridx + 1, sizstr);
2131 }
2132 else
2133 arg_warned = warning_at (loc, OPT_Wnonnull,
2134 "%Kargument %i is null but "
2135 "the corresponding size argument "
2136 "%i value is %s",
2137 exp, ptridx + 1, sizidx + 1,
2138 sizstr);
2139 }
2140 else if (access_size && access.second.static_p)
2141 {
2142 /* Warn about null pointers for [static N] array arguments
2143 but do not warn for ordinary (i.e., nonstatic) arrays. */
2144 arg_warned = warning_at (loc, OPT_Wnonnull,
2145 "%Kargument %i to %<%T[static %E]%> "
2146 "is null where non-null expected",
2147 exp, ptridx + 1, argtype,
2148 access_size);
2149 }
2150
2151 if (arg_warned)
2152 {
2153 append_attrname (access, attrstr, sizeof attrstr);
2154 /* Remember a warning has been issued and avoid warning
2155 again below for the same attribute. */
2156 any_warned = true;
2157 continue;
2158 }
2159 }
2160
2161 access_data data (ptr, access.second.mode, NULL_TREE, false,
2162 NULL_TREE, false);
2163 access_ref* const pobj = (access.second.mode == access_write_only
2164 ? &data.dst : &data.src);
2165 tree objsize = compute_objsize (ptr, 1, pobj);
2166
2167 /* The size of the destination or source object. */
2168 tree dstsize = NULL_TREE, srcsize = NULL_TREE;
2169 if (access.second.mode == access_read_only
2170 || access.second.mode == access_none)
2171 {
2172 /* For a read-only argument there is no destination. For
2173 no access, set the source as well and differentiate via
2174 the access flag below. */
2175 srcsize = objsize;
2176 if (access.second.mode == access_read_only
2177 || access.second.mode == access_none)
2178 {
2179 /* For a read-only attribute there is no destination so
2180 clear OBJSIZE. This emits "reading N bytes" kind of
2181 diagnostics instead of the "writing N bytes" kind,
2182 unless MODE is none. */
2183 objsize = NULL_TREE;
2184 }
2185 }
2186 else
2187 dstsize = objsize;
2188
2189 /* Clear the no-warning bit in case it was set by check_access
2190 in a prior iteration so that accesses via different arguments
2191 are diagnosed. */
2192 TREE_NO_WARNING (exp) = false;
2193 access_mode mode = data.mode;
2194 if (mode == access_deferred)
2195 mode = TYPE_READONLY (argtype) ? access_read_only : access_read_write;
2196 check_access (exp, access_size, /*maxread=*/ NULL_TREE, srcsize,
2197 dstsize, mode, &data);
2198
2199 if (TREE_NO_WARNING (exp))
2200 {
2201 any_warned = true;
2202
2203 if (access.second.internal_p)
2204 inform (loc, "referencing argument %u of type %qT",
2205 ptridx + 1, ptrtype);
2206 else
2207 /* If check_access issued a warning above, append the relevant
2208 attribute to the string. */
2209 append_attrname (access, attrstr, sizeof attrstr);
2210 }
2211 }
2212
2213 if (*attrstr)
2214 {
2215 if (fndecl)
2216 inform (DECL_SOURCE_LOCATION (fndecl),
2217 "in a call to function %qD declared with attribute %qs",
2218 fndecl, attrstr);
2219 else
2220 inform (EXPR_LOCATION (exp),
2221 "in a call with type %qT and attribute %qs",
2222 fntype, attrstr);
2223 }
2224 else if (any_warned)
2225 {
2226 if (fndecl)
2227 inform (DECL_SOURCE_LOCATION (fndecl),
2228 "in a call to function %qD", fndecl);
2229 else
2230 inform (EXPR_LOCATION (exp),
2231 "in a call with type %qT", fntype);
2232 }
2233
2234 /* Set the bit in case it was cleared and not set above. */
2235 TREE_NO_WARNING (exp) = true;
2236 }
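
/* Illustrative sketch (not part of the original source): a declaration and
   call of the kind the function above checks via attribute access.  The
   function name is made up for the example.

     __attribute__ ((access (read_only, 1, 2)))
     int sum (const int *, size_t);

     int g (void)
     {
       return sum (NULL, 4);   // null pointer but nonzero bound
     }

   The attribute associates the pointer argument with its size argument in
   the rdwr_map; the null pointer passed with a positive size triggers the
   -Wnonnull diagnostic issued above, followed by a note pointing at the
   declaration of SUM and naming the attribute.  */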
2237
2238 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
2239 CALL_EXPR EXP.
2240
2241 NUM_ACTUALS is the total number of parameters.
2242
2243 N_NAMED_ARGS is the total number of named arguments.
2244
2245 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
2246 value, or null.
2247
2248 FNDECL is the tree code for the target of this call (if known)
2249
2250 ARGS_SO_FAR holds state needed by the target to know where to place
2251 the next argument.
2252
2253 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
2254 for arguments which are passed in registers.
2255
2256 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
2257 and may be modified by this routine.
2258
2259 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
2260 flags which may be modified by this routine.
2261
2262 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
2263 that requires allocation of stack space.
2264
2265 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
2266 the thunked-to function. */
2267
2268 static void
2269 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
2270 struct arg_data *args,
2271 struct args_size *args_size,
2272 int n_named_args ATTRIBUTE_UNUSED,
2273 tree exp, tree struct_value_addr_value,
2274 tree fndecl, tree fntype,
2275 cumulative_args_t args_so_far,
2276 int reg_parm_stack_space,
2277 rtx *old_stack_level,
2278 poly_int64_pod *old_pending_adj,
2279 int *must_preallocate, int *ecf_flags,
2280 bool *may_tailcall, bool call_from_thunk_p)
2281 {
2282 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
2283 location_t loc = EXPR_LOCATION (exp);
2284
2285 /* Count arg position in order args appear. */
2286 int argpos;
2287
2288 int i;
2289
2290 args_size->constant = 0;
2291 args_size->var = 0;
2292
2293 bitmap_obstack_initialize (NULL);
2294
2295 /* In this loop, we consider args in the order they are written.
2296 We fill up ARGS from the back. */
2297
2298 i = num_actuals - 1;
2299 {
2300 int j = i;
2301 call_expr_arg_iterator iter;
2302 tree arg;
2303 bitmap slots = NULL;
2304
2305 if (struct_value_addr_value)
2306 {
2307 args[j].tree_value = struct_value_addr_value;
2308 j--;
2309 }
2310 argpos = 0;
2311 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2312 {
2313 tree argtype = TREE_TYPE (arg);
2314
2315 if (targetm.calls.split_complex_arg
2316 && argtype
2317 && TREE_CODE (argtype) == COMPLEX_TYPE
2318 && targetm.calls.split_complex_arg (argtype))
2319 {
2320 tree subtype = TREE_TYPE (argtype);
2321 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
2322 j--;
2323 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
2324 }
2325 else
2326 args[j].tree_value = arg;
2327 j--;
2328 argpos++;
2329 }
2330
2331 if (slots)
2332 BITMAP_FREE (slots);
2333 }
2334
2335 bitmap_obstack_release (NULL);
2336
2337 tree fntypeattrs = TYPE_ATTRIBUTES (fntype);
2338 /* Extract attribute alloc_size from the type of the called expression
2339 (which could be a function or a function pointer) and if set, store
2340 the indices of the corresponding arguments in ALLOC_IDX, and then
2341 the actual argument(s) at those indices in ALLOC_ARGS. */
2342 int alloc_idx[2] = { -1, -1 };
2343 if (tree alloc_size = lookup_attribute ("alloc_size", fntypeattrs))
2344 {
2345 tree args = TREE_VALUE (alloc_size);
2346 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
2347 if (TREE_CHAIN (args))
2348 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
2349 }
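
/* For instance (illustrative, not from the original source), for a
   declaration like

     __attribute__ ((alloc_size (1, 2))) void *my_calloc (size_t, size_t);

   ALLOC_IDX becomes { 0, 1 } and the loop below records the first and
   second actual arguments in ALLOC_ARGS for the later call to
   maybe_warn_alloc_args_overflow.  */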
2350
2351 /* Array for up to the two attribute alloc_size arguments. */
2352 tree alloc_args[] = { NULL_TREE, NULL_TREE };
2353
2354 /* Map of attribute access specifications for function arguments. */
2355 rdwr_map rdwr_idx;
2356 init_attr_rdwr_indices (&rdwr_idx, fntypeattrs);
2357
2358 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
2359 for (argpos = 0; argpos < num_actuals; i--, argpos++)
2360 {
2361 tree type = TREE_TYPE (args[i].tree_value);
2362 int unsignedp;
2363
2364 /* Replace erroneous argument with constant zero. */
2365 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
2366 args[i].tree_value = integer_zero_node, type = integer_type_node;
2367
2368 /* If TYPE is a transparent union or record, pass things the way
2369 we would pass the first field of the union or record. We have
2370 already verified that the modes are the same. */
2371 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2372 type = TREE_TYPE (first_field (type));
2373
2374 /* Decide where to pass this arg.
2375
2376 args[i].reg is nonzero if all or part is passed in registers.
2377
2378 args[i].partial is nonzero if part but not all is passed in registers,
2379 and the exact value says how many bytes are passed in registers.
2380
2381 args[i].pass_on_stack is nonzero if the argument must at least be
2382 computed on the stack. It may then be loaded back into registers
2383 if args[i].reg is nonzero.
2384
2385 These decisions are driven by the FUNCTION_... macros and must agree
2386 with those made by function.c. */
2387
2388 /* See if this argument should be passed by invisible reference. */
2389 function_arg_info arg (type, argpos < n_named_args);
2390 if (pass_by_reference (args_so_far_pnt, arg))
2391 {
2392 const bool callee_copies
2393 = reference_callee_copied (args_so_far_pnt, arg);
2394 tree base;
2395
2396 /* If we're compiling a thunk, pass directly the address of an object
2397 already in memory, instead of making a copy. Likewise if we want
2398 to make the copy in the callee instead of the caller. */
2399 if ((call_from_thunk_p || callee_copies)
2400 && (base = get_base_address (args[i].tree_value))
2401 && TREE_CODE (base) != SSA_NAME
2402 && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
2403 {
2404 /* We may have turned the parameter value into an SSA name.
2405 Go back to the original parameter so we can take the
2406 address. */
2407 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2408 {
2409 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2410 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2411 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2412 }
2413 /* Argument setup code may have copied the value to register. We
2414 revert that optimization now because the tail call code must
2415 use the original location. */
2416 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2417 && !MEM_P (DECL_RTL (args[i].tree_value))
2418 && DECL_INCOMING_RTL (args[i].tree_value)
2419 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2420 set_decl_rtl (args[i].tree_value,
2421 DECL_INCOMING_RTL (args[i].tree_value));
2422
2423 mark_addressable (args[i].tree_value);
2424
2425 /* We can't use sibcalls if a callee-copied argument is
2426 stored in the current function's frame. */
2427 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2428 {
2429 *may_tailcall = false;
2430 maybe_complain_about_tail_call (exp,
2431 "a callee-copied argument is"
2432 " stored in the current"
2433 " function's frame");
2434 }
2435
2436 args[i].tree_value = build_fold_addr_expr_loc (loc,
2437 args[i].tree_value);
2438 type = TREE_TYPE (args[i].tree_value);
2439
2440 if (*ecf_flags & ECF_CONST)
2441 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2442 }
2443 else
2444 {
2445 /* We make a copy of the object and pass the address to the
2446 function being called. */
2447 rtx copy;
2448
2449 if (!COMPLETE_TYPE_P (type)
2450 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2451 || (flag_stack_check == GENERIC_STACK_CHECK
2452 && compare_tree_int (TYPE_SIZE_UNIT (type),
2453 STACK_CHECK_MAX_VAR_SIZE) > 0))
2454 {
2455 /* This is a variable-sized object. Make space on the stack
2456 for it. */
2457 rtx size_rtx = expr_size (args[i].tree_value);
2458
2459 if (*old_stack_level == 0)
2460 {
2461 emit_stack_save (SAVE_BLOCK, old_stack_level);
2462 *old_pending_adj = pending_stack_adjust;
2463 pending_stack_adjust = 0;
2464 }
2465
2466 /* We can pass TRUE as the last argument because we just
2467 saved the stack pointer and will restore it right after
2468 the call. */
2469 copy = allocate_dynamic_stack_space (size_rtx,
2470 TYPE_ALIGN (type),
2471 TYPE_ALIGN (type),
2472 max_int_size_in_bytes
2473 (type),
2474 true);
2475 copy = gen_rtx_MEM (BLKmode, copy);
2476 set_mem_attributes (copy, type, 1);
2477 }
2478 else
2479 copy = assign_temp (type, 1, 0);
2480
2481 store_expr (args[i].tree_value, copy, 0, false, false);
2482
2483 /* Just change the const function to pure and then let
2484 the next test clear the pure based on
2485 callee_copies. */
2486 if (*ecf_flags & ECF_CONST)
2487 {
2488 *ecf_flags &= ~ECF_CONST;
2489 *ecf_flags |= ECF_PURE;
2490 }
2491
2492 if (!callee_copies && *ecf_flags & ECF_PURE)
2493 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2494
2495 args[i].tree_value
2496 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2497 type = TREE_TYPE (args[i].tree_value);
2498 *may_tailcall = false;
2499 maybe_complain_about_tail_call (exp,
2500 "argument must be passed"
2501 " by copying");
2502 }
2503 arg.pass_by_reference = true;
2504 }
2505
2506 unsignedp = TYPE_UNSIGNED (type);
2507 arg.type = type;
2508 arg.mode
2509 = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2510 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2511
2512 args[i].unsignedp = unsignedp;
2513 args[i].mode = arg.mode;
2514
2515 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2516
2517 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
2518
2519 if (args[i].reg && CONST_INT_P (args[i].reg))
2520 args[i].reg = NULL;
2521
2522 /* If this is a sibling call and the machine has register windows, the
2523 register window has to be unwound before calling the routine, so
2524 arguments have to go into the incoming registers. */
2525 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2526 args[i].tail_call_reg
2527 = targetm.calls.function_incoming_arg (args_so_far, arg);
2528 else
2529 args[i].tail_call_reg = args[i].reg;
2530
2531 if (args[i].reg)
2532 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
2533
2534 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
2535
2536 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2537 it means that we are to pass this arg in the register(s) designated
2538 by the PARALLEL, but also to pass it in the stack. */
2539 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2540 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2541 args[i].pass_on_stack = 1;
2542
2543 /* If this is an addressable type, we must preallocate the stack
2544 since we must evaluate the object into its final location.
2545
2546 If this is to be passed in both registers and the stack, it is simpler
2547 to preallocate. */
2548 if (TREE_ADDRESSABLE (type)
2549 || (args[i].pass_on_stack && args[i].reg != 0))
2550 *must_preallocate = 1;
2551
2552 /* Compute the stack-size of this argument. */
2553 if (args[i].reg == 0 || args[i].partial != 0
2554 || reg_parm_stack_space > 0
2555 || args[i].pass_on_stack)
2556 locate_and_pad_parm (arg.mode, type,
2557 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2558 1,
2559 #else
2560 args[i].reg != 0,
2561 #endif
2562 reg_parm_stack_space,
2563 args[i].pass_on_stack ? 0 : args[i].partial,
2564 fndecl, args_size, &args[i].locate);
2565 #ifdef BLOCK_REG_PADDING
2566 else
2567 /* The argument is passed entirely in registers. See at which
2568 end it should be padded. */
2569 args[i].locate.where_pad =
2570 BLOCK_REG_PADDING (arg.mode, type,
2571 int_size_in_bytes (type) <= UNITS_PER_WORD);
2572 #endif
2573
2574 /* Update ARGS_SIZE, the total stack space for args so far. */
2575
2576 args_size->constant += args[i].locate.size.constant;
2577 if (args[i].locate.size.var)
2578 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2579
2580 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2581 have been used, etc. */
2582
2583 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2584 promoted_mode used for function_arg above. However, the
2585 corresponding handling of incoming arguments in function.c
2586 does pass the promoted mode. */
2587 arg.mode = TYPE_MODE (type);
2588 targetm.calls.function_arg_advance (args_so_far, arg);
2589
2590 /* Store argument values for functions decorated with attribute
2591 alloc_size. */
2592 if (argpos == alloc_idx[0])
2593 alloc_args[0] = args[i].tree_value;
2594 else if (argpos == alloc_idx[1])
2595 alloc_args[1] = args[i].tree_value;
2596
2597 /* Save the actual argument that corresponds to the access attribute
2598 operand for later processing. */
2599 if (attr_access *access = rdwr_idx.get (argpos))
2600 {
2601 if (POINTER_TYPE_P (type))
2602 {
2603 access->ptr = args[i].tree_value;
2604 /* A nonnull ACCESS->SIZE contains VLA bounds. */
2605 }
2606 else
2607 {
2608 access->size = args[i].tree_value;
2609 gcc_assert (access->ptr == NULL_TREE);
2610 }
2611 }
2612 }
2613
2614 if (alloc_args[0])
2615 {
2616 /* Check the arguments of functions decorated with attribute
2617 alloc_size. */
2618 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2619 }
2620
2621 /* Detect passing non-string arguments to functions expecting
2622 nul-terminated strings. */
2623 maybe_warn_nonstring_arg (fndecl, exp);
2624
2625 /* Check attribute access arguments. */
2626 maybe_warn_rdwr_sizes (&rdwr_idx, fndecl, fntype, exp);
2627
2628 /* Check calls to operator new for mismatched forms and attempts
2629 to deallocate unallocated objects. */
2630 maybe_emit_free_warning (exp);
2631 }
2632
2633 /* Update ARGS_SIZE to contain the total size for the argument block.
2634 Return the original constant component of the argument block's size.
2635
2636 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2637 for arguments passed in registers. */
2638
2639 static poly_int64
2640 compute_argument_block_size (int reg_parm_stack_space,
2641 struct args_size *args_size,
2642 tree fndecl ATTRIBUTE_UNUSED,
2643 tree fntype ATTRIBUTE_UNUSED,
2644 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2645 {
2646 poly_int64 unadjusted_args_size = args_size->constant;
2647
2648 /* For accumulate outgoing args mode we don't need to align, since the frame
2649 will be already aligned. Align to STACK_BOUNDARY in order to prevent
2650 backends from generating misaligned frame sizes. */
2651 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2652 preferred_stack_boundary = STACK_BOUNDARY;
2653
2654 /* Compute the actual size of the argument block required. The variable
2655 and constant sizes must be combined, the size may have to be rounded,
2656 and there may be a minimum required size. */
2657
2658 if (args_size->var)
2659 {
2660 args_size->var = ARGS_SIZE_TREE (*args_size);
2661 args_size->constant = 0;
2662
2663 preferred_stack_boundary /= BITS_PER_UNIT;
2664 if (preferred_stack_boundary > 1)
2665 {
2666 /* We don't handle this case yet. To handle it correctly we have
2667 to add the delta, round and subtract the delta.
2668 Currently no machine description requires this support. */
2669 gcc_assert (multiple_p (stack_pointer_delta,
2670 preferred_stack_boundary));
2671 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2672 }
2673
2674 if (reg_parm_stack_space > 0)
2675 {
2676 args_size->var
2677 = size_binop (MAX_EXPR, args_size->var,
2678 ssize_int (reg_parm_stack_space));
2679
2680 /* The area corresponding to register parameters is not to count in
2681 the size of the block we need. So make the adjustment. */
2682 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2683 args_size->var
2684 = size_binop (MINUS_EXPR, args_size->var,
2685 ssize_int (reg_parm_stack_space));
2686 }
2687 }
2688 else
2689 {
2690 preferred_stack_boundary /= BITS_PER_UNIT;
2691 if (preferred_stack_boundary < 1)
2692 preferred_stack_boundary = 1;
2693 args_size->constant = (aligned_upper_bound (args_size->constant
2694 + stack_pointer_delta,
2695 preferred_stack_boundary)
2696 - stack_pointer_delta);
2697
2698 args_size->constant = upper_bound (args_size->constant,
2699 reg_parm_stack_space);
2700
2701 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2702 args_size->constant -= reg_parm_stack_space;
2703 }
2704 return unadjusted_args_size;
2705 }
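
/* A worked example of the constant-size path above (illustrative numbers
   only): with a preferred stack boundary of 128 bits (16 bytes), a
   stack_pointer_delta of 8 and an incoming args_size->constant of 20, the
   constant is rounded so the post-push stack pointer stays aligned:
   aligned_upper_bound (20 + 8, 16) - 8 = 32 - 8 = 24 bytes, before the
   REG_PARM_STACK_SPACE adjustments are applied.  */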
2706
2707 /* Precompute parameters as needed for a function call.
2708
2709 FLAGS is mask of ECF_* constants.
2710
2711 NUM_ACTUALS is the number of arguments.
2712
2713 ARGS is an array containing information for each argument; this
2714 routine fills in the INITIAL_VALUE and VALUE fields for each
2715 precomputed argument. */
2716
2717 static void
2718 precompute_arguments (int num_actuals, struct arg_data *args)
2719 {
2720 int i;
2721
2722 /* If this is a libcall, then precompute all arguments so that we do not
2723 get extraneous instructions emitted as part of the libcall sequence. */
2724
2725 /* If we preallocated the stack space, and some arguments must be passed
2726 on the stack, then we must precompute any parameter which contains a
2727 function call which will store arguments on the stack.
2728 Otherwise, evaluating the parameter may clobber previous parameters
2729 which have already been stored into the stack. (We have code to avoid
2730 such a case by saving the outgoing stack arguments, but it results in
2731 worse code.) */
2732 if (!ACCUMULATE_OUTGOING_ARGS)
2733 return;
2734
2735 for (i = 0; i < num_actuals; i++)
2736 {
2737 tree type;
2738 machine_mode mode;
2739
2740 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2741 continue;
2742
2743 /* If this is an addressable type, we cannot pre-evaluate it. */
2744 type = TREE_TYPE (args[i].tree_value);
2745 gcc_assert (!TREE_ADDRESSABLE (type));
2746
2747 args[i].initial_value = args[i].value
2748 = expand_normal (args[i].tree_value);
2749
2750 mode = TYPE_MODE (type);
2751 if (mode != args[i].mode)
2752 {
2753 int unsignedp = args[i].unsignedp;
2754 args[i].value
2755 = convert_modes (args[i].mode, mode,
2756 args[i].value, args[i].unsignedp);
2757
2758 /* CSE will replace this only if it contains args[i].value
2759 pseudo, so convert it down to the declared mode using
2760 a SUBREG. */
2761 if (REG_P (args[i].value)
2762 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2763 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2764 {
2765 args[i].initial_value
2766 = gen_lowpart_SUBREG (mode, args[i].value);
2767 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2768 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2769 }
2770 }
2771 }
2772 }
2773
2774 /* Given the current state of MUST_PREALLOCATE and information about
2775 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2776 compute and return the final value for MUST_PREALLOCATE. */
2777
2778 static int
2779 finalize_must_preallocate (int must_preallocate, int num_actuals,
2780 struct arg_data *args, struct args_size *args_size)
2781 {
2782 /* See if we have or want to preallocate stack space.
2783
2784 If we would have to push a partially-in-regs parm
2785 before other stack parms, preallocate stack space instead.
2786
2787 If the size of some parm is not a multiple of the required stack
2788 alignment, we must preallocate.
2789
2790 If the total size of arguments that would otherwise create a copy in
2791 a temporary (such as a CALL) is more than half the total argument list
2792 size, preallocation is faster.
2793
2794 Another reason to preallocate is if we have a machine (like the m88k)
2795 where stack alignment is required to be maintained between every
2796 pair of insns, not just when the call is made. However, we assume here
2797 that such machines either do not have push insns (and hence preallocation
2798 would occur anyway) or the problem is taken care of with
2799 PUSH_ROUNDING. */
2800
2801 if (! must_preallocate)
2802 {
2803 int partial_seen = 0;
2804 poly_int64 copy_to_evaluate_size = 0;
2805 int i;
2806
2807 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2808 {
2809 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2810 partial_seen = 1;
2811 else if (partial_seen && args[i].reg == 0)
2812 must_preallocate = 1;
2813
2814 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2815 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2816 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2817 || TREE_CODE (args[i].tree_value) == COND_EXPR
2818 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2819 copy_to_evaluate_size
2820 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2821 }
2822
2823 if (maybe_ne (args_size->constant, 0)
2824 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2825 must_preallocate = 1;
2826 }
2827 return must_preallocate;
2828 }
2829
2830 /* If we preallocated stack space, compute the address of each argument
2831 and store it into the ARGS array.
2832
2833 We need not ensure it is a valid memory address here; it will be
2834 validized when it is used.
2835
2836 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2837
2838 static void
2839 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2840 {
2841 if (argblock)
2842 {
2843 rtx arg_reg = argblock;
2844 int i;
2845 poly_int64 arg_offset = 0;
2846
2847 if (GET_CODE (argblock) == PLUS)
2848 {
2849 arg_reg = XEXP (argblock, 0);
2850 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2851 }
2852
2853 for (i = 0; i < num_actuals; i++)
2854 {
2855 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2856 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2857 rtx addr;
2858 unsigned int align, boundary;
2859 poly_uint64 units_on_stack = 0;
2860 machine_mode partial_mode = VOIDmode;
2861
2862 /* Skip this parm if it will not be passed on the stack. */
2863 if (! args[i].pass_on_stack
2864 && args[i].reg != 0
2865 && args[i].partial == 0)
2866 continue;
2867
2868 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2869 continue;
2870
2871 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2872 addr = plus_constant (Pmode, addr, arg_offset);
2873
2874 if (args[i].partial != 0)
2875 {
2876 /* Only part of the parameter is being passed on the stack.
2877 Generate a simple memory reference of the correct size. */
2878 units_on_stack = args[i].locate.size.constant;
2879 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2880 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2881 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2882 set_mem_size (args[i].stack, units_on_stack);
2883 }
2884 else
2885 {
2886 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2887 set_mem_attributes (args[i].stack,
2888 TREE_TYPE (args[i].tree_value), 1);
2889 }
2890 align = BITS_PER_UNIT;
2891 boundary = args[i].locate.boundary;
2892 poly_int64 offset_val;
2893 if (args[i].locate.where_pad != PAD_DOWNWARD)
2894 align = boundary;
2895 else if (poly_int_rtx_p (offset, &offset_val))
2896 {
2897 align = least_bit_hwi (boundary);
2898 unsigned int offset_align
2899 = known_alignment (offset_val) * BITS_PER_UNIT;
2900 if (offset_align != 0)
2901 align = MIN (align, offset_align);
2902 }
2903 set_mem_align (args[i].stack, align);
2904
2905 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2906 addr = plus_constant (Pmode, addr, arg_offset);
2907
2908 if (args[i].partial != 0)
2909 {
2910 /* Only part of the parameter is being passed on the stack.
2911 Generate a simple memory reference of the correct size.
2912 */
2913 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2914 set_mem_size (args[i].stack_slot, units_on_stack);
2915 }
2916 else
2917 {
2918 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2919 set_mem_attributes (args[i].stack_slot,
2920 TREE_TYPE (args[i].tree_value), 1);
2921 }
2922 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2923
2924 /* Function incoming arguments may overlap with sibling call
2925 outgoing arguments and we cannot allow reordering of reads
2926 from function arguments with stores to outgoing arguments
2927 of sibling calls. */
2928 set_mem_alias_set (args[i].stack, 0);
2929 set_mem_alias_set (args[i].stack_slot, 0);
2930 }
2931 }
2932 }
2933
2934 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2935 in a call instruction.
2936
2937 FNDECL is the tree node for the target function. For an indirect call
2938 FNDECL will be NULL_TREE.
2939
2940 ADDR is the operand 0 of CALL_EXPR for this call. */
2941
2942 static rtx
2943 rtx_for_function_call (tree fndecl, tree addr)
2944 {
2945 rtx funexp;
2946
2947 /* Get the function to call, in the form of RTL. */
2948 if (fndecl)
2949 {
2950 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2951 TREE_USED (fndecl) = 1;
2952
2953 /* Get a SYMBOL_REF rtx for the function address. */
2954 funexp = XEXP (DECL_RTL (fndecl), 0);
2955 }
2956 else
2957 /* Generate an rtx (probably a pseudo-register) for the address. */
2958 {
2959 push_temp_slots ();
2960 funexp = expand_normal (addr);
2961 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2962 }
2963 return funexp;
2964 }
2965
2966 /* Return the static chain for this function, if any. */
2967
2968 rtx
2969 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2970 {
2971 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2972 return NULL;
2973
2974 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2975 }
2976
2977 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2978 static struct
2979 {
2980 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2981 or NULL_RTX if none has been scanned yet. */
2982 rtx_insn *scan_start;
2983 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2984 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2985 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2986 with fixed offset, or PC if this is with variable or unknown offset. */
2987 vec<rtx> cache;
2988 } internal_arg_pointer_exp_state;
2989
2990 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2991
2992 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2993 the tail call sequence, starting with the first insn that hasn't been
2994 scanned yet, and note for each pseudo on the LHS whether it is based
2995 on crtl->args.internal_arg_pointer or not, and what offset from that
2996 pointer it has. */
2997
2998 static void
2999 internal_arg_pointer_based_exp_scan (void)
3000 {
3001 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
3002
3003 if (scan_start == NULL_RTX)
3004 insn = get_insns ();
3005 else
3006 insn = NEXT_INSN (scan_start);
3007
3008 while (insn)
3009 {
3010 rtx set = single_set (insn);
3011 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
3012 {
3013 rtx val = NULL_RTX;
3014 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
3015 /* Punt on pseudos set multiple times. */
3016 if (idx < internal_arg_pointer_exp_state.cache.length ()
3017 && (internal_arg_pointer_exp_state.cache[idx]
3018 != NULL_RTX))
3019 val = pc_rtx;
3020 else
3021 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
3022 if (val != NULL_RTX)
3023 {
3024 if (idx >= internal_arg_pointer_exp_state.cache.length ())
3025 internal_arg_pointer_exp_state.cache
3026 .safe_grow_cleared (idx + 1, true);
3027 internal_arg_pointer_exp_state.cache[idx] = val;
3028 }
3029 }
3030 if (NEXT_INSN (insn) == NULL_RTX)
3031 scan_start = insn;
3032 insn = NEXT_INSN (insn);
3033 }
3034
3035 internal_arg_pointer_exp_state.scan_start = scan_start;
3036 }
3037
3038 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
3039 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
3040 it with fixed offset, or PC if this is with variable or unknown offset.
3041 TOPLEVEL is true if the function is invoked at the topmost level. */
3042
3043 static rtx
3044 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
3045 {
3046 if (CONSTANT_P (rtl))
3047 return NULL_RTX;
3048
3049 if (rtl == crtl->args.internal_arg_pointer)
3050 return const0_rtx;
3051
3052 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
3053 return NULL_RTX;
3054
3055 poly_int64 offset;
3056 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
3057 {
3058 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
3059 if (val == NULL_RTX || val == pc_rtx)
3060 return val;
3061 return plus_constant (Pmode, val, offset);
3062 }
3063
3064 /* When called at the topmost level, scan pseudo assignments in between the
3065 last scanned instruction in the tail call sequence and the latest insn
3066 in that sequence. */
3067 if (toplevel)
3068 internal_arg_pointer_based_exp_scan ();
3069
3070 if (REG_P (rtl))
3071 {
3072 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
3073 if (idx < internal_arg_pointer_exp_state.cache.length ())
3074 return internal_arg_pointer_exp_state.cache[idx];
3075
3076 return NULL_RTX;
3077 }
3078
3079 subrtx_iterator::array_type array;
3080 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
3081 {
3082 const_rtx x = *iter;
3083 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
3084 return pc_rtx;
3085 if (MEM_P (x))
3086 iter.skip_subrtxes ();
3087 }
3088
3089 return NULL_RTX;
3090 }
3091
3092 /* Return true if SIZE bytes starting from address ADDR might overlap an
3093 already-clobbered argument area. This function is used to determine
3094 if we should give up a sibcall. */
3095
3096 static bool
3097 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
3098 {
3099 poly_int64 i;
3100 unsigned HOST_WIDE_INT start, end;
3101 rtx val;
3102
3103 if (bitmap_empty_p (stored_args_map)
3104 && stored_args_watermark == HOST_WIDE_INT_M1U)
3105 return false;
3106 val = internal_arg_pointer_based_exp (addr, true);
3107 if (val == NULL_RTX)
3108 return false;
3109 else if (!poly_int_rtx_p (val, &i))
3110 return true;
3111
3112 if (known_eq (size, 0U))
3113 return false;
3114
3115 if (STACK_GROWS_DOWNWARD)
3116 i -= crtl->args.pretend_args_size;
3117 else
3118 i += crtl->args.pretend_args_size;
3119
3120 if (ARGS_GROW_DOWNWARD)
3121 i = -i - size;
3122
3123 /* We can ignore any references to the function's pretend args,
3124 which at this point would manifest as negative values of I. */
3125 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
3126 return false;
3127
3128 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
3129 if (!(i + size).is_constant (&end))
3130 end = HOST_WIDE_INT_M1U;
3131
3132 if (end > stored_args_watermark)
3133 return true;
3134
3135 end = MIN (end, SBITMAP_SIZE (stored_args_map));
3136 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
3137 if (bitmap_bit_p (stored_args_map, k))
3138 return true;
3139
3140 return false;
3141 }
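
/* For illustration (not from the original source): on a target where the
   stack grows downward, arguments grow upward and pretend_args_size is 0,
   an ADDR equal to internal_arg_pointer plus 16 with SIZE 8 resolves to
   I == 16, so bits 16 through 23 of stored_args_map are tested and the
   function returns true (giving up the sibcall) if any of them is set.  */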
3142
3143 /* Do the register loads required for any wholly-register parms or any
3144 parms which are passed both on the stack and in a register. Their
3145 expressions were already evaluated.
3146
3147 Mark all register-parms as living through the call, putting these USE
3148 insns in the CALL_INSN_FUNCTION_USAGE field.
3149
3150 When IS_SIBCALL, perform the check_sibcall_argument_overlap
3151 checking, setting *SIBCALL_FAILURE if appropriate. */
3152
3153 static void
3154 load_register_parameters (struct arg_data *args, int num_actuals,
3155 rtx *call_fusage, int flags, int is_sibcall,
3156 int *sibcall_failure)
3157 {
3158 int i, j;
3159
3160 for (i = 0; i < num_actuals; i++)
3161 {
3162 rtx reg = ((flags & ECF_SIBCALL)
3163 ? args[i].tail_call_reg : args[i].reg);
3164 if (reg)
3165 {
3166 int partial = args[i].partial;
3167 int nregs;
3168 poly_int64 size = 0;
3169 HOST_WIDE_INT const_size = 0;
3170 rtx_insn *before_arg = get_last_insn ();
3171 tree type = TREE_TYPE (args[i].tree_value);
3172 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
3173 type = TREE_TYPE (first_field (type));
3174 /* Set non-negative if we must move a word at a time, even if
3175 just one word (e.g., partial == 4 && mode == DFmode). Set
3176 to -1 if we just use a normal move insn. This value can be
3177 zero if the argument is a zero size structure. */
3178 nregs = -1;
3179 if (GET_CODE (reg) == PARALLEL)
3180 ;
3181 else if (partial)
3182 {
3183 gcc_assert (partial % UNITS_PER_WORD == 0);
3184 nregs = partial / UNITS_PER_WORD;
3185 }
3186 else if (TYPE_MODE (type) == BLKmode)
3187 {
3188 /* Variable-sized parameters should be described by a
3189 PARALLEL instead. */
3190 const_size = int_size_in_bytes (type);
3191 gcc_assert (const_size >= 0);
3192 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3193 size = const_size;
3194 }
3195 else
3196 size = GET_MODE_SIZE (args[i].mode);
3197
3198 /* Handle calls that pass values in multiple non-contiguous
3199 locations. The Irix 6 ABI has examples of this. */
3200
3201 if (GET_CODE (reg) == PARALLEL)
3202 emit_group_move (reg, args[i].parallel_value);
3203
3204 /* If simple case, just do move. If normal partial, store_one_arg
3205 has already loaded the register for us. In all other cases,
3206 load the register(s) from memory. */
3207
3208 else if (nregs == -1)
3209 {
3210 emit_move_insn (reg, args[i].value);
3211 #ifdef BLOCK_REG_PADDING
3212 /* Handle case where we have a value that needs shifting
3213 up to the msb, e.g. a QImode value and we're padding
3214 upward on a BYTES_BIG_ENDIAN machine. */
3215 if (args[i].locate.where_pad
3216 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
3217 {
3218 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
3219 if (maybe_lt (size, UNITS_PER_WORD))
3220 {
3221 rtx x;
3222 poly_int64 shift
3223 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3224
3225 /* Assigning REG here rather than a temp makes
3226 CALL_FUSAGE report the whole reg as used.
3227 Strictly speaking, the call only uses SIZE
3228 bytes at the msb end, but it doesn't seem worth
3229 generating rtl to say that. */
3230 reg = gen_rtx_REG (word_mode, REGNO (reg));
3231 x = expand_shift (LSHIFT_EXPR, word_mode,
3232 reg, shift, reg, 1);
3233 if (x != reg)
3234 emit_move_insn (reg, x);
3235 }
3236 }
3237 #endif
3238 }
3239
3240 /* If we have pre-computed the values to put in the registers in
3241 the case of non-aligned structures, copy them in now. */
3242
3243 else if (args[i].n_aligned_regs != 0)
3244 for (j = 0; j < args[i].n_aligned_regs; j++)
3245 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
3246 args[i].aligned_regs[j]);
3247
3248 else if (partial == 0 || args[i].pass_on_stack)
3249 {
3250 /* SIZE and CONST_SIZE are 0 for partial arguments and
3251 the size of a BLKmode type otherwise. */
3252 gcc_checking_assert (known_eq (size, const_size));
3253 rtx mem = validize_mem (copy_rtx (args[i].value));
3254
3255 /* Check for overlap with already clobbered argument area,
3256 providing that this has non-zero size. */
3257 if (is_sibcall
3258 && const_size != 0
3259 && (mem_might_overlap_already_clobbered_arg_p
3260 (XEXP (args[i].value, 0), const_size)))
3261 *sibcall_failure = 1;
3262
3263 if (const_size % UNITS_PER_WORD == 0
3264 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
3265 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
3266 else
3267 {
3268 if (nregs > 1)
3269 move_block_to_reg (REGNO (reg), mem, nregs - 1,
3270 args[i].mode);
3271 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
3272 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
3273 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
3274 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
3275 word_mode, word_mode, false,
3276 NULL);
3277 if (BYTES_BIG_ENDIAN)
3278 x = expand_shift (LSHIFT_EXPR, word_mode, x,
3279 BITS_PER_WORD - bitsize, dest, 1);
3280 if (x != dest)
3281 emit_move_insn (dest, x);
3282 }
3283
3284 /* Handle a BLKmode that needs shifting. */
3285 if (nregs == 1 && const_size < UNITS_PER_WORD
3286 #ifdef BLOCK_REG_PADDING
3287 && args[i].locate.where_pad == PAD_DOWNWARD
3288 #else
3289 && BYTES_BIG_ENDIAN
3290 #endif
3291 )
3292 {
3293 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
3294 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
3295 enum tree_code dir = (BYTES_BIG_ENDIAN
3296 ? RSHIFT_EXPR : LSHIFT_EXPR);
3297 rtx x;
3298
3299 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
3300 if (x != dest)
3301 emit_move_insn (dest, x);
3302 }
3303 }
3304
3305 /* When a parameter is a block, and perhaps in other cases, it is
3306 possible that it did a load from an argument slot that was
3307 already clobbered. */
3308 if (is_sibcall
3309 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
3310 *sibcall_failure = 1;
3311
3312 /* Handle calls that pass values in multiple non-contiguous
3313 locations. The Irix 6 ABI has examples of this. */
3314 if (GET_CODE (reg) == PARALLEL)
3315 use_group_regs (call_fusage, reg);
3316 else if (nregs == -1)
3317 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
3318 else if (nregs > 0)
3319 use_regs (call_fusage, REGNO (reg), nregs);
3320 }
3321 }
3322 }
3323
3324 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
3325 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
3326 bytes, then we would need to push some additional bytes to pad the
3327 arguments. So, we try to compute an adjustment to the stack pointer for an
3328 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
3329 bytes. Then, when the arguments are pushed the stack will be perfectly
3330 aligned.
3331
3332 Return true if this optimization is possible, storing the adjustment
3333 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
3334 bytes that should be popped after the call. */
3335
3336 static bool
3337 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
3338 poly_int64 unadjusted_args_size,
3339 struct args_size *args_size,
3340 unsigned int preferred_unit_stack_boundary)
3341 {
3342 /* The number of bytes to pop so that the stack will be
3343 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
3344 poly_int64 adjustment;
3345 /* The alignment of the stack after the arguments are pushed, if we
3346 just pushed the arguments without adjusting the stack here. */
3347 unsigned HOST_WIDE_INT unadjusted_alignment;
3348
3349 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
3350 preferred_unit_stack_boundary,
3351 &unadjusted_alignment))
3352 return false;
3353
3354 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
3355 as possible -- leaving just enough left to cancel out the
3356 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
3357 PENDING_STACK_ADJUST is non-negative, and congruent to
3358 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
3359
3360 /* Begin by trying to pop all the bytes. */
3361 unsigned HOST_WIDE_INT tmp_misalignment;
3362 if (!known_misalignment (pending_stack_adjust,
3363 preferred_unit_stack_boundary,
3364 &tmp_misalignment))
3365 return false;
3366 unadjusted_alignment -= tmp_misalignment;
3367 adjustment = pending_stack_adjust;
3368 /* Push enough additional bytes that the stack will be aligned
3369 after the arguments are pushed. */
3370 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
3371 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
3372
3373 /* We need to know whether the adjusted argument size
3374 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
3375 or a deallocation. */
3376 if (!ordered_p (adjustment, unadjusted_args_size))
3377 return false;
3378
3379 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
3380 bytes after the call. The right number is the entire
3381 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
3382 by the arguments in the first place. */
3383 args_size->constant
3384 = pending_stack_adjust - adjustment + unadjusted_args_size;
3385
3386 *adjustment_out = adjustment;
3387 return true;
3388 }
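
/* A worked example of the arithmetic above (illustrative numbers only):
   with PREFERRED_UNIT_STACK_BOUNDARY 16, STACK_POINTER_DELTA 0,
   UNADJUSTED_ARGS_SIZE 8 and PENDING_STACK_ADJUST 20, the initial
   misalignment is (0 + 8) % 16 = 8, reduced by 20 % 16 = 4 to 4, so
   ADJUSTMENT becomes 20 - (16 - 4) = 8 and ARGS_SIZE->CONSTANT becomes
   20 - 8 + 8 = 20 bytes to pop after the call.  */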
3389
3390 /* Scan expression X for dereferences of argument slots we have already
3391 clobbered with tail call arguments (as noted in the stored_args_map
3392 bitmap).
3393 Return nonzero if X dereferences such an argument slot,
3394 zero otherwise. */
3395
3396 static int
3397 check_sibcall_argument_overlap_1 (rtx x)
3398 {
3399 RTX_CODE code;
3400 int i, j;
3401 const char *fmt;
3402
3403 if (x == NULL_RTX)
3404 return 0;
3405
3406 code = GET_CODE (x);
3407
3408 /* We need not check the operands of the CALL expression itself. */
3409 if (code == CALL)
3410 return 0;
3411
3412 if (code == MEM)
3413 return (mem_might_overlap_already_clobbered_arg_p
3414 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
3415
3416 /* Scan all subexpressions. */
3417 fmt = GET_RTX_FORMAT (code);
3418 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3419 {
3420 if (*fmt == 'e')
3421 {
3422 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3423 return 1;
3424 }
3425 else if (*fmt == 'E')
3426 {
3427 for (j = 0; j < XVECLEN (x, i); j++)
3428 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3429 return 1;
3430 }
3431 }
3432 return 0;
3433 }
3434
3435 /* Scan the sequence after INSN to see whether it dereferences any argument
3436 slots that we have already clobbered by tail call arguments (as noted in the
3437 stored_args_map bitmap). If MARK_STORED_ARGS_MAP is nonzero, add the stack
3438 slots for ARG to the stored_args_map bitmap afterwards (when ARG is a register,
3439 MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence after INSN
3440 dereferences such an argument slot, zero otherwise. */
3441
3442 static int
3443 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3444 int mark_stored_args_map)
3445 {
3446 poly_uint64 low, high;
3447 unsigned HOST_WIDE_INT const_low, const_high;
3448
3449 if (insn == NULL_RTX)
3450 insn = get_insns ();
3451 else
3452 insn = NEXT_INSN (insn);
3453
3454 for (; insn; insn = NEXT_INSN (insn))
3455 if (INSN_P (insn)
3456 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3457 break;
3458
3459 if (mark_stored_args_map)
3460 {
3461 if (ARGS_GROW_DOWNWARD)
3462 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3463 else
3464 low = arg->locate.slot_offset.constant;
3465 high = low + arg->locate.size.constant;
3466
3467 const_low = constant_lower_bound (low);
3468 if (high.is_constant (&const_high))
3469 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3470 bitmap_set_bit (stored_args_map, i);
3471 else
3472 stored_args_watermark = MIN (stored_args_watermark, const_low);
3473 }
3474 return insn != NULL_RTX;
3475 }
3476
3477 /* Given that a function returns a value of mode MODE at the most
3478 significant end of hard register VALUE, shift VALUE left or right
3479 as specified by LEFT_P. Return true if some action was needed. */
3480
3481 bool
3482 shift_return_value (machine_mode mode, bool left_p, rtx value)
3483 {
3484 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3485 machine_mode value_mode = GET_MODE (value);
3486 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3487
3488 if (known_eq (shift, 0))
3489 return false;
3490
3491 /* Use ashr rather than lshr for right shifts. This is for the benefit
3492 of the MIPS port, which requires SImode values to be sign-extended
3493 when stored in 64-bit registers. */
3494 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3495 value, gen_int_shift_amount (value_mode, shift),
3496 value, 1, OPTAB_WIDEN))
3497 gcc_unreachable ();
3498 return true;
3499 }
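/* An illustrative (hypothetical) example: on a 64-bit big-endian target
   that returns an SImode value in the most significant half of a DImode
   register, VALUE_MODE is DImode and MODE is SImode, so SHIFT is
   64 - 32 == 32 and the value is shifted right by 32 bits (arithmetically,
   so that SImode values stay sign-extended as e.g. the MIPS port expects).  */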
3500
3501 /* If X is a likely-spilled register value, copy it to a pseudo
3502 register and return that register. Return X otherwise. */
3503
3504 static rtx
3505 avoid_likely_spilled_reg (rtx x)
3506 {
3507 rtx new_rtx;
3508
3509 if (REG_P (x)
3510 && HARD_REGISTER_P (x)
3511 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3512 {
3513 /* Make sure that we generate a REG rather than a CONCAT.
3514 Moves into CONCATs can need nontrivial instructions,
3515 and the whole point of this function is to avoid
3516 using the hard register directly in such a situation. */
3517 generating_concat_p = 0;
3518 new_rtx = gen_reg_rtx (GET_MODE (x));
3519 generating_concat_p = 1;
3520 emit_move_insn (new_rtx, x);
3521 return new_rtx;
3522 }
3523 return x;
3524 }
3525
3526 /* Helper function for expand_call.
3527 Return false if EXP is not implementable as a sibling call. */
3528
3529 static bool
3530 can_implement_as_sibling_call_p (tree exp,
3531 rtx structure_value_addr,
3532 tree funtype,
3533 tree fndecl,
3534 int flags,
3535 tree addr,
3536 const args_size &args_size)
3537 {
3538 if (!targetm.have_sibcall_epilogue ())
3539 {
3540 maybe_complain_about_tail_call
3541 (exp,
3542 "machine description does not have"
3543 " a sibcall_epilogue instruction pattern");
3544 return false;
3545 }
3546
3547 /* Doing sibling call optimization needs some work, since
3548 structure_value_addr can be allocated on the stack.
3549 It does not seem worth the effort since few optimizable
3550 sibling calls will return a structure. */
3551 if (structure_value_addr != NULL_RTX)
3552 {
3553 maybe_complain_about_tail_call (exp, "callee returns a structure");
3554 return false;
3555 }
3556
3557 /* Check whether the target is able to optimize the call
3558 into a sibcall. */
3559 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3560 {
3561 maybe_complain_about_tail_call (exp,
3562 "target is not able to optimize the"
3563 " call into a sibling call");
3564 return false;
3565 }
3566
3567 /* Functions that do not return exactly once may not be sibcall
3568 optimized. */
3569 if (flags & ECF_RETURNS_TWICE)
3570 {
3571 maybe_complain_about_tail_call (exp, "callee returns twice");
3572 return false;
3573 }
3574 if (flags & ECF_NORETURN)
3575 {
3576 maybe_complain_about_tail_call (exp, "callee does not return");
3577 return false;
3578 }
3579
3580 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3581 {
3582 maybe_complain_about_tail_call (exp, "volatile function type");
3583 return false;
3584 }
3585
3586 /* If the called function is nested in the current one, it might access
3587 some of the caller's arguments, but could clobber them beforehand if
3588 the argument areas are shared. */
3589 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3590 {
3591 maybe_complain_about_tail_call (exp, "nested function");
3592 return false;
3593 }
3594
3595 /* If this function requires more stack slots than the current
3596 function, we cannot change it into a sibling call.
3597 crtl->args.pretend_args_size is not part of the
3598 stack allocated by our caller. */
3599 if (maybe_gt (args_size.constant,
3600 crtl->args.size - crtl->args.pretend_args_size))
3601 {
3602 maybe_complain_about_tail_call (exp,
3603 "callee required more stack slots"
3604 " than the caller");
3605 return false;
3606 }
3607
3608 /* If the callee pops its own arguments, then it must pop exactly
3609 the same number of arguments as the current function. */
3610 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3611 args_size.constant),
3612 targetm.calls.return_pops_args (current_function_decl,
3613 TREE_TYPE
3614 (current_function_decl),
3615 crtl->args.size)))
3616 {
3617 maybe_complain_about_tail_call (exp,
3618 "inconsistent number of"
3619 " popped arguments");
3620 return false;
3621 }
3622
3623 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3624 {
3625 maybe_complain_about_tail_call (exp, "frontend does not support"
3626 " sibling call");
3627 return false;
3628 }
3629
3630 /* All checks passed. */
3631 return true;
3632 }
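/* For illustration (a hypothetical case, not exhaustive): a call in tail
   position such as "return f (x);" is a sibcall candidate under
   -foptimize-sibling-calls, but the checks above reject it if, e.g., the
   callee returns a structure in memory, may return twice or not at all,
   is a nested function sharing the caller's argument area, or needs more
   outgoing argument space than the caller itself received.  */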
3633
3634 /* Update stack alignment when the parameter is passed on the stack
3635 since the outgoing parameter requires extra alignment on the calling
3636 function side. */
3637
3638 static void
3639 update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3640 {
3641 if (crtl->stack_alignment_needed < locate->boundary)
3642 crtl->stack_alignment_needed = locate->boundary;
3643 if (crtl->preferred_stack_boundary < locate->boundary)
3644 crtl->preferred_stack_boundary = locate->boundary;
3645 }
3646
3647 /* Generate all the code for a CALL_EXPR exp
3648 and return an rtx for its value.
3649 Store the value in TARGET (specified as an rtx) if convenient.
3650 If the value is stored in TARGET then TARGET is returned.
3651 If IGNORE is nonzero, then we ignore the value of the function call. */
3652
3653 rtx
3654 expand_call (tree exp, rtx target, int ignore)
3655 {
3656 /* Nonzero if we are currently expanding a call. */
3657 static int currently_expanding_call = 0;
3658
3659 /* RTX for the function to be called. */
3660 rtx funexp;
3661 /* Sequence of insns to perform a normal "call". */
3662 rtx_insn *normal_call_insns = NULL;
3663 /* Sequence of insns to perform a tail "call". */
3664 rtx_insn *tail_call_insns = NULL;
3665 /* Data type of the function. */
3666 tree funtype;
3667 tree type_arg_types;
3668 tree rettype;
3669 /* Declaration of the function being called,
3670 or 0 if the function is computed (not known by name). */
3671 tree fndecl = 0;
3672 /* The type of the function being called. */
3673 tree fntype;
3674 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3675 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3676 int pass;
3677
3678 /* Register in which non-BLKmode value will be returned,
3679 or 0 if no value or if value is BLKmode. */
3680 rtx valreg;
3681 /* Address where we should return a BLKmode value;
3682 0 if value not BLKmode. */
3683 rtx structure_value_addr = 0;
3684 /* Nonzero if that address is being passed by treating it as
3685 an extra, implicit first parameter. Otherwise,
3686 it is passed by being copied directly into struct_value_rtx. */
3687 int structure_value_addr_parm = 0;
3688 /* Holds the value of implicit argument for the struct value. */
3689 tree structure_value_addr_value = NULL_TREE;
3690 /* Size of aggregate value wanted, or zero if none wanted
3691 or if we are using the non-reentrant PCC calling convention
3692 or expecting the value in registers. */
3693 poly_int64 struct_value_size = 0;
3694 /* Nonzero if called function returns an aggregate in memory PCC style,
3695 by returning the address of where to find it. */
3696 int pcc_struct_value = 0;
3697 rtx struct_value = 0;
3698
3699 /* Number of actual parameters in this call, including struct value addr. */
3700 int num_actuals;
3701 /* Number of named args. Args after this are anonymous ones
3702 and they must all go on the stack. */
3703 int n_named_args;
3704 /* Number of complex actual arguments that need to be split. */
3705 int num_complex_actuals = 0;
3706
3707 /* Vector of information about each argument.
3708 Arguments are numbered in the order they will be pushed,
3709 not the order they are written. */
3710 struct arg_data *args;
3711
3712 /* Total size in bytes of all the stack-parms scanned so far. */
3713 struct args_size args_size;
3714 struct args_size adjusted_args_size;
3715 /* Size of arguments before any adjustments (such as rounding). */
3716 poly_int64 unadjusted_args_size;
3717 /* Data on reg parms scanned so far. */
3718 CUMULATIVE_ARGS args_so_far_v;
3719 cumulative_args_t args_so_far;
3720 /* Nonzero if a reg parm has been scanned. */
3721 int reg_parm_seen;
3722 /* Nonzero if this is an indirect function call. */
3723
3724 /* Nonzero if we must avoid push-insns in the args for this call.
3725 If stack space is allocated for register parameters, but not by the
3726 caller, then it is preallocated in the fixed part of the stack frame.
3727 So the entire argument block must then be preallocated (i.e., we
3728 ignore PUSH_ROUNDING in that case). */
3729
3730 int must_preallocate = !PUSH_ARGS;
3731
3732 /* Size of the stack reserved for parameter registers. */
3733 int reg_parm_stack_space = 0;
3734
3735 /* Address of space preallocated for stack parms
3736 (on machines that lack push insns), or 0 if space not preallocated. */
3737 rtx argblock = 0;
3738
3739 /* Mask of ECF_ and ERF_ flags. */
3740 int flags = 0;
3741 int return_flags = 0;
3742 #ifdef REG_PARM_STACK_SPACE
3743 /* Define the boundary of the register parm stack space that needs to be
3744 saved, if any. */
3745 int low_to_save, high_to_save;
3746 rtx save_area = 0; /* Place that it is saved */
3747 #endif
3748
3749 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3750 char *initial_stack_usage_map = stack_usage_map;
3751 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3752 char *stack_usage_map_buf = NULL;
3753
3754 poly_int64 old_stack_allocated;
3755
3756 /* State variables to track stack modifications. */
3757 rtx old_stack_level = 0;
3758 int old_stack_arg_under_construction = 0;
3759 poly_int64 old_pending_adj = 0;
3760 int old_inhibit_defer_pop = inhibit_defer_pop;
3761
3762 /* Some stack pointer alterations we make are performed via
3763 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3764 which we then also need to save/restore along the way. */
3765 poly_int64 old_stack_pointer_delta = 0;
3766
3767 rtx call_fusage;
3768 tree addr = CALL_EXPR_FN (exp);
3769 int i;
3770 /* The alignment of the stack, in bits. */
3771 unsigned HOST_WIDE_INT preferred_stack_boundary;
3772 /* The alignment of the stack, in bytes. */
3773 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3774 /* The static chain value to use for this call. */
3775 rtx static_chain_value;
3776 /* See if this is a "nothrow" function call. */
3777 if (TREE_NOTHROW (exp))
3778 flags |= ECF_NOTHROW;
3779
3780 /* See if we can find a DECL-node for the actual function, and get the
3781 function attributes (flags) from the function decl or type node. */
3782 fndecl = get_callee_fndecl (exp);
3783 if (fndecl)
3784 {
3785 fntype = TREE_TYPE (fndecl);
3786 flags |= flags_from_decl_or_type (fndecl);
3787 return_flags |= decl_return_flags (fndecl);
3788 }
3789 else
3790 {
3791 fntype = TREE_TYPE (TREE_TYPE (addr));
3792 flags |= flags_from_decl_or_type (fntype);
3793 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3794 flags |= ECF_BY_DESCRIPTOR;
3795 }
3796 rettype = TREE_TYPE (exp);
3797
3798 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3799
3800 /* Warn if this value is an aggregate type,
3801 regardless of which calling convention we are using for it. */
3802 if (AGGREGATE_TYPE_P (rettype))
3803 warning (OPT_Waggregate_return, "function call has aggregate value");
3804
3805 /* If the result of a non-looping pure or const function call is
3806 ignored (or void), and none of its arguments are volatile, we can
3807 avoid expanding the call and just evaluate the arguments for
3808 side-effects. */
3809 if ((flags & (ECF_CONST | ECF_PURE))
3810 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3811 && (flags & ECF_NOTHROW)
3812 && (ignore || target == const0_rtx
3813 || TYPE_MODE (rettype) == VOIDmode))
3814 {
3815 bool volatilep = false;
3816 tree arg;
3817 call_expr_arg_iterator iter;
3818
3819 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3820 if (TREE_THIS_VOLATILE (arg))
3821 {
3822 volatilep = true;
3823 break;
3824 }
3825
3826 if (! volatilep)
3827 {
3828 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3829 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3830 return const0_rtx;
3831 }
3832 }
3833
3834 #ifdef REG_PARM_STACK_SPACE
3835 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3836 #endif
3837
3838 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3839 && reg_parm_stack_space > 0 && PUSH_ARGS)
3840 must_preallocate = 1;
3841
3842 /* Set up a place to return a structure. */
3843
3844 /* Cater to broken compilers. */
3845 if (aggregate_value_p (exp, fntype))
3846 {
3847 /* This call returns a big structure. */
3848 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3849
3850 #ifdef PCC_STATIC_STRUCT_RETURN
3851 {
3852 pcc_struct_value = 1;
3853 }
3854 #else /* not PCC_STATIC_STRUCT_RETURN */
3855 {
3856 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3857 struct_value_size = -1;
3858
3859 /* Even if it is semantically safe to use the target as the return
3860 slot, it may not be sufficiently aligned for the return type. */
3861 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3862 && target
3863 && MEM_P (target)
3864 /* If rettype is addressable, we may not create a temporary.
3865 If target is properly aligned at runtime and the compiler
3866 just doesn't know about it, it will work fine, otherwise it
3867 will be UB. */
3868 && (TREE_ADDRESSABLE (rettype)
3869 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3870 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3871 MEM_ALIGN (target)))))
3872 structure_value_addr = XEXP (target, 0);
3873 else
3874 {
3875 /* For variable-sized objects, we must be called with a target
3876 specified. If we were to allocate space on the stack here,
3877 we would have no way of knowing when to free it. */
3878 rtx d = assign_temp (rettype, 1, 1);
3879 structure_value_addr = XEXP (d, 0);
3880 target = 0;
3881 }
3882 }
3883 #endif /* not PCC_STATIC_STRUCT_RETURN */
3884 }
3885
3886 /* Figure out the amount to which the stack should be aligned. */
3887 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3888 if (fndecl)
3889 {
3890 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3891 /* Without automatic stack alignment, we can't increase preferred
3892 stack boundary. With automatic stack alignment, it is
3893 unnecessary since unless we can guarantee that all callers will
3894 align the outgoing stack properly, callee has to align its
3895 stack anyway. */
3896 if (i
3897 && i->preferred_incoming_stack_boundary
3898 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3899 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3900 }
3901
3902 /* Operand 0 is a pointer-to-function; get the type of the function. */
3903 funtype = TREE_TYPE (addr);
3904 gcc_assert (POINTER_TYPE_P (funtype));
3905 funtype = TREE_TYPE (funtype);
3906
3907 /* Count the actual complex arguments that need to be split
3908 into their real and imaginary parts. Munge the type_arg_types
3909 appropriately here as well. */
3910 if (targetm.calls.split_complex_arg)
3911 {
3912 call_expr_arg_iterator iter;
3913 tree arg;
3914 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3915 {
3916 tree type = TREE_TYPE (arg);
3917 if (type && TREE_CODE (type) == COMPLEX_TYPE
3918 && targetm.calls.split_complex_arg (type))
3919 num_complex_actuals++;
3920 }
3921 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3922 }
3923 else
3924 type_arg_types = TYPE_ARG_TYPES (funtype);
3925
3926 if (flags & ECF_MAY_BE_ALLOCA)
3927 cfun->calls_alloca = 1;
3928
3929 /* If struct_value_rtx is 0, it means pass the address
3930 as if it were an extra parameter. Put the argument expression
3931 in structure_value_addr_value. */
3932 if (structure_value_addr && struct_value == 0)
3933 {
3934 /* If structure_value_addr is a REG other than
3935 virtual_outgoing_args_rtx, we can always use it. If it
3936 is not a REG, we must always copy it into a register.
3937 If it is virtual_outgoing_args_rtx, we must copy it to another
3938 register in some cases. */
3939 rtx temp = (!REG_P (structure_value_addr)
3940 || (ACCUMULATE_OUTGOING_ARGS
3941 && stack_arg_under_construction
3942 && structure_value_addr == virtual_outgoing_args_rtx)
3943 ? copy_addr_to_reg (convert_memory_address
3944 (Pmode, structure_value_addr))
3945 : structure_value_addr);
3946
3947 structure_value_addr_value =
3948 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3949 structure_value_addr_parm = 1;
3950 }
3951
3952 /* Count the arguments and set NUM_ACTUALS. */
3953 num_actuals =
3954 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3955
3956 /* Compute number of named args.
3957 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3958
3959 if (type_arg_types != 0)
3960 n_named_args
3961 = (list_length (type_arg_types)
3962 /* Count the struct value address, if it is passed as a parm. */
3963 + structure_value_addr_parm);
3964 else
3965 /* If we know nothing, treat all args as named. */
3966 n_named_args = num_actuals;
3967
3968 /* Start updating where the next arg would go.
3969
3970 On some machines (such as the PA) indirect calls have a different
3971 calling convention than normal calls. The fourth argument in
3972 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3973 or not. */
3974 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3975 args_so_far = pack_cumulative_args (&args_so_far_v);
3976
3977 /* Now possibly adjust the number of named args.
3978 Normally, don't include the last named arg if anonymous args follow.
3979 We do include the last named arg if
3980 targetm.calls.strict_argument_naming() returns nonzero.
3981 (If no anonymous args follow, the result of list_length is actually
3982 one too large. This is harmless.)
3983
3984 If targetm.calls.pretend_outgoing_varargs_named() returns
3985 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3986 this machine will be able to place unnamed args that were passed
3987 in registers into the stack. So treat all args as named. This
3988 allows the insns emitted for a specific argument list to be
3989 independent of the function declaration.
3990
3991 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3992 we do not have any reliable way to pass unnamed args in
3993 registers, so we must force them into memory. */
3994
3995 if (type_arg_types != 0
3996 && targetm.calls.strict_argument_naming (args_so_far))
3997 ;
3998 else if (type_arg_types != 0
3999 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
4000 /* Don't include the last named arg. */
4001 --n_named_args;
4002 else
4003 /* Treat all args as named. */
4004 n_named_args = num_actuals;
4005
4006 /* Make a vector to hold all the information about each arg. */
4007 args = XCNEWVEC (struct arg_data, num_actuals);
4008
4009 /* Build up entries in the ARGS array, compute the size of the
4010 arguments into ARGS_SIZE, etc. */
4011 initialize_argument_information (num_actuals, args, &args_size,
4012 n_named_args, exp,
4013 structure_value_addr_value, fndecl, fntype,
4014 args_so_far, reg_parm_stack_space,
4015 &old_stack_level, &old_pending_adj,
4016 &must_preallocate, &flags,
4017 &try_tail_call, CALL_FROM_THUNK_P (exp));
4018
4019 if (args_size.var)
4020 must_preallocate = 1;
4021
4022 /* Now make final decision about preallocating stack space. */
4023 must_preallocate = finalize_must_preallocate (must_preallocate,
4024 num_actuals, args,
4025 &args_size);
4026
4027 /* If the structure value address will reference the stack pointer, we
4028 must stabilize it. We don't need to do this if we know that we are
4029 not going to adjust the stack pointer in processing this call. */
4030
4031 if (structure_value_addr
4032 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
4033 || reg_mentioned_p (virtual_outgoing_args_rtx,
4034 structure_value_addr))
4035 && (args_size.var
4036 || (!ACCUMULATE_OUTGOING_ARGS
4037 && maybe_ne (args_size.constant, 0))))
4038 structure_value_addr = copy_to_reg (structure_value_addr);
4039
4040 /* Tail calls can make things harder to debug, and we've traditionally
4041 pushed these optimizations into -O2. Don't try if we're already
4042 expanding a call, as that means we're an argument. Don't try if
4043 there are cleanups, as we know there's code to follow the call. */
4044 if (currently_expanding_call++ != 0
4045 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
4046 || args_size.var
4047 || dbg_cnt (tail_call) == false)
4048 try_tail_call = 0;
4049
4050 /* Workaround buggy C/C++ wrappers around Fortran routines with
4051 character(len=constant) arguments if the hidden string length arguments
4052 are passed on the stack; if the callers forget to pass those arguments,
4053 attempting to tail call in such routines leads to stack corruption.
4054 Avoid tail calls in functions where at least one such hidden string
4055 length argument is passed (partially or fully) on the stack in the
4056 caller and the callee needs to pass any arguments on the stack.
4057 See PR90329. */
4058 if (try_tail_call && maybe_ne (args_size.constant, 0))
4059 for (tree arg = DECL_ARGUMENTS (current_function_decl);
4060 arg; arg = DECL_CHAIN (arg))
4061 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
4062 {
4063 subrtx_iterator::array_type array;
4064 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
4065 if (MEM_P (*iter))
4066 {
4067 try_tail_call = 0;
4068 break;
4069 }
4070 }
4071
4072 /* If the user has marked the function as requiring tail-call
4073 optimization, attempt it. */
4074 if (must_tail_call)
4075 try_tail_call = 1;
4076
4077 /* Check the remaining reasons for tail call optimization to fail. */
4078 if (try_tail_call)
4079 try_tail_call = can_implement_as_sibling_call_p (exp,
4080 structure_value_addr,
4081 funtype,
4082 fndecl,
4083 flags, addr, args_size);
4084
4085 /* Check if caller and callee disagree in promotion of function
4086 return value. */
4087 if (try_tail_call)
4088 {
4089 machine_mode caller_mode, caller_promoted_mode;
4090 machine_mode callee_mode, callee_promoted_mode;
4091 int caller_unsignedp, callee_unsignedp;
4092 tree caller_res = DECL_RESULT (current_function_decl);
4093
4094 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
4095 caller_mode = DECL_MODE (caller_res);
4096 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
4097 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
4098 caller_promoted_mode
4099 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
4100 &caller_unsignedp,
4101 TREE_TYPE (current_function_decl), 1);
4102 callee_promoted_mode
4103 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
4104 &callee_unsignedp,
4105 funtype, 1);
4106 if (caller_mode != VOIDmode
4107 && (caller_promoted_mode != callee_promoted_mode
4108 || ((caller_mode != caller_promoted_mode
4109 || callee_mode != callee_promoted_mode)
4110 && (caller_unsignedp != callee_unsignedp
4111 || partial_subreg_p (caller_mode, callee_mode)))))
4112 {
4113 try_tail_call = 0;
4114 maybe_complain_about_tail_call (exp,
4115 "caller and callee disagree in"
4116 " promotion of function"
4117 " return value");
4118 }
4119 }
4120
4121 /* Ensure current function's preferred stack boundary is at least
4122 what we need. Stack alignment may also increase preferred stack
4123 boundary. */
4124 for (i = 0; i < num_actuals; i++)
4125 if (reg_parm_stack_space > 0
4126 || args[i].reg == 0
4127 || args[i].partial != 0
4128 || args[i].pass_on_stack)
4129 update_stack_alignment_for_call (&args[i].locate);
4130 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
4131 crtl->preferred_stack_boundary = preferred_stack_boundary;
4132 else
4133 preferred_stack_boundary = crtl->preferred_stack_boundary;
4134
4135 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4136
4137 if (flag_callgraph_info)
4138 record_final_call (fndecl, EXPR_LOCATION (exp));
4139
4140 /* We want to make two insn chains; one for a sibling call, the other
4141 for a normal call. We will select one of the two chains after
4142 initial RTL generation is complete. */
4143 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
4144 {
4145 int sibcall_failure = 0;
4146 /* We want to emit any pending stack adjustments before the tail
4147 recursion "call". That way we know any adjustment after the tail
4148 recursion call can be ignored if we indeed use the tail
4149 call expansion. */
4150 saved_pending_stack_adjust save;
4151 rtx_insn *insns, *before_call, *after_args;
4152 rtx next_arg_reg;
4153
4154 if (pass == 0)
4155 {
4156 /* State variables we need to save and restore between
4157 iterations. */
4158 save_pending_stack_adjust (&save);
4159 }
4160 if (pass)
4161 flags &= ~ECF_SIBCALL;
4162 else
4163 flags |= ECF_SIBCALL;
4164
4165 /* Other state variables that we must reinitialize each time
4166 through the loop (that are not initialized by the loop itself). */
4167 argblock = 0;
4168 call_fusage = 0;
4169
4170 /* Start a new sequence for the normal call case.
4171
4172 From this point on, if the sibling call fails, we want to set
4173 sibcall_failure instead of continuing the loop. */
4174 start_sequence ();
4175
4176 /* Don't let pending stack adjusts add up to too much.
4177 Also, do all pending adjustments now if there is any chance
4178 this might be a call to alloca or if we are expanding a sibling
4179 call sequence.
4180 Also do the adjustments before a throwing call, otherwise
4181 exception handling can fail; PR 19225. */
4182 if (maybe_ge (pending_stack_adjust, 32)
4183 || (maybe_ne (pending_stack_adjust, 0)
4184 && (flags & ECF_MAY_BE_ALLOCA))
4185 || (maybe_ne (pending_stack_adjust, 0)
4186 && flag_exceptions && !(flags & ECF_NOTHROW))
4187 || pass == 0)
4188 do_pending_stack_adjust ();
4189
4190 /* Precompute any arguments as needed. */
4191 if (pass)
4192 precompute_arguments (num_actuals, args);
4193
4194 /* Now we are about to start emitting insns that can be deleted
4195 if a libcall is deleted. */
4196 if (pass && (flags & ECF_MALLOC))
4197 start_sequence ();
4198
4199 if (pass == 0
4200 && crtl->stack_protect_guard
4201 && targetm.stack_protect_runtime_enabled_p ())
4202 stack_protect_epilogue ();
4203
4204 adjusted_args_size = args_size;
4205 /* Compute the actual size of the argument block required. The variable
4206 and constant sizes must be combined, the size may have to be rounded,
4207 and there may be a minimum required size. When generating a sibcall
4208 pattern, do not round up, since we'll be re-using whatever space our
4209 caller provided. */
4210 unadjusted_args_size
4211 = compute_argument_block_size (reg_parm_stack_space,
4212 &adjusted_args_size,
4213 fndecl, fntype,
4214 (pass == 0 ? 0
4215 : preferred_stack_boundary));
4216
4217 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4218
4219 /* The argument block when performing a sibling call is the
4220 incoming argument block. */
4221 if (pass == 0)
4222 {
4223 argblock = crtl->args.internal_arg_pointer;
4224 if (STACK_GROWS_DOWNWARD)
4225 argblock
4226 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4227 else
4228 argblock
4229 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4230
4231 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4232 stored_args_map = sbitmap_alloc (map_size);
4233 bitmap_clear (stored_args_map);
4234 stored_args_watermark = HOST_WIDE_INT_M1U;
4235 }
4236
4237 /* If we have no actual push instructions, or shouldn't use them,
4238 make space for all args right now. */
4239 else if (adjusted_args_size.var != 0)
4240 {
4241 if (old_stack_level == 0)
4242 {
4243 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4244 old_stack_pointer_delta = stack_pointer_delta;
4245 old_pending_adj = pending_stack_adjust;
4246 pending_stack_adjust = 0;
4247 /* stack_arg_under_construction says whether a stack arg is
4248 being constructed at the old stack level. Pushing the stack
4249 gets a clean outgoing argument block. */
4250 old_stack_arg_under_construction = stack_arg_under_construction;
4251 stack_arg_under_construction = 0;
4252 }
4253 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
4254 if (flag_stack_usage_info)
4255 current_function_has_unbounded_dynamic_stack_size = 1;
4256 }
4257 else
4258 {
4259 /* Note that we must go through the motions of allocating an argument
4260 block even if the size is zero because we may be storing args
4261 in the area reserved for register arguments, which may be part of
4262 the stack frame. */
4263
4264 poly_int64 needed = adjusted_args_size.constant;
4265
4266 /* Store the maximum argument space used. It will be pushed by
4267 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4268 checking). */
4269
4270 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4271 needed);
4272
4273 if (must_preallocate)
4274 {
4275 if (ACCUMULATE_OUTGOING_ARGS)
4276 {
4277 /* Since the stack pointer will never be pushed, it is
4278 possible for the evaluation of a parm to clobber
4279 something we have already written to the stack.
4280 Since most function calls on RISC machines do not use
4281 the stack, this is uncommon, but must work correctly.
4282
4283 Therefore, we save any area of the stack that was already
4284 written and that we are using. Here we set up to do this
4285 by making a new stack usage map from the old one. The
4286 actual save will be done by store_one_arg.
4287
4288 Another approach might be to try to reorder the argument
4289 evaluations to avoid this conflicting stack usage. */
4290
4291 /* Since we will be writing into the entire argument area,
4292 the map must be allocated for its entire size, not just
4293 the part that is the responsibility of the caller. */
4294 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4295 needed += reg_parm_stack_space;
4296
4297 poly_int64 limit = needed;
4298 if (ARGS_GROW_DOWNWARD)
4299 limit += 1;
4300
4301 /* For polynomial sizes, this is the maximum possible
4302 size needed for arguments with a constant size
4303 and offset. */
4304 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4305 highest_outgoing_arg_in_use
4306 = MAX (initial_highest_arg_in_use, const_limit);
4307
4308 free (stack_usage_map_buf);
4309 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4310 stack_usage_map = stack_usage_map_buf;
4311
4312 if (initial_highest_arg_in_use)
4313 memcpy (stack_usage_map, initial_stack_usage_map,
4314 initial_highest_arg_in_use);
4315
4316 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4317 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4318 (highest_outgoing_arg_in_use
4319 - initial_highest_arg_in_use));
4320 needed = 0;
4321
4322 /* The address of the outgoing argument list must not be
4323 copied to a register here, because argblock would be left
4324 pointing to the wrong place after the call to
4325 allocate_dynamic_stack_space below. */
4326
4327 argblock = virtual_outgoing_args_rtx;
4328 }
4329 else
4330 {
4331 /* Try to reuse some or all of the pending_stack_adjust
4332 to get this space. */
4333 if (inhibit_defer_pop == 0
4334 && (combine_pending_stack_adjustment_and_call
4335 (&needed,
4336 unadjusted_args_size,
4337 &adjusted_args_size,
4338 preferred_unit_stack_boundary)))
4339 {
4340 /* combine_pending_stack_adjustment_and_call computes
4341 an adjustment before the arguments are allocated.
4342 Account for them and see whether or not the stack
4343 needs to go up or down. */
4344 needed = unadjusted_args_size - needed;
4345
4346 /* Checked by
4347 combine_pending_stack_adjustment_and_call. */
4348 gcc_checking_assert (ordered_p (needed, 0));
4349 if (maybe_lt (needed, 0))
4350 {
4351 /* We're releasing stack space. */
4352 /* ??? We can avoid any adjustment at all if we're
4353 already aligned. FIXME. */
4354 pending_stack_adjust = -needed;
4355 do_pending_stack_adjust ();
4356 needed = 0;
4357 }
4358 else
4359 /* We need to allocate space. We'll do that in
4360 push_block below. */
4361 pending_stack_adjust = 0;
4362 }
4363
4364 /* Special case this because overhead of `push_block' in
4365 this case is non-trivial. */
4366 if (known_eq (needed, 0))
4367 argblock = virtual_outgoing_args_rtx;
4368 else
4369 {
4370 rtx needed_rtx = gen_int_mode (needed, Pmode);
4371 argblock = push_block (needed_rtx, 0, 0);
4372 if (ARGS_GROW_DOWNWARD)
4373 argblock = plus_constant (Pmode, argblock, needed);
4374 }
4375
4376 /* We only really need to call `copy_to_reg' in the case
4377 where push insns are going to be used to pass ARGBLOCK
4378 to a function call in ARGS. In that case, the stack
4379 pointer changes value from the allocation point to the
4380 call point, and hence the value of
4381 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4382 as well always do it. */
4383 argblock = copy_to_reg (argblock);
4384 }
4385 }
4386 }
4387
4388 if (ACCUMULATE_OUTGOING_ARGS)
4389 {
4390 /* The save/restore code in store_one_arg handles all
4391 cases except one: a constructor call (including a C
4392 function returning a BLKmode struct) to initialize
4393 an argument. */
4394 if (stack_arg_under_construction)
4395 {
4396 rtx push_size
4397 = (gen_int_mode
4398 (adjusted_args_size.constant
4399 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4400 : TREE_TYPE (fndecl))
4401 ? 0 : reg_parm_stack_space), Pmode));
4402 if (old_stack_level == 0)
4403 {
4404 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4405 old_stack_pointer_delta = stack_pointer_delta;
4406 old_pending_adj = pending_stack_adjust;
4407 pending_stack_adjust = 0;
4408 /* stack_arg_under_construction says whether a stack
4409 arg is being constructed at the old stack level.
4410 Pushing the stack gets a clean outgoing argument
4411 block. */
4412 old_stack_arg_under_construction
4413 = stack_arg_under_construction;
4414 stack_arg_under_construction = 0;
4415 /* Make a new map for the new argument list. */
4416 free (stack_usage_map_buf);
4417 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
4418 stack_usage_map = stack_usage_map_buf;
4419 highest_outgoing_arg_in_use = 0;
4420 stack_usage_watermark = HOST_WIDE_INT_M1U;
4421 }
4422 /* We can pass TRUE as the 4th argument because we just
4423 saved the stack pointer and will restore it right after
4424 the call. */
4425 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4426 -1, true);
4427 }
4428
4429 /* If argument evaluation might modify the stack pointer,
4430 copy the address of the argument list to a register. */
4431 for (i = 0; i < num_actuals; i++)
4432 if (args[i].pass_on_stack)
4433 {
4434 argblock = copy_addr_to_reg (argblock);
4435 break;
4436 }
4437 }
4438
4439 compute_argument_addresses (args, argblock, num_actuals);
4440
4441 /* Stack is properly aligned, pops can't safely be deferred during
4442 the evaluation of the arguments. */
4443 NO_DEFER_POP;
4444
4445 /* Precompute all register parameters. It isn't safe to compute
4446 anything once we have started filling any specific hard regs.
4447 TLS symbols sometimes need a call to resolve. Precompute
4448 register parameters before any stack pointer manipulation
4449 to avoid unaligned stack in the called function. */
4450 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4451
4452 OK_DEFER_POP;
4453
4454 /* Perform stack alignment before the first push (the last arg). */
4455 if (argblock == 0
4456 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4457 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4458 {
4459 /* When the stack adjustment is pending, we get better code
4460 by combining the adjustments. */
4461 if (maybe_ne (pending_stack_adjust, 0)
4462 && ! inhibit_defer_pop
4463 && (combine_pending_stack_adjustment_and_call
4464 (&pending_stack_adjust,
4465 unadjusted_args_size,
4466 &adjusted_args_size,
4467 preferred_unit_stack_boundary)))
4468 do_pending_stack_adjust ();
4469 else if (argblock == 0)
4470 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4471 - unadjusted_args_size,
4472 Pmode));
4473 }
4474 /* Now that the stack is properly aligned, pops can't safely
4475 be deferred during the evaluation of the arguments. */
4476 NO_DEFER_POP;
4477
4478 /* Record the maximum pushed stack space size. We need to delay
4479 doing it this far to take into account the optimization done
4480 by combine_pending_stack_adjustment_and_call. */
4481 if (flag_stack_usage_info
4482 && !ACCUMULATE_OUTGOING_ARGS
4483 && pass
4484 && adjusted_args_size.var == 0)
4485 {
4486 poly_int64 pushed = (adjusted_args_size.constant
4487 + pending_stack_adjust);
4488 current_function_pushed_stack_size
4489 = upper_bound (current_function_pushed_stack_size, pushed);
4490 }
4491
4492 funexp = rtx_for_function_call (fndecl, addr);
4493
4494 if (CALL_EXPR_STATIC_CHAIN (exp))
4495 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4496 else
4497 static_chain_value = 0;
4498
4499 #ifdef REG_PARM_STACK_SPACE
4500 /* Save the fixed argument area if it's part of the caller's frame and
4501 is clobbered by argument setup for this call. */
4502 if (ACCUMULATE_OUTGOING_ARGS && pass)
4503 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4504 &low_to_save, &high_to_save);
4505 #endif
4506
4507 /* Now store (and compute if necessary) all non-register parms.
4508 These come before register parms, since they can require block-moves,
4509 which could clobber the registers used for register parms.
4510 Parms which have partial registers are not stored here,
4511 but we do preallocate space here if they want that. */
4512
4513 for (i = 0; i < num_actuals; i++)
4514 {
4515 if (args[i].reg == 0 || args[i].pass_on_stack)
4516 {
4517 rtx_insn *before_arg = get_last_insn ();
4518
4519 /* We don't allow passing huge (> 2^30 B) arguments
4520 by value. It would cause an overflow later on. */
4521 if (constant_lower_bound (adjusted_args_size.constant)
4522 >= (1 << (HOST_BITS_PER_INT - 2)))
4523 {
4524 sorry ("passing too large argument on stack");
4525 continue;
4526 }
4527
4528 if (store_one_arg (&args[i], argblock, flags,
4529 adjusted_args_size.var != 0,
4530 reg_parm_stack_space)
4531 || (pass == 0
4532 && check_sibcall_argument_overlap (before_arg,
4533 &args[i], 1)))
4534 sibcall_failure = 1;
4535 }
4536
4537 if (args[i].stack)
4538 call_fusage
4539 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4540 gen_rtx_USE (VOIDmode, args[i].stack),
4541 call_fusage);
4542 }
4543
4544 /* If we have a parm that is passed in registers but not in memory
4545 and whose alignment does not permit a direct copy into registers,
4546 make a group of pseudos that correspond to each register that we
4547 will later fill. */
4548 if (STRICT_ALIGNMENT)
4549 store_unaligned_arguments_into_pseudos (args, num_actuals);
4550
4551 /* Now store any partially-in-registers parm.
4552 This is the last place a block-move can happen. */
4553 if (reg_parm_seen)
4554 for (i = 0; i < num_actuals; i++)
4555 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4556 {
4557 rtx_insn *before_arg = get_last_insn ();
4558
4559 /* On targets with weird calling conventions (e.g. PA) it's
4560 hard to ensure that all cases of argument overlap between
4561 stack and registers work. Play it safe and bail out. */
4562 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4563 {
4564 sibcall_failure = 1;
4565 break;
4566 }
4567
4568 if (store_one_arg (&args[i], argblock, flags,
4569 adjusted_args_size.var != 0,
4570 reg_parm_stack_space)
4571 || (pass == 0
4572 && check_sibcall_argument_overlap (before_arg,
4573 &args[i], 1)))
4574 sibcall_failure = 1;
4575 }
4576
4577 bool any_regs = false;
4578 for (i = 0; i < num_actuals; i++)
4579 if (args[i].reg != NULL_RTX)
4580 {
4581 any_regs = true;
4582 targetm.calls.call_args (args[i].reg, funtype);
4583 }
4584 if (!any_regs)
4585 targetm.calls.call_args (pc_rtx, funtype);
4586
4587 /* Figure out the register where the value, if any, will come back. */
4588 valreg = 0;
4589 if (TYPE_MODE (rettype) != VOIDmode
4590 && ! structure_value_addr)
4591 {
4592 if (pcc_struct_value)
4593 valreg = hard_function_value (build_pointer_type (rettype),
4594 fndecl, NULL, (pass == 0));
4595 else
4596 valreg = hard_function_value (rettype, fndecl, fntype,
4597 (pass == 0));
4598
4599 /* If VALREG is a PARALLEL whose first member has a zero
4600 offset, use that. This is for targets such as m68k that
4601 return the same value in multiple places. */
4602 if (GET_CODE (valreg) == PARALLEL)
4603 {
4604 rtx elem = XVECEXP (valreg, 0, 0);
4605 rtx where = XEXP (elem, 0);
4606 rtx offset = XEXP (elem, 1);
4607 if (offset == const0_rtx
4608 && GET_MODE (where) == GET_MODE (valreg))
4609 valreg = where;
4610 }
4611 }
4612
4613 /* If register arguments require space on the stack and stack space
4614 was not preallocated, allocate stack space here for arguments
4615 passed in registers. */
4616 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4617 && !ACCUMULATE_OUTGOING_ARGS
4618 && must_preallocate == 0 && reg_parm_stack_space > 0)
4619 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4620
4621 /* Pass the function the address in which to return a
4622 structure value. */
4623 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4624 {
4625 structure_value_addr
4626 = convert_memory_address (Pmode, structure_value_addr);
4627 emit_move_insn (struct_value,
4628 force_reg (Pmode,
4629 force_operand (structure_value_addr,
4630 NULL_RTX)));
4631
4632 if (REG_P (struct_value))
4633 use_reg (&call_fusage, struct_value);
4634 }
4635
4636 after_args = get_last_insn ();
4637 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4638 static_chain_value, &call_fusage,
4639 reg_parm_seen, flags);
4640
4641 load_register_parameters (args, num_actuals, &call_fusage, flags,
4642 pass == 0, &sibcall_failure);
4643
4644 /* Save a pointer to the last insn before the call, so that we can
4645 later safely search backwards to find the CALL_INSN. */
4646 before_call = get_last_insn ();
4647
4648 /* Set up next argument register. For sibling calls on machines
4649 with register windows this should be the incoming register. */
4650 if (pass == 0)
4651 next_arg_reg = targetm.calls.function_incoming_arg
4652 (args_so_far, function_arg_info::end_marker ());
4653 else
4654 next_arg_reg = targetm.calls.function_arg
4655 (args_so_far, function_arg_info::end_marker ());
4656
4657 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4658 {
4659 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4660 arg_nr = num_actuals - arg_nr - 1;
4661 if (arg_nr >= 0
4662 && arg_nr < num_actuals
4663 && args[arg_nr].reg
4664 && valreg
4665 && REG_P (valreg)
4666 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4667 call_fusage
4668 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4669 gen_rtx_SET (valreg, args[arg_nr].reg),
4670 call_fusage);
4671 }
4672 /* All arguments and registers used for the call must be set up by
4673 now! */
4674
4675 /* Stack must be properly aligned now. */
4676 gcc_assert (!pass
4677 || multiple_p (stack_pointer_delta,
4678 preferred_unit_stack_boundary));
4679
4680 /* Generate the actual call instruction. */
4681 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4682 adjusted_args_size.constant, struct_value_size,
4683 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4684 flags, args_so_far);
4685
4686 if (flag_ipa_ra)
4687 {
4688 rtx_call_insn *last;
4689 rtx datum = NULL_RTX;
4690 if (fndecl != NULL_TREE)
4691 {
4692 datum = XEXP (DECL_RTL (fndecl), 0);
4693 gcc_assert (datum != NULL_RTX
4694 && GET_CODE (datum) == SYMBOL_REF);
4695 }
4696 last = last_call_insn ();
4697 add_reg_note (last, REG_CALL_DECL, datum);
4698 }
4699
4700 /* If the call setup or the call itself overlaps with anything
4701 of the argument setup we probably clobbered our call address.
4702 In that case we can't do sibcalls. */
4703 if (pass == 0
4704 && check_sibcall_argument_overlap (after_args, 0, 0))
4705 sibcall_failure = 1;
4706
4707 /* If a non-BLKmode value is returned at the most significant end
4708 of a register, shift the register right by the appropriate amount
4709 and update VALREG accordingly. BLKmode values are handled by the
4710 group load/store machinery below. */
4711 if (!structure_value_addr
4712 && !pcc_struct_value
4713 && TYPE_MODE (rettype) != VOIDmode
4714 && TYPE_MODE (rettype) != BLKmode
4715 && REG_P (valreg)
4716 && targetm.calls.return_in_msb (rettype))
4717 {
4718 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4719 sibcall_failure = 1;
4720 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4721 }
4722
4723 if (pass && (flags & ECF_MALLOC))
4724 {
4725 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4726 rtx_insn *last, *insns;
4727
4728 /* The return value from a malloc-like function is a pointer. */
4729 if (TREE_CODE (rettype) == POINTER_TYPE)
4730 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4731
4732 emit_move_insn (temp, valreg);
4733
4734 /* The return value from a malloc-like function cannot alias
4735 anything else. */
4736 last = get_last_insn ();
4737 add_reg_note (last, REG_NOALIAS, temp);
4738
4739 /* Write out the sequence. */
4740 insns = get_insns ();
4741 end_sequence ();
4742 emit_insn (insns);
4743 valreg = temp;
4744 }
4745
4746 /* For calls to `setjmp', etc., inform
4747 function.c:setjmp_warnings that it should complain if
4748 nonvolatile values are live. For functions that cannot
4749 return, inform flow that control does not fall through. */
4750
4751 if ((flags & ECF_NORETURN) || pass == 0)
4752 {
4753 /* The barrier must be emitted
4754 immediately after the CALL_INSN. Some ports emit more
4755 than just a CALL_INSN above, so we must search for it here. */
4756
4757 rtx_insn *last = get_last_insn ();
4758 while (!CALL_P (last))
4759 {
4760 last = PREV_INSN (last);
4761 /* There was no CALL_INSN? */
4762 gcc_assert (last != before_call);
4763 }
4764
4765 emit_barrier_after (last);
4766
4767 /* Stack adjustments after a noreturn call are dead code.
4768 However when NO_DEFER_POP is in effect, we must preserve
4769 stack_pointer_delta. */
4770 if (inhibit_defer_pop == 0)
4771 {
4772 stack_pointer_delta = old_stack_allocated;
4773 pending_stack_adjust = 0;
4774 }
4775 }
4776
4777 /* If value type not void, return an rtx for the value. */
4778
4779 if (TYPE_MODE (rettype) == VOIDmode
4780 || ignore)
4781 target = const0_rtx;
4782 else if (structure_value_addr)
4783 {
4784 if (target == 0 || !MEM_P (target))
4785 {
4786 target
4787 = gen_rtx_MEM (TYPE_MODE (rettype),
4788 memory_address (TYPE_MODE (rettype),
4789 structure_value_addr));
4790 set_mem_attributes (target, rettype, 1);
4791 }
4792 }
4793 else if (pcc_struct_value)
4794 {
4795 /* This is the special C++ case where we need to
4796 know what the true target was. We take care to
4797 never use this value more than once in one expression. */
4798 target = gen_rtx_MEM (TYPE_MODE (rettype),
4799 copy_to_reg (valreg));
4800 set_mem_attributes (target, rettype, 1);
4801 }
4802 /* Handle calls that return values in multiple non-contiguous locations.
4803 The Irix 6 ABI has examples of this. */
4804 else if (GET_CODE (valreg) == PARALLEL)
4805 {
4806 if (target == 0)
4807 target = emit_group_move_into_temps (valreg);
4808 else if (rtx_equal_p (target, valreg))
4809 ;
4810 else if (GET_CODE (target) == PARALLEL)
4811 /* Handle the result of an emit_group_move_into_temps
4812 call in the previous pass. */
4813 emit_group_move (target, valreg);
4814 else
4815 emit_group_store (target, valreg, rettype,
4816 int_size_in_bytes (rettype));
4817 }
4818 else if (target
4819 && GET_MODE (target) == TYPE_MODE (rettype)
4820 && GET_MODE (target) == GET_MODE (valreg))
4821 {
4822 bool may_overlap = false;
4823
4824 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4825 reg to a plain register. */
4826 if (!REG_P (target) || HARD_REGISTER_P (target))
4827 valreg = avoid_likely_spilled_reg (valreg);
4828
4829 /* If TARGET is a MEM in the argument area, and we have
4830 saved part of the argument area, then we can't store
4831 directly into TARGET as it may get overwritten when we
4832 restore the argument save area below. Don't work too
4833 hard though and simply force TARGET to a register if it
4834 is a MEM; the optimizer is quite likely to sort it out. */
4835 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4836 for (i = 0; i < num_actuals; i++)
4837 if (args[i].save_area)
4838 {
4839 may_overlap = true;
4840 break;
4841 }
4842
4843 if (may_overlap)
4844 target = copy_to_reg (valreg);
4845 else
4846 {
4847 /* TARGET and VALREG cannot be equal at this point
4848 because the latter would not have
4849 REG_FUNCTION_VALUE_P true, while the former would if
4850 it were referring to the same register.
4851
4852 If they refer to the same register, this move will be
4853 a no-op, except when function inlining is being
4854 done. */
4855 emit_move_insn (target, valreg);
4856
4857 /* If we are setting a MEM, this code must be executed.
4858 Since it is emitted after the call insn, sibcall
4859 optimization cannot be performed in that case. */
4860 if (MEM_P (target))
4861 sibcall_failure = 1;
4862 }
4863 }
4864 else
4865 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4866
4867 /* If we promoted this return value, make the proper SUBREG.
4868 TARGET might be const0_rtx here, so be careful. */
4869 if (REG_P (target)
4870 && TYPE_MODE (rettype) != BLKmode
4871 && GET_MODE (target) != TYPE_MODE (rettype))
4872 {
4873 tree type = rettype;
4874 int unsignedp = TYPE_UNSIGNED (type);
4875 machine_mode pmode;
4876
4877 /* Ensure we promote as expected, and get the new unsignedness. */
4878 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4879 funtype, 1);
4880 gcc_assert (GET_MODE (target) == pmode);
4881
4882 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4883 GET_MODE (target));
4884 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4885 SUBREG_PROMOTED_VAR_P (target) = 1;
4886 SUBREG_PROMOTED_SET (target, unsignedp);
4887 }
4888
4889 /* If size of args is variable or this was a constructor call for a stack
4890 argument, restore saved stack-pointer value. */
4891
4892 if (old_stack_level)
4893 {
4894 rtx_insn *prev = get_last_insn ();
4895
4896 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4897 stack_pointer_delta = old_stack_pointer_delta;
4898
4899 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4900
4901 pending_stack_adjust = old_pending_adj;
4902 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4903 stack_arg_under_construction = old_stack_arg_under_construction;
4904 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4905 stack_usage_map = initial_stack_usage_map;
4906 stack_usage_watermark = initial_stack_usage_watermark;
4907 sibcall_failure = 1;
4908 }
4909 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4910 {
4911 #ifdef REG_PARM_STACK_SPACE
4912 if (save_area)
4913 restore_fixed_argument_area (save_area, argblock,
4914 high_to_save, low_to_save);
4915 #endif
4916
4917 /* If we saved any argument areas, restore them. */
4918 for (i = 0; i < num_actuals; i++)
4919 if (args[i].save_area)
4920 {
4921 machine_mode save_mode = GET_MODE (args[i].save_area);
4922 rtx stack_area
4923 = gen_rtx_MEM (save_mode,
4924 memory_address (save_mode,
4925 XEXP (args[i].stack_slot, 0)));
4926
4927 if (save_mode != BLKmode)
4928 emit_move_insn (stack_area, args[i].save_area);
4929 else
4930 emit_block_move (stack_area, args[i].save_area,
4931 (gen_int_mode
4932 (args[i].locate.size.constant, Pmode)),
4933 BLOCK_OP_CALL_PARM);
4934 }
4935
4936 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4937 stack_usage_map = initial_stack_usage_map;
4938 stack_usage_watermark = initial_stack_usage_watermark;
4939 }
4940
4941 /* If this was alloca, record the new stack level. */
4942 if (flags & ECF_MAY_BE_ALLOCA)
4943 record_new_stack_level ();
4944
4945 /* Free up storage we no longer need. */
4946 for (i = 0; i < num_actuals; ++i)
4947 free (args[i].aligned_regs);
4948
4949 targetm.calls.end_call_args ();
4950
4951 insns = get_insns ();
4952 end_sequence ();
4953
4954 if (pass == 0)
4955 {
4956 tail_call_insns = insns;
4957
4958 /* Restore the pending stack adjustment now that we have
4959 finished generating the sibling call sequence. */
4960
4961 restore_pending_stack_adjust (&save);
4962
4963 /* Prepare arg structure for next iteration. */
4964 for (i = 0; i < num_actuals; i++)
4965 {
4966 args[i].value = 0;
4967 args[i].aligned_regs = 0;
4968 args[i].stack = 0;
4969 }
4970
4971 sbitmap_free (stored_args_map);
4972 internal_arg_pointer_exp_state.scan_start = NULL;
4973 internal_arg_pointer_exp_state.cache.release ();
4974 }
4975 else
4976 {
4977 normal_call_insns = insns;
4978
4979 /* Verify that we've deallocated all the stack we used. */
4980 gcc_assert ((flags & ECF_NORETURN)
4981 || known_eq (old_stack_allocated,
4982 stack_pointer_delta
4983 - pending_stack_adjust));
4984 }
4985
4986 /* If something prevents making this a sibling call,
4987 zero out the sequence. */
4988 if (sibcall_failure)
4989 tail_call_insns = NULL;
4990 else
4991 break;
4992 }
4993
4994 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4995 arguments too, as argument area is now clobbered by the call. */
4996 if (tail_call_insns)
4997 {
4998 emit_insn (tail_call_insns);
4999 crtl->tail_call_emit = true;
5000 }
5001 else
5002 {
5003 emit_insn (normal_call_insns);
5004 if (try_tail_call)
5005 /* Ideally we'd emit a message for all of the ways that it could
5006 have failed. */
5007 maybe_complain_about_tail_call (exp, "tail call production failed");
5008 }
5009
5010 currently_expanding_call--;
5011
5012 free (stack_usage_map_buf);
5013 free (args);
5014 return target;
5015 }
5016
5017 /* A sibling call sequence invalidates any REG_EQUIV notes made for
5018 this function's incoming arguments.
5019
5020 At the start of RTL generation we know the only REG_EQUIV notes
5021 in the rtl chain are those for incoming arguments, so we can look
5022 for REG_EQUIV notes between the start of the function and the
5023 NOTE_INSN_FUNCTION_BEG.
5024
5025 This is (slight) overkill. We could keep track of the highest
5026 argument we clobber and be more selective in removing notes, but it
5027 does not seem to be worth the effort. */
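/* (This is normally run by the expander once the whole function body has
   been expanded, for functions where crtl->tail_call_emit was set above;
   the exact call site is outside this file.)  */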
5028
5029 void
5030 fixup_tail_calls (void)
5031 {
5032 rtx_insn *insn;
5033
5034 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5035 {
5036 rtx note;
5037
5038 /* There are never REG_EQUIV notes for the incoming arguments
5039 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
5040 if (NOTE_P (insn)
5041 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
5042 break;
5043
5044 note = find_reg_note (insn, REG_EQUIV, 0);
5045 if (note)
5046 remove_note (insn, note);
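	  /* An insn should carry at most one REG_EQUIV note, so nothing
	     must be left behind after the removal above.  */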
5047 note = find_reg_note (insn, REG_EQUIV, 0);
5048 gcc_assert (!note);
5049 }
5050 }
5051
5052 /* Traverse a list of TYPES and expand all complex types into their
5053 components. */
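/* For example, on a target whose split_complex_arg hook accepts complex
   double, a TREE_LIST of (complex double, int) is rewritten below as
   (double, double, int): the complex entry is replaced by its component
   type and an extra entry is inserted for the imaginary part.  (The types
   named here are purely illustrative.)  */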
5054 static tree
5055 split_complex_types (tree types)
5056 {
5057 tree p;
5058
5059 /* Before allocating memory, check for the common case of no complex. */
5060 for (p = types; p; p = TREE_CHAIN (p))
5061 {
5062 tree type = TREE_VALUE (p);
5063 if (TREE_CODE (type) == COMPLEX_TYPE
5064 && targetm.calls.split_complex_arg (type))
5065 goto found;
5066 }
5067 return types;
5068
5069 found:
5070 types = copy_list (types);
5071
5072 for (p = types; p; p = TREE_CHAIN (p))
5073 {
5074 tree complex_type = TREE_VALUE (p);
5075
5076 if (TREE_CODE (complex_type) == COMPLEX_TYPE
5077 && targetm.calls.split_complex_arg (complex_type))
5078 {
5079 tree next, imag;
5080
5081 /* Rewrite complex type with component type. */
5082 TREE_VALUE (p) = TREE_TYPE (complex_type);
5083 next = TREE_CHAIN (p);
5084
5085 /* Add another component type for the imaginary part. */
5086 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
5087 TREE_CHAIN (p) = imag;
5088 TREE_CHAIN (imag) = next;
5089
5090 /* Skip the newly created node. */
5091 p = TREE_CHAIN (p);
5092 }
5093 }
5094
5095 return types;
5096 }
5097 \f
5098 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
5099 for a value of mode OUTMODE,
5100 with NARGS different arguments, passed as ARGS.
5101 Store the return value if RETVAL is nonzero: store it in VALUE if
5102 VALUE is nonnull, otherwise pick a convenient location. In either
5103 case return the location of the stored value.
5104
5105 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5106 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
5107 other types of library calls. */
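/* A rough usage sketch, for illustration only (LIBFUNC, OP0 and OP1 are
   placeholders for the caller's SYMBOL_REF and operand rtxes; most callers
   go through the emit_library_call* wrappers in rtl.h rather than calling
   this function directly):

     rtx_mode_t args[] = { rtx_mode_t (op0, SImode),
                           rtx_mode_t (op1, SImode) };
     rtx res = emit_library_call_value_1 (1, libfunc, NULL_RTX, LCT_NORMAL,
                                          SImode, 2, args);  */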
5108
5109 rtx
5110 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
5111 enum libcall_type fn_type,
5112 machine_mode outmode, int nargs, rtx_mode_t *args)
5113 {
5114 /* Total size in bytes of all the stack-parms scanned so far. */
5115 struct args_size args_size;
5116 /* Size of arguments before any adjustments (such as rounding). */
5117 struct args_size original_args_size;
5118 int argnum;
5119 rtx fun;
5120 /* TODO: choose the correct decl type of ORGFUN. Sadly this information
5121 isn't present here, so we default to the native calling ABI. */
5122 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
5123 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
5124 int count;
5125 rtx argblock = 0;
5126 CUMULATIVE_ARGS args_so_far_v;
5127 cumulative_args_t args_so_far;
5128 struct arg
5129 {
5130 rtx value;
5131 machine_mode mode;
5132 rtx reg;
5133 int partial;
5134 struct locate_and_pad_arg_data locate;
5135 rtx save_area;
5136 };
5137 struct arg *argvec;
5138 int old_inhibit_defer_pop = inhibit_defer_pop;
5139 rtx call_fusage = 0;
5140 rtx mem_value = 0;
5141 rtx valreg;
5142 int pcc_struct_value = 0;
5143 poly_int64 struct_value_size = 0;
5144 int flags;
5145 int reg_parm_stack_space = 0;
5146 poly_int64 needed;
5147 rtx_insn *before_call;
5148 bool have_push_fusage;
5149 tree tfom; /* type_for_mode (outmode, 0) */
5150
5151 #ifdef REG_PARM_STACK_SPACE
5152 /* Define the boundary of the register parm stack space that needs to be
5153 saved, if any. */
5154 int low_to_save = 0, high_to_save = 0;
5155 rtx save_area = 0; /* Place that it is saved. */
5156 #endif
5157
5158 /* Save the incoming state of the outgoing-argument usage tracking so it can be restored after the call. */
5159 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
5160 char *initial_stack_usage_map = stack_usage_map;
5161 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
5162 char *stack_usage_map_buf = NULL;
5163
5164 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
5165
5166 #ifdef REG_PARM_STACK_SPACE
5167 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
5168 #endif
5169
5170 /* By default, library functions cannot throw. */
5171 flags = ECF_NOTHROW;
5172
5173 switch (fn_type)
5174 {
5175 case LCT_NORMAL:
5176 break;
5177 case LCT_CONST:
5178 flags |= ECF_CONST;
5179 break;
5180 case LCT_PURE:
5181 flags |= ECF_PURE;
5182 break;
5183 case LCT_NORETURN:
5184 flags |= ECF_NORETURN;
5185 break;
5186 case LCT_THROW:
5187 flags &= ~ECF_NOTHROW;
5188 break;
5189 case LCT_RETURNS_TWICE:
5190 flags = ECF_RETURNS_TWICE;
5191 break;
5192 }
5193 fun = orgfun;
5194
5195 /* Ensure current function's preferred stack boundary is at least
5196 what we need. */
5197 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
5198 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5199
5200 /* If this kind of value comes back in memory,
5201 decide where in memory it should come back. */
5202 if (outmode != VOIDmode)
5203 {
5204 tfom = lang_hooks.types.type_for_mode (outmode, 0);
5205 if (aggregate_value_p (tfom, 0))
5206 {
5207 #ifdef PCC_STATIC_STRUCT_RETURN
5208 rtx pointer_reg
5209 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
5210 mem_value = gen_rtx_MEM (outmode, pointer_reg);
5211 pcc_struct_value = 1;
5212 if (value == 0)
5213 value = gen_reg_rtx (outmode);
5214 #else /* not PCC_STATIC_STRUCT_RETURN */
5215 struct_value_size = GET_MODE_SIZE (outmode);
5216 if (value != 0 && MEM_P (value))
5217 mem_value = value;
5218 else
5219 mem_value = assign_temp (tfom, 1, 1);
5220 #endif
5221 /* This call returns a big structure. */
5222 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
5223 }
5224 }
5225 else
5226 tfom = void_type_node;
5227
5228 /* ??? Unfinished: must pass the memory address as an argument. */
5229
5230 /* Copy all the libcall arguments out of the ARGS array
5231 and into a vector ARGVEC.
5232
5233 Compute how to pass each argument. We only support a very small subset
5234 of the full argument passing conventions to limit complexity here since
5235 library functions shouldn't have many args. */
5236
5237 argvec = XALLOCAVEC (struct arg, nargs + 1);
5238 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
5239
5240 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
5241 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
5242 #else
5243 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
5244 #endif
5245 args_so_far = pack_cumulative_args (&args_so_far_v);
5246
5247 args_size.constant = 0;
5248 args_size.var = 0;
5249
5250 count = 0;
5251
5252 push_temp_slots ();
5253
5254 /* If there's a structure value address to be passed,
5255 either pass it in the special place, or pass it as an extra argument. */
5256 if (mem_value && struct_value == 0 && ! pcc_struct_value)
5257 {
5258 rtx addr = XEXP (mem_value, 0);
5259
5260 nargs++;
5261
5262 /* Make sure it is a reasonable operand for a move or push insn. */
5263 if (!REG_P (addr) && !MEM_P (addr)
5264 && !(CONSTANT_P (addr)
5265 && targetm.legitimate_constant_p (Pmode, addr)))
5266 addr = force_operand (addr, NULL_RTX);
5267
5268 argvec[count].value = addr;
5269 argvec[count].mode = Pmode;
5270 argvec[count].partial = 0;
5271
5272 function_arg_info ptr_arg (Pmode, /*named=*/true);
5273 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
5274 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
5275
5276 locate_and_pad_parm (Pmode, NULL_TREE,
5277 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5278 1,
5279 #else
5280 argvec[count].reg != 0,
5281 #endif
5282 reg_parm_stack_space, 0,
5283 NULL_TREE, &args_size, &argvec[count].locate);
5284
5285 if (argvec[count].reg == 0 || argvec[count].partial != 0
5286 || reg_parm_stack_space > 0)
5287 args_size.constant += argvec[count].locate.size.constant;
5288
5289 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
5290
5291 count++;
5292 }
5293
5294 for (unsigned int i = 0; count < nargs; i++, count++)
5295 {
5296 rtx val = args[i].first;
5297 function_arg_info arg (args[i].second, /*named=*/true);
5298 int unsigned_p = 0;
5299
5300 /* We cannot convert the arg value to the mode the library wants here;
5301 must do it earlier where we know the signedness of the arg. */
5302 gcc_assert (arg.mode != BLKmode
5303 && (GET_MODE (val) == arg.mode
5304 || GET_MODE (val) == VOIDmode));
5305
5306 /* Make sure it is a reasonable operand for a move or push insn. */
5307 if (!REG_P (val) && !MEM_P (val)
5308 && !(CONSTANT_P (val)
5309 && targetm.legitimate_constant_p (arg.mode, val)))
5310 val = force_operand (val, NULL_RTX);
5311
5312 if (pass_by_reference (&args_so_far_v, arg))
5313 {
5314 rtx slot;
5315 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
5316
5317 /* If this was a CONST function, it is now PURE since it now
5318 reads memory. */
5319 if (flags & ECF_CONST)
5320 {
5321 flags &= ~ECF_CONST;
5322 flags |= ECF_PURE;
5323 }
5324
5325 if (MEM_P (val) && !must_copy)
5326 {
5327 tree val_expr = MEM_EXPR (val);
5328 if (val_expr)
5329 mark_addressable (val_expr);
5330 slot = val;
5331 }
5332 else
5333 {
5334 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
5335 1, 1);
5336 emit_move_insn (slot, val);
5337 }
5338
5339 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5340 gen_rtx_USE (VOIDmode, slot),
5341 call_fusage);
5342 if (must_copy)
5343 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5344 gen_rtx_CLOBBER (VOIDmode,
5345 slot),
5346 call_fusage);
5347
5348 arg.mode = Pmode;
5349 arg.pass_by_reference = true;
5350 val = force_operand (XEXP (slot, 0), NULL_RTX);
5351 }
5352
5353 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5354 NULL_TREE, 0);
5355 argvec[count].mode = arg.mode;
5356 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5357 unsigned_p);
5358 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
5359
5360 argvec[count].partial
5361 = targetm.calls.arg_partial_bytes (args_so_far, arg);
5362
5363 if (argvec[count].reg == 0
5364 || argvec[count].partial != 0
5365 || reg_parm_stack_space > 0)
5366 {
5367 locate_and_pad_parm (arg.mode, NULL_TREE,
5368 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5369 1,
5370 #else
5371 argvec[count].reg != 0,
5372 #endif
5373 reg_parm_stack_space, argvec[count].partial,
5374 NULL_TREE, &args_size, &argvec[count].locate);
5375 args_size.constant += argvec[count].locate.size.constant;
5376 gcc_assert (!argvec[count].locate.size.var);
5377 }
5378 #ifdef BLOCK_REG_PADDING
5379 else
5380 /* The argument is passed entirely in registers. See at which
5381 end it should be padded. */
5382 argvec[count].locate.where_pad =
5383 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5384 known_le (GET_MODE_SIZE (arg.mode),
5385 UNITS_PER_WORD));
5386 #endif
5387
5388 targetm.calls.function_arg_advance (args_so_far, arg);
5389 }
5390
5391 for (int i = 0; i < nargs; i++)
5392 if (reg_parm_stack_space > 0
5393 || argvec[i].reg == 0
5394 || argvec[i].partial != 0)
5395 update_stack_alignment_for_call (&argvec[i].locate);
5396
5397 /* If this machine requires an external definition for library
5398 functions, write one out. */
5399 assemble_external_libcall (fun);
5400
5401 original_args_size = args_size;
5402 args_size.constant = (aligned_upper_bound (args_size.constant
5403 + stack_pointer_delta,
5404 STACK_BYTES)
5405 - stack_pointer_delta);
5406
5407 args_size.constant = upper_bound (args_size.constant,
5408 reg_parm_stack_space);
5409
5410 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5411 args_size.constant -= reg_parm_stack_space;
5412
5413 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5414 args_size.constant);
5415
5416 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
5417 {
5418 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5419 current_function_pushed_stack_size
5420 = upper_bound (current_function_pushed_stack_size, pushed);
5421 }
5422
5423 if (ACCUMULATE_OUTGOING_ARGS)
5424 {
5425 /* Since the stack pointer will never be pushed, it is possible for
5426 the evaluation of a parm to clobber something we have already
5427 written to the stack. Since most function calls on RISC machines
5428 do not use the stack, this is uncommon, but must work correctly.
5429
5430 Therefore, we save any area of the stack that was already written
5431 and that we are using. Here we set up to do this by making a new
5432 stack usage map from the old one.
5433
5434 Another approach might be to try to reorder the argument
5435 evaluations to avoid this conflicting stack usage. */
5436
5437 needed = args_size.constant;
5438
5439 /* Since we will be writing into the entire argument area, the
5440 map must be allocated for its entire size, not just the part that
5441 is the responsibility of the caller. */
5442 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5443 needed += reg_parm_stack_space;
5444
5445 poly_int64 limit = needed;
5446 if (ARGS_GROW_DOWNWARD)
5447 limit += 1;
5448
5449 /* For polynomial sizes, this is the maximum possible size needed
5450 for arguments with a constant size and offset. */
5451 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5452 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5453 const_limit);
5454
5455 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5456 stack_usage_map = stack_usage_map_buf;
5457
5458 if (initial_highest_arg_in_use)
5459 memcpy (stack_usage_map, initial_stack_usage_map,
5460 initial_highest_arg_in_use);
5461
5462 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5463 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5464 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5465 needed = 0;
5466
5467 /* We must be careful to use virtual regs before they're instantiated,
5468 and real regs afterwards. Loop optimization, for example, can create
5469 new libcalls after we've instantiated the virtual regs, and if we
5470 use virtuals anyway, they won't match the rtl patterns. */
5471
5472 if (virtuals_instantiated)
5473 argblock = plus_constant (Pmode, stack_pointer_rtx,
5474 STACK_POINTER_OFFSET);
5475 else
5476 argblock = virtual_outgoing_args_rtx;
5477 }
5478 else
5479 {
5480 if (!PUSH_ARGS)
5481 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5482 }
5483
5484 /* We push args individually in reverse order, performing stack alignment
5485 before the first push (the last arg). */
5486 if (argblock == 0)
5487 anti_adjust_stack (gen_int_mode (args_size.constant
5488 - original_args_size.constant,
5489 Pmode));
5490
5491 argnum = nargs - 1;
5492
5493 #ifdef REG_PARM_STACK_SPACE
5494 if (ACCUMULATE_OUTGOING_ARGS)
5495 {
5496 /* The argument list is the property of the called routine and the
5497 callee may clobber it. If the fixed area has been used for previous
5498 parameters, we must save and restore it. */
5499 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5500 &low_to_save, &high_to_save);
5501 }
5502 #endif
5503
5504 /* When expanding a normal call, args are stored in push order,
5505 which is the reverse of what we have here. */
5506 bool any_regs = false;
5507 for (int i = nargs; i-- > 0; )
5508 if (argvec[i].reg != NULL_RTX)
5509 {
5510 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5511 any_regs = true;
5512 }
5513 if (!any_regs)
5514 targetm.calls.call_args (pc_rtx, NULL_TREE);
5515
5516 /* Push the args that need to be pushed. */
5517
5518 have_push_fusage = false;
5519
5520 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5521 are to be pushed. */
5522 for (count = 0; count < nargs; count++, argnum--)
5523 {
5524 machine_mode mode = argvec[argnum].mode;
5525 rtx val = argvec[argnum].value;
5526 rtx reg = argvec[argnum].reg;
5527 int partial = argvec[argnum].partial;
5528 unsigned int parm_align = argvec[argnum].locate.boundary;
5529 poly_int64 lower_bound = 0, upper_bound = 0;
5530
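	/* Arguments that go entirely in registers are loaded by the loop
	   further down; anything passed wholly or partly on the stack is
	   pushed here, saving whatever already occupies that area first
	   if necessary.  */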
5531 if (! (reg != 0 && partial == 0))
5532 {
5533 rtx use;
5534
5535 if (ACCUMULATE_OUTGOING_ARGS)
5536 {
5537 /* If this is being stored into a pre-allocated, fixed-size
5538 stack area, save any previous data at that location. */
5539
5540 if (ARGS_GROW_DOWNWARD)
5541 {
5542 /* The slot offset is negative, but we want to index stack_usage_map
5543 with positive values. */
5544 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5545 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5546 }
5547 else
5548 {
5549 lower_bound = argvec[argnum].locate.slot_offset.constant;
5550 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5551 }
5552
5553 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5554 reg_parm_stack_space))
5555 {
5556 /* We need to make a save area. */
5557 poly_uint64 size
5558 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5559 machine_mode save_mode
5560 = int_mode_for_size (size, 1).else_blk ();
5561 rtx adr
5562 = plus_constant (Pmode, argblock,
5563 argvec[argnum].locate.offset.constant);
5564 rtx stack_area
5565 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5566
5567 if (save_mode == BLKmode)
5568 {
5569 argvec[argnum].save_area
5570 = assign_stack_temp (BLKmode,
5571 argvec[argnum].locate.size.constant);
5573
5574 emit_block_move (validize_mem
5575 (copy_rtx (argvec[argnum].save_area)),
5576 stack_area,
5577 (gen_int_mode
5578 (argvec[argnum].locate.size.constant,
5579 Pmode)),
5580 BLOCK_OP_CALL_PARM);
5581 }
5582 else
5583 {
5584 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5585
5586 emit_move_insn (argvec[argnum].save_area, stack_area);
5587 }
5588 }
5589 }
5590
5591 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5592 partial, reg, 0, argblock,
5593 (gen_int_mode
5594 (argvec[argnum].locate.offset.constant, Pmode)),
5595 reg_parm_stack_space,
5596 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5597
5598 /* Now mark the segment we just used. */
5599 if (ACCUMULATE_OUTGOING_ARGS)
5600 mark_stack_region_used (lower_bound, upper_bound);
5601
5602 NO_DEFER_POP;
5603
5604 /* Indicate argument access so that alias.c knows that these
5605 values are live. */
5606 if (argblock)
5607 use = plus_constant (Pmode, argblock,
5608 argvec[argnum].locate.offset.constant);
5609 else if (have_push_fusage)
5610 continue;
5611 else
5612 {
5613 /* When arguments are pushed, trying to tell alias.c where
5614 exactly this argument is won't work, because the
5615 auto-increment causes confusion. So we merely indicate
5616 that we access something with a known mode somewhere on
5617 the stack. */
5618 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5619 gen_rtx_SCRATCH (Pmode));
5620 have_push_fusage = true;
5621 }
5622 use = gen_rtx_MEM (argvec[argnum].mode, use);
5623 use = gen_rtx_USE (VOIDmode, use);
5624 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5625 }
5626 }
5627
5628 argnum = nargs - 1;
5629
5630 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5631
5632 /* Now load any reg parms into their regs. */
5633
5634 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5635 are to be pushed. */
5636 for (count = 0; count < nargs; count++, argnum--)
5637 {
5638 machine_mode mode = argvec[argnum].mode;
5639 rtx val = argvec[argnum].value;
5640 rtx reg = argvec[argnum].reg;
5641 int partial = argvec[argnum].partial;
5642
5643 /* Handle calls that pass values in multiple non-contiguous
5644 locations. The PA64 has examples of this for library calls. */
5645 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5646 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5647 else if (reg != 0 && partial == 0)
5648 {
5649 emit_move_insn (reg, val);
5650 #ifdef BLOCK_REG_PADDING
5651 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5652
5653 /* Copied from load_register_parameters. */
5654
5655 /* Handle the case where we have a value that needs shifting
5656 up to the msb, e.g. a QImode value padded
5657 upward on a BYTES_BIG_ENDIAN machine. */
5658 if (known_lt (size, UNITS_PER_WORD)
5659 && (argvec[argnum].locate.where_pad
5660 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5661 {
5662 rtx x;
5663 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5664
5665 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5666 report the whole reg as used. Strictly speaking, the
5667 call only uses SIZE bytes at the msb end, but it doesn't
5668 seem worth generating rtl to say that. */
5669 reg = gen_rtx_REG (word_mode, REGNO (reg));
5670 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5671 if (x != reg)
5672 emit_move_insn (reg, x);
5673 }
5674 #endif
5675 }
5676
5677 NO_DEFER_POP;
5678 }
5679
5680 /* Any regs containing parms remain in use through the call. */
5681 for (count = 0; count < nargs; count++)
5682 {
5683 rtx reg = argvec[count].reg;
5684 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5685 use_group_regs (&call_fusage, reg);
5686 else if (reg != 0)
5687 {
5688 int partial = argvec[count].partial;
5689 if (partial)
5690 {
5691 int nregs;
5692 gcc_assert (partial % UNITS_PER_WORD == 0);
5693 nregs = partial / UNITS_PER_WORD;
5694 use_regs (&call_fusage, REGNO (reg), nregs);
5695 }
5696 else
5697 use_reg (&call_fusage, reg);
5698 }
5699 }
5700
5701 /* Pass the function the address in which to return a structure value. */
5702 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5703 {
5704 emit_move_insn (struct_value,
5705 force_reg (Pmode,
5706 force_operand (XEXP (mem_value, 0),
5707 NULL_RTX)));
5708 if (REG_P (struct_value))
5709 use_reg (&call_fusage, struct_value);
5710 }
5711
5712 /* Don't allow popping to be deferred, since then
5713 cse'ing of library calls could delete a call and leave the pop. */
5714 NO_DEFER_POP;
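  /* Pick the hard register, if any, in which the scalar result will
     arrive; when the value comes back in memory there is none.  */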
5715 valreg = (mem_value == 0 && outmode != VOIDmode
5716 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5717
5718 /* Stack must be properly aligned now. */
5719 gcc_assert (multiple_p (stack_pointer_delta,
5720 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5721
5722 before_call = get_last_insn ();
5723
5724 if (flag_callgraph_info)
5725 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5726
5727 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5728 will set inhibit_defer_pop to that value. */
5729 /* The return type is needed to decide how many bytes the function pops.
5730 Signedness plays no role in that, so for simplicity, we pretend it's
5731 always signed. We also assume that the list of arguments passed has
5732 no impact, so we pretend it is unknown. */
5733
5734 emit_call_1 (fun, NULL,
5735 get_identifier (XSTR (orgfun, 0)),
5736 build_function_type (tfom, NULL_TREE),
5737 original_args_size.constant, args_size.constant,
5738 struct_value_size,
5739 targetm.calls.function_arg (args_so_far,
5740 function_arg_info::end_marker ()),
5741 valreg,
5742 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5743
5744 if (flag_ipa_ra)
5745 {
5746 rtx datum = orgfun;
5747 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5748 rtx_call_insn *last = last_call_insn ();
5749 add_reg_note (last, REG_CALL_DECL, datum);
5750 }
5751
5752 /* Right-shift returned value if necessary. */
5753 if (!pcc_struct_value
5754 && TYPE_MODE (tfom) != BLKmode
5755 && targetm.calls.return_in_msb (tfom))
5756 {
5757 shift_return_value (TYPE_MODE (tfom), false, valreg);
5758 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5759 }
5760
5761 targetm.calls.end_call_args ();
5762
5763 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5764 that it should complain if nonvolatile values are live. For
5765 functions that cannot return, inform flow that control does not
5766 fall through. */
5767 if (flags & ECF_NORETURN)
5768 {
5769 /* The barrier must be emitted
5770 immediately after the CALL_INSN. Some ports emit more than
5771 just a CALL_INSN above, so we must search for it here. */
5772 rtx_insn *last = get_last_insn ();
5773 while (!CALL_P (last))
5774 {
5775 last = PREV_INSN (last);
5776 /* There was no CALL_INSN? */
5777 gcc_assert (last != before_call);
5778 }
5779
5780 emit_barrier_after (last);
5781 }
5782
5783 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5784 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5785 if (flags & ECF_NOTHROW)
5786 {
5787 rtx_insn *last = get_last_insn ();
5788 while (!CALL_P (last))
5789 {
5790 last = PREV_INSN (last);
5791 /* There was no CALL_INSN? */
5792 gcc_assert (last != before_call);
5793 }
5794
5795 make_reg_eh_region_note_nothrow_nononlocal (last);
5796 }
5797
5798 /* Now restore inhibit_defer_pop to its actual original value. */
5799 OK_DEFER_POP;
5800
5801 pop_temp_slots ();
5802
5803 /* Copy the value to the right place. */
5804 if (outmode != VOIDmode && retval)
5805 {
5806 if (mem_value)
5807 {
5808 if (value == 0)
5809 value = mem_value;
5810 if (value != mem_value)
5811 emit_move_insn (value, mem_value);
5812 }
5813 else if (GET_CODE (valreg) == PARALLEL)
5814 {
5815 if (value == 0)
5816 value = gen_reg_rtx (outmode);
5817 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5818 }
5819 else
5820 {
5821 /* Convert to the proper mode if a promotion has been active. */
5822 if (GET_MODE (valreg) != outmode)
5823 {
5824 int unsignedp = TYPE_UNSIGNED (tfom);
5825
5826 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5827 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5828 == GET_MODE (valreg));
5829 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5830 }
5831
5832 if (value != 0)
5833 emit_move_insn (value, valreg);
5834 else
5835 value = valreg;
5836 }
5837 }
5838
5839 if (ACCUMULATE_OUTGOING_ARGS)
5840 {
5841 #ifdef REG_PARM_STACK_SPACE
5842 if (save_area)
5843 restore_fixed_argument_area (save_area, argblock,
5844 high_to_save, low_to_save);
5845 #endif
5846
5847 /* If we saved any argument areas, restore them. */
5848 for (count = 0; count < nargs; count++)
5849 if (argvec[count].save_area)
5850 {
5851 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5852 rtx adr = plus_constant (Pmode, argblock,
5853 argvec[count].locate.offset.constant);
5854 rtx stack_area = gen_rtx_MEM (save_mode,
5855 memory_address (save_mode, adr));
5856
5857 if (save_mode == BLKmode)
5858 emit_block_move (stack_area,
5859 validize_mem
5860 (copy_rtx (argvec[count].save_area)),
5861 (gen_int_mode
5862 (argvec[count].locate.size.constant, Pmode)),
5863 BLOCK_OP_CALL_PARM);
5864 else
5865 emit_move_insn (stack_area, argvec[count].save_area);
5866 }
5867
5868 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5869 stack_usage_map = initial_stack_usage_map;
5870 stack_usage_watermark = initial_stack_usage_watermark;
5871 }
5872
5873 free (stack_usage_map_buf);
5874
5875 return value;
5877 }
5878 \f
5879
5880 /* Store a single argument for a function call
5881 into the register or memory area where it must be passed.
5882 *ARG describes the argument value and where to pass it.
5883
5884 ARGBLOCK is the address of the stack-block for all the arguments,
5885 or 0 on a machine where arguments are pushed individually.
5886
5887 FLAGS is the usual set of ECF_* call flags; ECF_MAY_BE_ALLOCA says this
5888 could be a call to `alloca', so we must be careful about how the stack is used.
5889
5890 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5891 argument. This is used with ACCUMULATE_OUTGOING_ARGS to indicate
5892 that we need not worry about saving and restoring the stack.
5893
5894 REG_PARM_STACK_SPACE is the size of the stack area reserved for arguments passed in registers.
5895
5896 Return nonzero if this arg should cause sibcall failure,
5897 zero otherwise. */
5898
5899 static int
5900 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5901 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5902 {
5903 tree pval = arg->tree_value;
5904 rtx reg = 0;
5905 int partial = 0;
5906 poly_int64 used = 0;
5907 poly_int64 lower_bound = 0, upper_bound = 0;
5908 int sibcall_failure = 0;
5909
5910 if (TREE_CODE (pval) == ERROR_MARK)
5911 return 1;
5912
5913 /* Push a new temporary level for any temporaries we make for
5914 this argument. */
5915 push_temp_slots ();
5916
5917 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5918 {
5919 /* If this is being stored into a pre-allocated, fixed-size stack area,
5920 save any previous data at that location. */
5921 if (argblock && ! variable_size && arg->stack)
5922 {
5923 if (ARGS_GROW_DOWNWARD)
5924 {
5925 /* stack_slot is negative, but we want to index stack_usage_map
5926 with positive values. */
5927 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5928 {
5929 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5930 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5931 }
5932 else
5933 upper_bound = 0;
5934
5935 lower_bound = upper_bound - arg->locate.size.constant;
5936 }
5937 else
5938 {
5939 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5940 {
5941 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5942 lower_bound = rtx_to_poly_int64 (offset);
5943 }
5944 else
5945 lower_bound = 0;
5946
5947 upper_bound = lower_bound + arg->locate.size.constant;
5948 }
5949
5950 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5951 reg_parm_stack_space))
5952 {
5953 /* We need to make a save area. */
5954 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5955 machine_mode save_mode
5956 = int_mode_for_size (size, 1).else_blk ();
5957 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5958 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5959
5960 if (save_mode == BLKmode)
5961 {
5962 arg->save_area
5963 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5964 preserve_temp_slots (arg->save_area);
5965 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5966 stack_area,
5967 (gen_int_mode
5968 (arg->locate.size.constant, Pmode)),
5969 BLOCK_OP_CALL_PARM);
5970 }
5971 else
5972 {
5973 arg->save_area = gen_reg_rtx (save_mode);
5974 emit_move_insn (arg->save_area, stack_area);
5975 }
5976 }
5977 }
5978 }
5979
5980 /* If this isn't going to be placed on both the stack and in registers,
5981 set up the register and number of words. */
5982 if (! arg->pass_on_stack)
5983 {
5984 if (flags & ECF_SIBCALL)
5985 reg = arg->tail_call_reg;
5986 else
5987 reg = arg->reg;
5988 partial = arg->partial;
5989 }
5990
5991 /* Being passed entirely in a register. We shouldn't be called in
5992 this case. */
5993 gcc_assert (reg == 0 || partial != 0);
5994
5995 /* If this arg needs special alignment, don't load the registers
5996 here. */
5997 if (arg->n_aligned_regs != 0)
5998 reg = 0;
5999
6000 /* If this is being passed partially in a register, we can't evaluate
6001 it directly into its stack slot. Otherwise, we can. */
6002 if (arg->value == 0)
6003 {
6004 /* stack_arg_under_construction is nonzero if a function argument is
6005 being evaluated directly into the outgoing argument list and
6006 expand_call must take special action to preserve the argument list
6007 if it is called recursively.
6008
6009 For scalar function arguments stack_usage_map is sufficient to
6010 determine which stack slots must be saved and restored. Scalar
6011 arguments in general have pass_on_stack == 0.
6012
6013 If this argument is initialized by a function which takes the
6014 address of the argument (a C++ constructor or a C function
6015 returning a BLKmode structure), then stack_usage_map is
6016 insufficient and expand_call must push the stack around the
6017 function call. Such arguments have pass_on_stack == 1.
6018
6019 Note that it is always safe to set stack_arg_under_construction,
6020 but this generates suboptimal code if set when not needed. */
6021
6022 if (arg->pass_on_stack)
6023 stack_arg_under_construction++;
6024
6025 arg->value = expand_expr (pval,
6026 (partial
6027 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
6028 ? NULL_RTX : arg->stack,
6029 VOIDmode, EXPAND_STACK_PARM);
6030
6031 /* If we are promoting the object, or if for any other reason the mode
6032 doesn't agree, convert the mode. */
6033
6034 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
6035 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
6036 arg->value, arg->unsignedp);
6037
6038 if (arg->pass_on_stack)
6039 stack_arg_under_construction--;
6040 }
6041
6042 /* Check for overlap with already clobbered argument area. */
6043 if ((flags & ECF_SIBCALL)
6044 && MEM_P (arg->value)
6045 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
6046 arg->locate.size.constant))
6047 sibcall_failure = 1;
6048
6049 /* Don't allow anything left on the stack from the computation
6050 of an argument to alloca. */
6051 if (flags & ECF_MAY_BE_ALLOCA)
6052 do_pending_stack_adjust ();
6053
6054 if (arg->value == arg->stack)
6055 /* If the value is already in the stack slot, we are done. */
6056 ;
6057 else if (arg->mode != BLKmode)
6058 {
6059 unsigned int parm_align;
6060
6061 /* Argument is a scalar, not entirely passed in registers.
6062 (If part is passed in registers, arg->partial says how much
6063 and emit_push_insn will take care of putting it there.)
6064
6065 Push it, and if its size is less than the
6066 amount of space allocated to it,
6067 also bump stack pointer by the additional space.
6068 Note that in C the default argument promotions
6069 will prevent such mismatches. */
6070
6071 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
6072 ? 0 : GET_MODE_SIZE (arg->mode));
6073
6074 /* Compute how much space the push instruction will push.
6075 On many machines, pushing a byte will advance the stack
6076 pointer by a halfword. */
6077 #ifdef PUSH_ROUNDING
6078 size = PUSH_ROUNDING (size);
6079 #endif
6080 used = size;
6081
6082 /* Compute how much space the argument should get:
6083 round up to a multiple of the alignment for arguments. */
6084 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6085 != PAD_NONE)
6086 /* At the moment we don't (need to) support ABIs for which the
6087 padding isn't known at compile time. In principle it should
6088 be easy to add though. */
6089 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
6090
6091 /* Compute the alignment of the pushed argument. */
6092 parm_align = arg->locate.boundary;
6093 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6094 == PAD_DOWNWARD)
6095 {
6096 poly_int64 pad = used - size;
6097 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
6098 if (pad_align != 0)
6099 parm_align = MIN (parm_align, pad_align);
6100 }
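      /* A worked example with illustrative figures (assuming PUSH_ROUNDING
	 leaves a 1-byte SIZE unchanged and PARM_BOUNDARY is 32): USED is
	 rounded up to 4 bytes, PAD is 3 bytes, whose known alignment is a
	 single byte, so PARM_ALIGN is capped at BITS_PER_UNIT.  */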
6101
6102 /* This isn't already where we want it on the stack, so put it there.
6103 This can either be done with push or copy insns. */
6104 if (maybe_ne (used, 0)
6105 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
6106 NULL_RTX, parm_align, partial, reg, used - size,
6107 argblock, ARGS_SIZE_RTX (arg->locate.offset),
6108 reg_parm_stack_space,
6109 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
6110 sibcall_failure = 1;
6111
6112 /* Unless this is a partially-in-register argument, the argument is now
6113 in the stack. */
6114 if (partial == 0)
6115 arg->value = arg->stack;
6116 }
6117 else
6118 {
6119 /* BLKmode, at least partly to be pushed. */
6120
6121 unsigned int parm_align;
6122 poly_int64 excess;
6123 rtx size_rtx;
6124
6125 /* Pushing a nonscalar.
6126 If part is passed in registers, PARTIAL says how much
6127 and emit_push_insn will take care of putting it there. */
6128
6129 /* Round its size up to a multiple
6130 of the allocation unit for arguments. */
6131
6132 if (arg->locate.size.var != 0)
6133 {
6134 excess = 0;
6135 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
6136 }
6137 else
6138 {
6139 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
6140 for BLKmode is careful to avoid it. */
6141 excess = (arg->locate.size.constant
6142 - arg_int_size_in_bytes (TREE_TYPE (pval))
6143 + partial);
6144 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
6145 NULL_RTX, TYPE_MODE (sizetype),
6146 EXPAND_NORMAL);
6147 }
6148
6149 parm_align = arg->locate.boundary;
6150
6151 /* When an argument is padded down, the block is aligned to
6152 PARM_BOUNDARY, but the actual argument isn't. */
6153 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6154 == PAD_DOWNWARD)
6155 {
6156 if (arg->locate.size.var)
6157 parm_align = BITS_PER_UNIT;
6158 else
6159 {
6160 unsigned int excess_align
6161 = known_alignment (excess) * BITS_PER_UNIT;
6162 if (excess_align != 0)
6163 parm_align = MIN (parm_align, excess_align);
6164 }
6165 }
6166
6167 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
6168 {
6169 /* emit_push_insn might not work properly if arg->value and
6170 argblock + arg->locate.offset areas overlap. */
6171 rtx x = arg->value;
6172 poly_int64 i = 0;
6173
6174 if (strip_offset (XEXP (x, 0), &i)
6175 == crtl->args.internal_arg_pointer)
6176 {
6177 /* arg->locate doesn't contain the pretend_args_size offset;
6178 it's part of argblock. Ensure we don't count it in I. */
6179 if (STACK_GROWS_DOWNWARD)
6180 i -= crtl->args.pretend_args_size;
6181 else
6182 i += crtl->args.pretend_args_size;
6183
6184 /* expand_call should ensure this. */
6185 gcc_assert (!arg->locate.offset.var
6186 && arg->locate.size.var == 0);
6187 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
6188
6189 if (known_eq (arg->locate.offset.constant, i))
6190 {
6191 /* Even though they appear to be at the same location,
6192 if part of the outgoing argument is in registers,
6193 they aren't really at the same location. Check for
6194 this by making sure that the incoming size is the
6195 same as the outgoing size. */
6196 if (maybe_ne (arg->locate.size.constant, size_val))
6197 sibcall_failure = 1;
6198 }
6199 else if (maybe_in_range_p (arg->locate.offset.constant,
6200 i, size_val))
6201 sibcall_failure = 1;
6202 /* Use arg->locate.size.constant instead of size_rtx
6203 because we only care about the part of the argument
6204 on the stack. */
6205 else if (maybe_in_range_p (i, arg->locate.offset.constant,
6206 arg->locate.size.constant))
6207 sibcall_failure = 1;
6208 }
6209 }
6210
6211 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
6212 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
6213 parm_align, partial, reg, excess, argblock,
6214 ARGS_SIZE_RTX (arg->locate.offset),
6215 reg_parm_stack_space,
6216 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
6217
6218 /* Unless this is a partially-in-register argument, the argument is now
6219 in the stack.
6220
6221 ??? Unlike the case above, in which we want the actual
6222 address of the data, so that we can load it directly into a
6223 register, here we want the address of the stack slot, so that
6224 it's properly aligned for word-by-word copying or something
6225 like that. It's not clear that this is always correct. */
6226 if (partial == 0)
6227 arg->value = arg->stack_slot;
6228 }
6229
6230 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
6231 {
6232 tree type = TREE_TYPE (arg->tree_value);
6233 arg->parallel_value
6234 = emit_group_load_into_temps (arg->reg, arg->value, type,
6235 int_size_in_bytes (type));
6236 }
6237
6238 /* Mark all slots this store used. */
6239 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
6240 && argblock && ! variable_size && arg->stack)
6241 mark_stack_region_used (lower_bound, upper_bound);
6242
6243 /* Once we have pushed something, pops can't safely
6244 be deferred during the rest of the arguments. */
6245 NO_DEFER_POP;
6246
6247 /* Free any temporary slots made in processing this argument. */
6248 pop_temp_slots ();
6249
6250 return sibcall_failure;
6251 }
6252
6253 /* Nonzero if we do not know how to pass ARG solely in registers. */
6254
6255 bool
6256 must_pass_in_stack_var_size (const function_arg_info &arg)
6257 {
6258 if (!arg.type)
6259 return false;
6260
6261 /* If the type has variable size... */
6262 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
6263 return true;
6264
6265 /* If the type is marked as addressable (it is required
6266 to be constructed into the stack)... */
6267 if (TREE_ADDRESSABLE (arg.type))
6268 return true;
6269
6270 return false;
6271 }
6272
6273 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
6274 takes trailing padding of a structure into account. */
6275 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
6276
6277 bool
6278 must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
6279 {
6280 if (!arg.type)
6281 return false;
6282
6283 /* If the type has variable size... */
6284 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
6285 return true;
6286
6287 /* If the type is marked as addressable (it is required
6288 to be constructed into the stack)... */
6289 if (TREE_ADDRESSABLE (arg.type))
6290 return true;
6291
6292 if (TYPE_EMPTY_P (arg.type))
6293 return false;
6294
6295 /* If the padding and mode of the type is such that a copy into
6296 a register would put it into the wrong part of the register. */
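   /* For instance, with a 32-bit PARM_BOUNDARY a 3-byte BLKmode aggregate
      leaves part of its slot as padding; if the target pads it in the
      direction tested below, a register copy would place the value at the
      wrong end of the word, so the stack must be used.  (The size here is
      illustrative only.)  */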
6297 if (arg.mode == BLKmode
6298 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
6299 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
6300 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
6301 return true;
6302
6303 return false;
6304 }
6305
6306 /* Return true if TYPE must be passed on the stack when passed to
6307 the "..." arguments of a function. */
6308
6309 bool
6310 must_pass_va_arg_in_stack (tree type)
6311 {
6312 function_arg_info arg (type, /*named=*/false);
6313 return targetm.calls.must_pass_in_stack (arg);
6314 }
6315
6316 /* Return true if FIELD is the C++17 empty base field that should
6317 be ignored for ABI calling convention decisions in order to
6318 maintain ABI compatibility between C++14 and earlier, which doesn't
6319 add this FIELD to classes with empty bases, and C++17 and later
6320 which does. */
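/* A minimal illustration (the C++ types are hypothetical):

     struct empty {};
     struct derived : empty { double d; };

   C++17 gives DERIVED an artificial FIELD_DECL for the EMPTY base, which
   C++14 did not; recognizing that field here lets the ABI code keep
   passing DERIVED as if it contained only the double.  */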
6321
6322 bool
6323 cxx17_empty_base_field_p (const_tree field)
6324 {
6325 return (DECL_FIELD_ABI_IGNORED (field)
6326 && DECL_ARTIFICIAL (field)
6327 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
6328 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
6329 }
6330
6331 /* Tell the garbage collector about GTY markers in this source file. */
6332 #include "gt-calls.h"