/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "rtl-chkp.h"
#include "intl.h"
#include "stringpool.h"
#include "attribs.h"
#include "builtins.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
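
/* As an illustrative example (the values are target-defined, not part
   of this file): with PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT
   == 8, STACK_BYTES evaluates to 16, so outgoing argument block sizes
   are rounded up to multiples of 16 bytes.  */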

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds the
     number of the special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound
     to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer within this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer in
	 ptr_mode.  In that case, it should be converted into address mode
	 (Pmode) to be a valid address for the memory rtx pattern.  See
	 PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}
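
/* Illustrative sketch (not part of this file) of the descriptor-based
   indirect call emitted above.  With bit_val == 1 and the two-word
   descriptor layout implied by the loads above, { static_chain,
   code_address }, the generated code behaves like:

     if (fp & 1)                            // tagged descriptor pointer
       {
	 chain = ((void **) (fp - 1))[0];   // load the static chain
	 fp    = ((void **) (fp - 1))[1];   // load the code address
       }
     // call *fp with chain in the static chain register
*/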

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call, funmem, pat;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg, GEN_INT (struct_value_size));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (n_popped > 0
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				GEN_INT (struct_value_size));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
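
/* For example (illustrative; the exact behavior is target-defined):
   for a call to

     __attribute__ ((stdcall)) int f (int, int);

   on 32-bit x86, targetm.calls.return_pops_args returns 8, so the 8
   argument bytes are accounted to the callee and no caller-side stack
   adjustment is emitted; for the default cdecl convention it returns 0
   and the caller pops (or defers popping) the arguments itself.  */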

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might
   allocate space from the stack, such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
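
/* Illustrative consequence (not part of this file): a file-scope
   declaration such as

     extern int sigsetjmp (void *, int);

   gets ECF_RETURNS_TWICE by name matching, whereas the same declaration
   at block scope does not, because its DECL_CONTEXT is not the
   translation unit.  */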

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
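
/* Illustrative example (the "fn spec" attribute is generated
   internally, e.g. by front ends, and is not user syntax): a spec
   string beginning with '1' makes decl_return_flags return
   ERF_RETURNS_ARG | 0, i.e. the function returns its first argument,
   while a leading 'm' yields ERF_NOALIAS for malloc-like functions
   that return new, non-aliased memory.  */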

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}

/* Return true when EXP contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}
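
/* The two predicates differ for calls that are not the recognized
   builtin: e.g. with -fno-builtin-alloca, a call to a file-scope extern
   function named "alloca" satisfies gimple_maybe_alloca_call_p (via
   special_function_p) but not gimple_alloca_call_p, which requires a
   BUILT_IN_NORMAL alloca variant.  (Illustrative note, not part of this
   file.)  */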

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
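
/* Illustrative mapping (not part of this file): for

     extern int f (int) __attribute__ ((const, nothrow, leaf));

   this returns ECF_CONST | ECF_NOTHROW | ECF_LEAF; marking f noreturn
   as well (TREE_THIS_VOLATILE) would add ECF_NORETURN and, because
   ECF_CONST is set, ECF_LOOPING_CONST_OR_PURE.  */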

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}
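
/* For instance (illustrative, not part of this file), in C++

     struct S { S (const S &); int i; };  // non-trivial copy ctor

   makes S TREE_ADDRESSABLE, so pass_by_reference returns true without
   consulting the target hook; likewise any variable-sized type, whose
   TYPE_SIZE is not an INTEGER_CST.  */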

/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}

/* The limit set by -Walloc-size-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (!alloc_object_size_limit)
    {
      alloc_object_size_limit = max_object_size ();

      if (warn_alloc_size_limit)
	{
	  char *end = NULL;
	  errno = 0;
	  unsigned HOST_WIDE_INT unit = 1;
	  unsigned HOST_WIDE_INT limit
	    = strtoull (warn_alloc_size_limit, &end, 10);

	  if (!errno)
	    {
	      if (end && *end)
		{
		  /* Numeric option arguments are at most INT_MAX.  Make it
		     possible to specify a larger value by accepting common
		     suffixes.  */
		  if (!strcmp (end, "kB"))
		    unit = 1000;
		  else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
		    unit = 1024;
		  else if (!strcmp (end, "MB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000;
		  else if (!strcasecmp (end, "MiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024;
		  else if (!strcasecmp (end, "GB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
		  else if (!strcasecmp (end, "GiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
		  else if (!strcasecmp (end, "TB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
		  else if (!strcasecmp (end, "TiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
		  else if (!strcasecmp (end, "PB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
		  else if (!strcasecmp (end, "PiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
		  else if (!strcasecmp (end, "EB"))
		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
			   * 1000;
		  else if (!strcasecmp (end, "EiB"))
		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
			   * 1024;
		  else
		    unit = 0;
		}

	      if (unit)
		{
		  widest_int w = wi::mul (limit, unit);
		  if (w < wi::to_widest (alloc_object_size_limit))
		    alloc_object_size_limit
		      = wide_int_to_tree (ptrdiff_type_node, w);
		}
	    }
	}
    }
  return alloc_object_size_limit;
}
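
/* Illustrative example (not part of this file): the option

     -Walloc-size-larger-than=32MiB

   parses as limit == 32 with end == "MiB", so unit == 1024 * 1024 and
   the cap becomes 33554432 bytes, provided that is below the default
   max_object_size ().  */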
1257 | ||
ae0a5f68 | 1258 | /* Return true when EXP's range can be determined and set RANGE[] to it |
e6a18b5a | 1259 | after adjusting it if necessary to make EXP a represents a valid size |
1260 | of object, or a valid size argument to an allocation function declared | |
1261 | with attribute alloc_size (whose argument may be signed), or to a string | |
1262 | manipulation function like memset. When ALLOW_ZERO is true, allow | |
1263 | returning a range of [0, 0] for a size in an anti-range [1, N] where | |
1264 | N > PTRDIFF_MAX. A zero range is a (nearly) invalid argument to | |
1265 | allocation functions like malloc but it is a valid argument to | |
1266 | functions like memset. */ | |
370e45b9 | 1267 | |
ae0a5f68 | 1268 | bool |
e6a18b5a | 1269 | get_size_range (tree exp, tree range[2], bool allow_zero /* = false */) |
370e45b9 | 1270 | { |
ae0a5f68 | 1271 | if (tree_fits_uhwi_p (exp)) |
370e45b9 | 1272 | { |
ae0a5f68 | 1273 | /* EXP is a constant. */ |
1274 | range[0] = range[1] = exp; | |
1275 | return true; | |
1276 | } | |
1277 | ||
e6a18b5a | 1278 | tree exptype = TREE_TYPE (exp); |
1279 | bool integral = INTEGRAL_TYPE_P (exptype); | |
1280 | ||
ae0a5f68 | 1281 | wide_int min, max; |
e6a18b5a | 1282 | enum value_range_type range_type; |
1283 | ||
1284 | if (TREE_CODE (exp) == SSA_NAME && integral) | |
1285 | range_type = get_range_info (exp, &min, &max); | |
1286 | else | |
1287 | range_type = VR_VARYING; | |
ae0a5f68 | 1288 | |
1289 | if (range_type == VR_VARYING) | |
1290 | { | |
e6a18b5a | 1291 | if (integral) |
1292 | { | |
1293 | /* Use the full range of the type of the expression when | |
1294 | no value range information is available. */ | |
1295 | range[0] = TYPE_MIN_VALUE (exptype); | |
1296 | range[1] = TYPE_MAX_VALUE (exptype); | |
1297 | return true; | |
1298 | } | |
1299 | ||
ae0a5f68 | 1300 | range[0] = NULL_TREE; |
1301 | range[1] = NULL_TREE; | |
1302 | return false; | |
1303 | } | |
1304 | ||
ae0a5f68 | 1305 | unsigned expprec = TYPE_PRECISION (exptype); |
ae0a5f68 | 1306 | |
1307 | bool signed_p = !TYPE_UNSIGNED (exptype); | |
1308 | ||
1309 | if (range_type == VR_ANTI_RANGE) | |
1310 | { | |
1311 | if (signed_p) | |
370e45b9 | 1312 | { |
e3d0f65c | 1313 | if (wi::les_p (max, 0)) |
370e45b9 | 1314 | { |
ae0a5f68 | 1315 | /* EXP is not in a strictly negative range. That means |
1316 | it must be in some (not necessarily strictly) positive | |
1317 | range which includes zero. Since in signed to unsigned | |
1318 | conversions negative values end up converted to large | |
1319 | positive values, and otherwise they are not valid sizes, | |
1320 | the resulting range is in both cases [0, TYPE_MAX]. */ | |
e3d0f65c | 1321 | min = wi::zero (expprec); |
1322 | max = wi::to_wide (TYPE_MAX_VALUE (exptype)); | |
370e45b9 | 1323 | } |
e3d0f65c | 1324 | else if (wi::les_p (min - 1, 0)) |
ae0a5f68 | 1325 | { |
1326 | /* EXP is not in a negative-positive range. That means EXP | |
1327 | is either negative, or greater than max. Since negative | |
1328 | sizes are invalid make the range [MAX + 1, TYPE_MAX]. */ | |
1329 | min = max + 1; | |
e3d0f65c | 1330 | max = wi::to_wide (TYPE_MAX_VALUE (exptype)); |
ae0a5f68 | 1331 | } |
1332 | else | |
1333 | { | |
1334 | max = min - 1; | |
e3d0f65c | 1335 | min = wi::zero (expprec); |
ae0a5f68 | 1336 | } |
1337 | } | |
e3d0f65c | 1338 | else if (wi::eq_p (0, min - 1)) |
ae0a5f68 | 1339 | { |
1340 | /* EXP is unsigned and not in the range [1, MAX]. That means | |
1341 | it's either zero or greater than MAX. Even though 0 would | |
e6a18b5a | 1342 | normally be detected by -Walloc-zero, unless ALLOW_ZERO |
1343 | is true, set the range to [MAX, TYPE_MAX] so that when MAX | |
1344 | is greater than the limit the whole range is diagnosed. */ | |
1345 | if (allow_zero) | |
1346 | min = max = wi::zero (expprec); | |
1347 | else | |
1348 | { | |
1349 | min = max + 1; | |
1350 | max = wi::to_wide (TYPE_MAX_VALUE (exptype)); | |
1351 | } | |
ae0a5f68 | 1352 | } |
1353 | else | |
1354 | { | |
1355 | max = min - 1; | |
e3d0f65c | 1356 | min = wi::zero (expprec); |
370e45b9 | 1357 | } |
1358 | } | |
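/* As a worked example of the anti-range handling above: for a signed
int EXP with anti-range ~[-3, 7] (i.e., EXP < -3 || EXP > 7),
MIN - 1 is negative, so the second branch applies and the result
is [8, INT_MAX]. */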
1359 | ||
ae0a5f68 | 1360 | range[0] = wide_int_to_tree (exptype, min); |
1361 | range[1] = wide_int_to_tree (exptype, max); | |
1362 | ||
1363 | return true; | |
370e45b9 | 1364 | } |
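/* A minimal usage sketch (the caller and SIZE_ARG below are
hypothetical):

tree rng[2];
if (get_size_range (size_arg, rng))
{
// rng[0] and rng[1] hold the inclusive INTEGER_CST bounds in
// SIZE_ARG's type; e.g. with no VRP data an unsigned int
// argument yields [0, UINT_MAX].
}
*/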
1365 | ||
1366 | /* Diagnose a call EXP to function FN decorated with attribute alloc_size | |
1367 | whose arguments, at positions given by IDX and with values given by | |
1368 | ARGS, exceed the maximum object size or cause an unsigned overflow | |
1369 | (wrapping) when multiplied. When ARGS[0] is null the function does | |
1370 | nothing. ARGS[1] may be null for functions like malloc, and non-null | |
1371 | for those like calloc decorated with a two-argument attribute alloc_size. */ | |
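/* For example, given the hypothetical declaration

void *my_calloc (size_t n, size_t sz)
__attribute__ ((alloc_size (1, 2)));

a call my_calloc (n, 8) where N's range is known to make N * 8
wrap around SIZE_MAX is diagnosed by -Walloc-size-larger-than=. */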
1372 | ||
1373 | void | |
1374 | maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2]) | |
1375 | { | |
1376 | /* The range each of the (up to) two arguments is known to be in. */ | |
1377 | tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } }; | |
1378 | ||
1379 | /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */ | |
1380 | tree maxobjsize = alloc_max_size (); | |
1381 | ||
1382 | location_t loc = EXPR_LOCATION (exp); | |
1383 | ||
1384 | bool warned = false; | |
1385 | ||
1386 | /* Validate each argument individually. */ | |
1387 | for (unsigned i = 0; i != 2 && args[i]; ++i) | |
1388 | { | |
1389 | if (TREE_CODE (args[i]) == INTEGER_CST) | |
1390 | { | |
1391 | argrange[i][0] = args[i]; | |
1392 | argrange[i][1] = args[i]; | |
1393 | ||
1394 | if (tree_int_cst_lt (args[i], integer_zero_node)) | |
1395 | { | |
1396 | warned = warning_at (loc, OPT_Walloc_size_larger_than_, | |
ae0a5f68 | 1397 | "%Kargument %i value %qE is negative", |
1398 | exp, idx[i] + 1, args[i]); | |
370e45b9 | 1399 | } |
1400 | else if (integer_zerop (args[i])) | |
1401 | { | |
1402 | /* Avoid issuing -Walloc-zero for allocation functions other | |
1403 | than __builtin_alloca that are declared with attribute | |
1404 | returns_nonnull because there's no portability risk. This | |
1405 | avoids warning for such calls to libiberty's xmalloc and | |
1406 | friends. | |
1407 | Also avoid issuing the warning for calls to function named | |
1408 | "alloca". */ | |
1409 | if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA | |
1410 | && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6) | |
1411 | || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA | |
1412 | && !lookup_attribute ("returns_nonnull", | |
1413 | TYPE_ATTRIBUTES (TREE_TYPE (fn))))) | |
1414 | warned = warning_at (loc, OPT_Walloc_zero, | |
ae0a5f68 | 1415 | "%Kargument %i value is zero", |
1416 | exp, idx[i] + 1); | |
370e45b9 | 1417 | } |
1418 | else if (tree_int_cst_lt (maxobjsize, args[i])) | |
1419 | { | |
1420 | /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98 | |
1421 | mode and with -fno-exceptions as a way to indicate array | |
1422 | size overflow. There's no good way to detect C++98 here | |
1423 | so avoid diagnosing these calls for all C++ modes. */ | |
1424 | if (i == 0 | |
1425 | && !args[1] | |
1426 | && lang_GNU_CXX () | |
1427 | && DECL_IS_OPERATOR_NEW (fn) | |
1428 | && integer_all_onesp (args[i])) | |
1429 | continue; | |
1430 | ||
1431 | warned = warning_at (loc, OPT_Walloc_size_larger_than_, | |
ae0a5f68 | 1432 | "%Kargument %i value %qE exceeds " |
370e45b9 | 1433 | "maximum object size %E", |
ae0a5f68 | 1434 | exp, idx[i] + 1, args[i], maxobjsize); |
370e45b9 | 1435 | } |
1436 | } | |
ae0a5f68 | 1437 | else if (TREE_CODE (args[i]) == SSA_NAME |
1438 | && get_size_range (args[i], argrange[i])) | |
370e45b9 | 1439 | { |
370e45b9 | 1440 | /* Diagnose the argument if its range is non-positive with | |
1441 | a negative lower bound, i.e., it can never be a valid size. */ | |
1442 | if (tree_int_cst_lt (argrange[i][0], integer_zero_node) | |
1443 | && tree_int_cst_le (argrange[i][1], integer_zero_node)) | |
1444 | { | |
1445 | warned = warning_at (loc, OPT_Walloc_size_larger_than_, | |
ae0a5f68 | 1446 | "%Kargument %i range [%E, %E] is negative", |
1447 | exp, idx[i] + 1, | |
1448 | argrange[i][0], argrange[i][1]); | |
370e45b9 | 1449 | } |
1450 | else if (tree_int_cst_lt (maxobjsize, argrange[i][0])) | |
1451 | { | |
1452 | warned = warning_at (loc, OPT_Walloc_size_larger_than_, | |
ae0a5f68 | 1453 | "%Kargument %i range [%E, %E] exceeds " |
370e45b9 | 1454 | "maximum object size %E", |
ae0a5f68 | 1455 | exp, idx[i] + 1, |
1456 | argrange[i][0], argrange[i][1], | |
370e45b9 | 1457 | maxobjsize); |
1458 | } | |
1459 | } | |
1460 | } | |
1461 | ||
1462 | if (!argrange[0][0]) | |
1463 | return; | |
1464 | ||
1465 | /* For a two-argument alloc_size, validate the product of the two | |
1466 | arguments if both of their values or ranges are known. */ | |
1467 | if (!warned && tree_fits_uhwi_p (argrange[0][0]) | |
1468 | && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0]) | |
1469 | && !integer_onep (argrange[0][0]) | |
1470 | && !integer_onep (argrange[1][0])) | |
1471 | { | |
1472 | /* Check for overflow in the product of a function decorated with | |
1473 | attribute alloc_size (X, Y). */ | |
1474 | unsigned szprec = TYPE_PRECISION (size_type_node); | |
1475 | wide_int x = wi::to_wide (argrange[0][0], szprec); | |
1476 | wide_int y = wi::to_wide (argrange[1][0], szprec); | |
1477 | ||
1478 | bool vflow; | |
1479 | wide_int prod = wi::umul (x, y, &vflow); | |
1480 | ||
1481 | if (vflow) | |
1482 | warned = warning_at (loc, OPT_Walloc_size_larger_than_, | |
ae0a5f68 | 1483 | "%Kproduct %<%E * %E%> of arguments %i and %i " |
370e45b9 | 1484 | "exceeds %<SIZE_MAX%>", |
ae0a5f68 | 1485 | exp, argrange[0][0], argrange[1][0], |
370e45b9 | 1486 | idx[0] + 1, idx[1] + 1); |
1487 | else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod)) | |
1488 | warned = warning_at (loc, OPT_Walloc_size_larger_than_, | |
ae0a5f68 | 1489 | "%Kproduct %<%E * %E%> of arguments %i and %i " |
370e45b9 | 1490 | "exceeds maximum object size %E", |
ae0a5f68 | 1491 | exp, argrange[0][0], argrange[1][0], |
370e45b9 | 1492 | idx[0] + 1, idx[1] + 1, |
1493 | maxobjsize); | |
1494 | ||
1495 | if (warned) | |
1496 | { | |
1497 | /* Print the full range of each of the two arguments to make | |
1498 | it clear when it is, in fact, in a range and not constant. */ | |
1499 | if (argrange[0][0] != argrange[0][1]) | |
1500 | inform (loc, "argument %i in the range [%E, %E]", | |
1501 | idx[0] + 1, argrange[0][0], argrange[0][1]); | |
1502 | if (argrange[1][0] != argrange[1][1]) | |
1503 | inform (loc, "argument %i in the range [%E, %E]", | |
1504 | idx[1] + 1, argrange[1][0], argrange[1][1]); | |
1505 | } | |
1506 | } | |
1507 | ||
1508 | if (warned) | |
1509 | { | |
1510 | location_t fnloc = DECL_SOURCE_LOCATION (fn); | |
1511 | ||
1512 | if (DECL_IS_BUILTIN (fn)) | |
1513 | inform (loc, | |
1514 | "in a call to built-in allocation function %qD", fn); | |
1515 | else | |
1516 | inform (fnloc, | |
1517 | "in a call to allocation function %qD declared here", fn); | |
1518 | } | |
1519 | } | |
1520 | ||
0c45740b | 1521 | /* If EXPR refers to a character array or pointer declared attribute |
1522 | nonstring return a decl for that array or pointer and set *REF to | |
1523 | the referenced enclosing object or pointer. Otherwise returns | |
1524 | null. */ | |
1525 | ||
1526 | tree | |
1527 | get_attr_nonstring_decl (tree expr, tree *ref) | |
1528 | { | |
1529 | tree decl = expr; | |
1530 | if (TREE_CODE (decl) == SSA_NAME) | |
1531 | { | |
1532 | gimple *def = SSA_NAME_DEF_STMT (decl); | |
1533 | ||
1534 | if (is_gimple_assign (def)) | |
1535 | { | |
1536 | tree_code code = gimple_assign_rhs_code (def); | |
1537 | if (code == ADDR_EXPR | |
1538 | || code == COMPONENT_REF | |
1539 | || code == VAR_DECL) | |
1540 | decl = gimple_assign_rhs1 (def); | |
1541 | } | |
1542 | else if (tree var = SSA_NAME_VAR (decl)) | |
1543 | decl = var; | |
1544 | } | |
1545 | ||
1546 | if (TREE_CODE (decl) == ADDR_EXPR) | |
1547 | decl = TREE_OPERAND (decl, 0); | |
1548 | ||
1549 | if (ref) | |
1550 | *ref = decl; | |
1551 | ||
1552 | if (TREE_CODE (decl) == COMPONENT_REF) | |
1553 | decl = TREE_OPERAND (decl, 1); | |
1554 | ||
1555 | if (DECL_P (decl) | |
1556 | && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl))) | |
1557 | return decl; | |
1558 | ||
1559 | return NULL_TREE; | |
1560 | } | |
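/* For example, for

char d[4] __attribute__ ((nonstring));

and an EXP referring to D (directly or via its address), the
function returns D's declaration and sets *REF accordingly. */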
1561 | ||
0c45740b | 1562 | /* Warn about passing a non-string array/pointer to a function that |
1563 | expects a nul-terminated string argument. */ | |
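/* For example, with

char d[4] __attribute__ ((nonstring));

strlen (d) is diagnosed, since D need not be nul-terminated, while
strncpy (dst, d, sizeof d) is not, since the explicit bound keeps
the access within D (DST being any ordinary character array). */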
1564 | ||
1565 | void | |
1566 | maybe_warn_nonstring_arg (tree fndecl, tree exp) | |
1567 | { | |
1568 | if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL) | |
1569 | return; | |
1570 | ||
1571 | bool with_bounds = CALL_WITH_BOUNDS_P (exp); | |
1572 | ||
1573 | /* The bound argument to a bounded string function like strncpy. */ | |
1574 | tree bound = NULL_TREE; | |
1575 | ||
1576 | /* It's safe to call "bounded" string functions with a non-string | |
1577 | argument since the functions provide an explicit bound for this | |
1578 | purpose. */ | |
1579 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1580 | { | |
1581 | case BUILT_IN_STPNCPY: | |
1582 | case BUILT_IN_STPNCPY_CHK: | |
1583 | case BUILT_IN_STRNCMP: | |
1584 | case BUILT_IN_STRNCASECMP: | |
1585 | case BUILT_IN_STRNCPY: | |
1586 | case BUILT_IN_STRNCPY_CHK: | |
1587 | bound = CALL_EXPR_ARG (exp, with_bounds ? 4 : 2); | |
1588 | break; | |
1589 | ||
1590 | case BUILT_IN_STRNDUP: | |
1591 | bound = CALL_EXPR_ARG (exp, with_bounds ? 2 : 1); | |
1592 | break; | |
1593 | ||
1594 | default: | |
1595 | break; | |
1596 | } | |
1597 | ||
1598 | /* Determine the range of the bound argument (if specified). */ | |
1599 | tree bndrng[2] = { NULL_TREE, NULL_TREE }; | |
1600 | if (bound) | |
1601 | get_size_range (bound, bndrng); | |
1602 | ||
1603 | /* Iterate over the built-in function's formal arguments and check | |
1604 | each const char* against the actual argument. If the actual | |
1605 | argument is declared attribute non-string issue a warning unless | |
1606 | the argument's maximum length is bounded. */ | |
1607 | function_args_iterator it; | |
1608 | function_args_iter_init (&it, TREE_TYPE (fndecl)); | |
1609 | ||
1610 | for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it)) | |
1611 | { | |
1612 | tree argtype = function_args_iter_cond (&it); | |
1613 | if (!argtype) | |
1614 | break; | |
1615 | ||
1616 | if (TREE_CODE (argtype) != POINTER_TYPE) | |
1617 | continue; | |
1618 | ||
1619 | argtype = TREE_TYPE (argtype); | |
1620 | ||
1621 | if (TREE_CODE (argtype) != INTEGER_TYPE | |
1622 | || !TYPE_READONLY (argtype)) | |
1623 | continue; | |
1624 | ||
1625 | argtype = TYPE_MAIN_VARIANT (argtype); | |
1626 | if (argtype != char_type_node) | |
1627 | continue; | |
1628 | ||
1629 | tree callarg = CALL_EXPR_ARG (exp, argno); | |
1630 | if (TREE_CODE (callarg) == ADDR_EXPR) | |
1631 | callarg = TREE_OPERAND (callarg, 0); | |
1632 | ||
1633 | /* See if the destination is declared with attribute "nonstring". */ | |
1634 | tree decl = get_attr_nonstring_decl (callarg); | |
1635 | if (!decl) | |
1636 | continue; | |
1637 | ||
1638 | tree type = TREE_TYPE (decl); | |
1639 | ||
1640 | offset_int wibnd = 0; | |
1641 | if (bndrng[0]) | |
1642 | wibnd = wi::to_offset (bndrng[0]); | |
1643 | ||
1644 | offset_int asize = wibnd; | |
1645 | ||
1646 | if (TREE_CODE (type) == ARRAY_TYPE) | |
1647 | if (tree arrbnd = TYPE_DOMAIN (type)) | |
1648 | { | |
1649 | if ((arrbnd = TYPE_MAX_VALUE (arrbnd))) | |
1650 | asize = wi::to_offset (arrbnd) + 1; | |
1651 | } | |
1652 | ||
1653 | location_t loc = EXPR_LOCATION (exp); | |
1654 | ||
1655 | bool warned = false; | |
1656 | ||
1657 | if (wi::ltu_p (asize, wibnd)) | |
1658 | warned = warning_at (loc, OPT_Wstringop_overflow_, | |
1659 | "%qD argument %i declared attribute %<nonstring%> " | |
1660 | "is smaller than the specified bound %E", | |
1661 | fndecl, argno + 1, bndrng[0]); | |
1662 | else if (!bound) | |
1663 | warned = warning_at (loc, OPT_Wstringop_overflow_, | |
1664 | "%qD argument %i declared attribute %<nonstring%>", | |
1665 | fndecl, argno + 1); | |
1666 | ||
1667 | if (warned) | |
1668 | inform (DECL_SOURCE_LOCATION (decl), | |
1669 | "argument %qD declared here", decl); | |
1670 | } | |
1671 | } | |
1672 | ||
b4a61e77 | 1673 | /* Issue an error if CALL_EXPR was flagged as requiring |
1674 | tail-call optimization. */ | |
1675 | ||
1676 | static void | |
1677 | maybe_complain_about_tail_call (tree call_expr, const char *reason) | |
1678 | { | |
1679 | gcc_assert (TREE_CODE (call_expr) == CALL_EXPR); | |
1680 | if (!CALL_EXPR_MUST_TAIL_CALL (call_expr)) | |
1681 | return; | |
1682 | ||
1683 | error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason); | |
1684 | } | |
1685 | ||
cb543c54 | 1686 | /* Fill in ARGS_SIZE and ARGS array based on the parameters found in |
48e1416a | 1687 | CALL_EXPR EXP. |
cb543c54 | 1688 | |
1689 | NUM_ACTUALS is the total number of parameters. | |
1690 | ||
1691 | N_NAMED_ARGS is the total number of named arguments. | |
1692 | ||
cd46caee | 1693 | STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return |
1694 | value, or null. | |
1695 | ||
cb543c54 | 1696 | FNDECL is the tree code for the target of this call (if known) |
1697 | ||
1698 | ARGS_SO_FAR holds state needed by the target to know where to place | |
1699 | the next argument. | |
1700 | ||
1701 | REG_PARM_STACK_SPACE is the number of bytes of stack space reserved | |
1702 | for arguments which are passed in registers. | |
1703 | ||
1704 | OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level | |
1705 | and may be modified by this routine. | |
1706 | ||
dfe08167 | 1707 | OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer |
47ae02b7 | 1708 | flags which may be modified by this routine. |
eaa112a0 | 1709 | |
4ee9c684 | 1710 | MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference |
1711 | that requires allocation of stack space. | |
1712 | ||
eaa112a0 | 1713 | CALL_FROM_THUNK_P is true if this call is the jump from a thunk to |
1714 | the thunked-to function. */ | |
cb543c54 | 1715 | |
1716 | static void | |
4c9e08a4 | 1717 | initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, |
1718 | struct arg_data *args, | |
1719 | struct args_size *args_size, | |
1720 | int n_named_args ATTRIBUTE_UNUSED, | |
cd46caee | 1721 | tree exp, tree struct_value_addr_value, |
d8b9c828 | 1722 | tree fndecl, tree fntype, |
39cba157 | 1723 | cumulative_args_t args_so_far, |
4c9e08a4 | 1724 | int reg_parm_stack_space, |
1725 | rtx *old_stack_level, int *old_pending_adj, | |
eaa112a0 | 1726 | int *must_preallocate, int *ecf_flags, |
4ee9c684 | 1727 | bool *may_tailcall, bool call_from_thunk_p) |
cb543c54 | 1728 | { |
39cba157 | 1729 | CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far); |
389dd41b | 1730 | location_t loc = EXPR_LOCATION (exp); |
cb543c54 | 1731 | |
1732 | /* Count arg position in order args appear. */ | |
1733 | int argpos; | |
1734 | ||
1735 | int i; | |
c87678e4 | 1736 | |
cb543c54 | 1737 | args_size->constant = 0; |
1738 | args_size->var = 0; | |
1739 | ||
058a1b7a | 1740 | bitmap_obstack_initialize (NULL); |
1741 | ||
cb543c54 | 1742 | /* In this loop, we consider args in the order they are written. |
bf29c577 | 1743 | We fill up ARGS from the back. */ |
cb543c54 | 1744 | |
bf29c577 | 1745 | i = num_actuals - 1; |
cd46caee | 1746 | { |
058a1b7a | 1747 | int j = i, ptr_arg = -1; |
cd46caee | 1748 | call_expr_arg_iterator iter; |
1749 | tree arg; | |
058a1b7a | 1750 | bitmap slots = NULL; |
cd46caee | 1751 | |
1752 | if (struct_value_addr_value) | |
1753 | { | |
1754 | args[j].tree_value = struct_value_addr_value; | |
bf29c577 | 1755 | j--; |
058a1b7a | 1756 | |
1757 | /* If we pass a structure address then we need to | |
1758 | create bounds for it. Since the created bounds are | |
1759 | a call statement, we expand them right here to avoid | |
1760 | fixing all the other places where they may be expanded. */ | |
1761 | if (CALL_WITH_BOUNDS_P (exp)) | |
1762 | { | |
1763 | args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ()); | |
1764 | args[j].tree_value | |
1765 | = chkp_make_bounds_for_struct_addr (struct_value_addr_value); | |
1766 | expand_expr_real (args[j].tree_value, args[j].value, VOIDmode, | |
1767 | EXPAND_NORMAL, 0, false); | |
1768 | args[j].pointer_arg = j + 1; | |
1769 | j--; | |
1770 | } | |
cd46caee | 1771 | } |
e66d763a | 1772 | argpos = 0; |
cd46caee | 1773 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
1774 | { | |
1775 | tree argtype = TREE_TYPE (arg); | |
058a1b7a | 1776 | |
1777 | /* Remember last param with pointer and associate it | |
1778 | with following pointer bounds. */ | |
1779 | if (CALL_WITH_BOUNDS_P (exp) | |
1780 | && chkp_type_has_pointer (argtype)) | |
1781 | { | |
1782 | if (slots) | |
1783 | BITMAP_FREE (slots); | |
1784 | ptr_arg = j; | |
1785 | if (!BOUNDED_TYPE_P (argtype)) | |
1786 | { | |
1787 | slots = BITMAP_ALLOC (NULL); | |
1788 | chkp_find_bound_slots (argtype, slots); | |
1789 | } | |
1790 | } | |
e66d763a | 1791 | else if (CALL_WITH_BOUNDS_P (exp) |
1792 | && pass_by_reference (NULL, TYPE_MODE (argtype), argtype, | |
1793 | argpos < n_named_args)) | |
1794 | { | |
1795 | if (slots) | |
1796 | BITMAP_FREE (slots); | |
1797 | ptr_arg = j; | |
1798 | } | |
058a1b7a | 1799 | else if (POINTER_BOUNDS_TYPE_P (argtype)) |
1800 | { | |
1801 | /* We expect bounds in instrumented calls only. | |
1802 | Otherwise it is a sign we lost the flag due to some optimization | |
1803 | and may emit call args incorrectly. */ | |
1804 | gcc_assert (CALL_WITH_BOUNDS_P (exp)); | |
1805 | ||
1806 | /* For structures look for the next available pointer. */ | |
1807 | if (ptr_arg != -1 && slots) | |
1808 | { | |
1809 | unsigned bnd_no = bitmap_first_set_bit (slots); | |
1810 | args[j].pointer_offset = | |
1811 | bnd_no * POINTER_SIZE / BITS_PER_UNIT; | |
1812 | ||
1813 | bitmap_clear_bit (slots, bnd_no); | |
1814 | ||
1815 | /* Check we have no more pointers in the structure. */ | |
1816 | if (bitmap_empty_p (slots)) | |
1817 | BITMAP_FREE (slots); | |
1818 | } | |
1819 | args[j].pointer_arg = ptr_arg; | |
1820 | ||
1821 | /* Check we covered all pointers in the previous | |
1822 | non-bounds arg. */ | |
1823 | if (!slots) | |
1824 | ptr_arg = -1; | |
1825 | } | |
1826 | else | |
1827 | ptr_arg = -1; | |
1828 | ||
cd46caee | 1829 | if (targetm.calls.split_complex_arg |
1830 | && argtype | |
1831 | && TREE_CODE (argtype) == COMPLEX_TYPE | |
1832 | && targetm.calls.split_complex_arg (argtype)) | |
1833 | { | |
1834 | tree subtype = TREE_TYPE (argtype); | |
cd46caee | 1835 | args[j].tree_value = build1 (REALPART_EXPR, subtype, arg); |
bf29c577 | 1836 | j--; |
cd46caee | 1837 | args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg); |
1838 | } | |
1839 | else | |
1840 | args[j].tree_value = arg; | |
bf29c577 | 1841 | j--; |
e66d763a | 1842 | argpos++; |
cd46caee | 1843 | } |
058a1b7a | 1844 | |
1845 | if (slots) | |
1846 | BITMAP_FREE (slots); | |
cd46caee | 1847 | } |
1848 | ||
058a1b7a | 1849 | bitmap_obstack_release (NULL); |
1850 | ||
370e45b9 | 1851 | /* Extract attribute alloc_size and if set, store the indices of |
1852 | the corresponding arguments in ALLOC_IDX, and then the actual | |
1853 | argument(s) at those indices in ALLOC_ARGS. */ | |
1854 | int alloc_idx[2] = { -1, -1 }; | |
1855 | if (tree alloc_size | |
1856 | = (fndecl ? lookup_attribute ("alloc_size", | |
1857 | TYPE_ATTRIBUTES (TREE_TYPE (fndecl))) | |
1858 | : NULL_TREE)) | |
1859 | { | |
1860 | tree args = TREE_VALUE (alloc_size); | |
1861 | alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1; | |
1862 | if (TREE_CHAIN (args)) | |
1863 | alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1; | |
1864 | } | |
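/* For example, for a FNDECL declared with attribute alloc_size (2),
ALLOC_IDX is now { 1, -1 }; only the second actual argument will
be recorded in ALLOC_ARGS below. */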
1865 | ||
1866 | /* Array for up to the two attribute alloc_size arguments. */ | |
1867 | tree alloc_args[] = { NULL_TREE, NULL_TREE }; | |
1868 | ||
cb543c54 | 1869 | /* I counts args in order (to be) pushed; ARGPOS counts in order written. */ |
bf29c577 | 1870 | for (argpos = 0; argpos < num_actuals; i--, argpos++) |
cb543c54 | 1871 | { |
cd46caee | 1872 | tree type = TREE_TYPE (args[i].tree_value); |
cb543c54 | 1873 | int unsignedp; |
3754d046 | 1874 | machine_mode mode; |
cb543c54 | 1875 | |
cb543c54 | 1876 | /* Replace erroneous argument with constant zero. */ |
4b72716d | 1877 | if (type == error_mark_node || !COMPLETE_TYPE_P (type)) |
cb543c54 | 1878 | args[i].tree_value = integer_zero_node, type = integer_type_node; |
1879 | ||
8df5a43d | 1880 | /* If TYPE is a transparent union or record, pass things the way |
1881 | we would pass the first field of the union or record. We have | |
1882 | already verified that the modes are the same. */ | |
1883 | if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE) | |
1884 | && TYPE_TRANSPARENT_AGGR (type)) | |
1885 | type = TREE_TYPE (first_field (type)); | |
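/* For example, with the hypothetical

union u { int *ip; const int *cip; } __attribute__ ((transparent_union));

an argument of type union u is passed exactly as its first
field, an int *, would be. */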
cb543c54 | 1886 | |
1887 | /* Decide where to pass this arg. | |
1888 | ||
1889 | args[i].reg is nonzero if all or part is passed in registers. | |
1890 | ||
1891 | args[i].partial is nonzero if part but not all is passed in registers, | |
f054eb3c | 1892 | and the exact value says how many bytes are passed in registers. |
cb543c54 | 1893 | |
1894 | args[i].pass_on_stack is nonzero if the argument must at least be | |
1895 | computed on the stack. It may then be loaded back into registers | |
1896 | if args[i].reg is nonzero. | |
1897 | ||
1898 | These decisions are driven by the FUNCTION_... macros and must agree | |
1899 | with those made by function.c. */ | |
1900 | ||
1901 | /* See if this argument should be passed by invisible reference. */ | |
39cba157 | 1902 | if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type), |
cc9b8628 | 1903 | type, argpos < n_named_args)) |
cb543c54 | 1904 | { |
41dc12b4 | 1905 | bool callee_copies; |
bc4577c4 | 1906 | tree base = NULL_TREE; |
41dc12b4 | 1907 | |
1908 | callee_copies | |
39cba157 | 1909 | = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type), |
13f08ee7 | 1910 | type, argpos < n_named_args); |
41dc12b4 | 1911 | |
1912 | /* If we're compiling a thunk, pass through invisible references | |
1913 | instead of making a copy. */ | |
eaa112a0 | 1914 | if (call_from_thunk_p |
41dc12b4 | 1915 | || (callee_copies |
1916 | && !TREE_ADDRESSABLE (type) | |
1917 | && (base = get_base_address (args[i].tree_value)) | |
d6230243 | 1918 | && TREE_CODE (base) != SSA_NAME |
41dc12b4 | 1919 | && (!DECL_P (base) || MEM_P (DECL_RTL (base))))) |
cb543c54 | 1920 | { |
6b7d03d8 | 1921 | /* We may have turned the parameter value into an SSA name. |
1922 | Go back to the original parameter so we can take the | |
1923 | address. */ | |
1924 | if (TREE_CODE (args[i].tree_value) == SSA_NAME) | |
1925 | { | |
1926 | gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value)); | |
1927 | args[i].tree_value = SSA_NAME_VAR (args[i].tree_value); | |
1928 | gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL); | |
1929 | } | |
9502706b | 1930 | /* Argument setup code may have copied the value to a register. We | |
1931 | revert that optimization now because the tail call code must | |
1932 | use the original location. */ | |
1933 | if (TREE_CODE (args[i].tree_value) == PARM_DECL | |
1934 | && !MEM_P (DECL_RTL (args[i].tree_value)) | |
1935 | && DECL_INCOMING_RTL (args[i].tree_value) | |
1936 | && MEM_P (DECL_INCOMING_RTL (args[i].tree_value))) | |
1937 | set_decl_rtl (args[i].tree_value, | |
1938 | DECL_INCOMING_RTL (args[i].tree_value)); | |
1939 | ||
006e2d5a | 1940 | mark_addressable (args[i].tree_value); |
1941 | ||
41dc12b4 | 1942 | /* We can't use sibcalls if a callee-copied argument is |
1943 | stored in the current function's frame. */ | |
1944 | if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base)) | |
b4a61e77 | 1945 | { |
1946 | *may_tailcall = false; | |
1947 | maybe_complain_about_tail_call (exp, | |
1948 | "a callee-copied argument is" | |
1949 | " stored in the current " | |
1950 | " function's frame"); | |
1951 | } | |
c71e72dd | 1952 | |
389dd41b | 1953 | args[i].tree_value = build_fold_addr_expr_loc (loc, |
1954 | args[i].tree_value); | |
41dc12b4 | 1955 | type = TREE_TYPE (args[i].tree_value); |
1956 | ||
9c2a0c05 | 1957 | if (*ecf_flags & ECF_CONST) |
1958 | *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE); | |
ce95a955 | 1959 | } |
cb543c54 | 1960 | else |
1961 | { | |
1962 | /* We make a copy of the object and pass the address to the | |
1963 | function being called. */ | |
1964 | rtx copy; | |
1965 | ||
4b72716d | 1966 | if (!COMPLETE_TYPE_P (type) |
4852b829 | 1967 | || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST |
1968 | || (flag_stack_check == GENERIC_STACK_CHECK | |
1969 | && compare_tree_int (TYPE_SIZE_UNIT (type), | |
1970 | STACK_CHECK_MAX_VAR_SIZE) > 0)) | |
cb543c54 | 1971 | { |
1972 | /* This is a variable-sized object. Make space on the stack | |
1973 | for it. */ | |
cd46caee | 1974 | rtx size_rtx = expr_size (args[i].tree_value); |
cb543c54 | 1975 | |
1976 | if (*old_stack_level == 0) | |
1977 | { | |
e9c97615 | 1978 | emit_stack_save (SAVE_BLOCK, old_stack_level); |
cb543c54 | 1979 | *old_pending_adj = pending_stack_adjust; |
1980 | pending_stack_adjust = 0; | |
1981 | } | |
1982 | ||
990495a7 | 1983 | /* We can pass TRUE as the 4th argument because we just |
1984 | saved the stack pointer and will restore it right after | |
1985 | the call. */ | |
5be42b39 | 1986 | copy = allocate_dynamic_stack_space (size_rtx, |
1987 | TYPE_ALIGN (type), | |
1988 | TYPE_ALIGN (type), | |
2b34677f | 1989 | max_int_size_in_bytes |
1990 | (type), | |
5be42b39 | 1991 | true); |
1992 | copy = gen_rtx_MEM (BLKmode, copy); | |
f7c44134 | 1993 | set_mem_attributes (copy, type, 1); |
cb543c54 | 1994 | } |
1995 | else | |
0ab48139 | 1996 | copy = assign_temp (type, 1, 0); |
cb543c54 | 1997 | |
292237f3 | 1998 | store_expr (args[i].tree_value, copy, 0, false, false); |
cb543c54 | 1999 | |
9c2a0c05 | 2000 | /* Just change the const function to pure and then let |
2001 | the next test clear the pure based on | |
2002 | callee_copies. */ | |
2003 | if (*ecf_flags & ECF_CONST) | |
2004 | { | |
2005 | *ecf_flags &= ~ECF_CONST; | |
2006 | *ecf_flags |= ECF_PURE; | |
2007 | } | |
2008 | ||
2009 | if (!callee_copies && *ecf_flags & ECF_PURE) | |
2010 | *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE); | |
41dc12b4 | 2011 | |
2012 | args[i].tree_value | |
389dd41b | 2013 | = build_fold_addr_expr_loc (loc, make_tree (type, copy)); |
41dc12b4 | 2014 | type = TREE_TYPE (args[i].tree_value); |
4ee9c684 | 2015 | *may_tailcall = false; |
b4a61e77 | 2016 | maybe_complain_about_tail_call (exp, |
2017 | "argument must be passed" | |
2018 | " by copying"); | |
cb543c54 | 2019 | } |
2020 | } | |
2021 | ||
78a8ed03 | 2022 | unsignedp = TYPE_UNSIGNED (type); |
3b2411a8 | 2023 | mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, |
2024 | fndecl ? TREE_TYPE (fndecl) : fntype, 0); | |
cb543c54 | 2025 | |
2026 | args[i].unsignedp = unsignedp; | |
2027 | args[i].mode = mode; | |
7a8d641b | 2028 | |
532d84ff | 2029 | targetm.calls.warn_parameter_passing_abi (args_so_far, type); |
2030 | ||
f387af4f | 2031 | args[i].reg = targetm.calls.function_arg (args_so_far, mode, type, |
2032 | argpos < n_named_args); | |
2033 | ||
058a1b7a | 2034 | if (args[i].reg && CONST_INT_P (args[i].reg)) |
2035 | { | |
2036 | args[i].special_slot = args[i].reg; | |
2037 | args[i].reg = NULL; | |
2038 | } | |
2039 | ||
7a8d641b | 2040 | /* If this is a sibling call and the machine has register windows, the |
2041 | register window has to be unwound before calling the routine, so | |
2042 | arguments have to go into the incoming registers. */ | |
f387af4f | 2043 | if (targetm.calls.function_incoming_arg != targetm.calls.function_arg) |
2044 | args[i].tail_call_reg | |
2045 | = targetm.calls.function_incoming_arg (args_so_far, mode, type, | |
2046 | argpos < n_named_args); | |
2047 | else | |
2048 | args[i].tail_call_reg = args[i].reg; | |
7a8d641b | 2049 | |
cb543c54 | 2050 | if (args[i].reg) |
2051 | args[i].partial | |
f054eb3c | 2052 | = targetm.calls.arg_partial_bytes (args_so_far, mode, type, |
2053 | argpos < n_named_args); | |
cb543c54 | 2054 | |
0336f0f0 | 2055 | args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type); |
cb543c54 | 2056 | |
2057 | /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]), | |
2058 | it means that we are to pass this arg in the register(s) designated | |
2059 | by the PARALLEL, but also to pass it in the stack. */ | |
2060 | if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL | |
2061 | && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0) | |
2062 | args[i].pass_on_stack = 1; | |
2063 | ||
2064 | /* If this is an addressable type, we must preallocate the stack | |
2065 | since we must evaluate the object into its final location. | |
2066 | ||
2067 | If this is to be passed in both registers and the stack, it is simpler | |
2068 | to preallocate. */ | |
2069 | if (TREE_ADDRESSABLE (type) | |
2070 | || (args[i].pass_on_stack && args[i].reg != 0)) | |
2071 | *must_preallocate = 1; | |
2072 | ||
058a1b7a | 2073 | /* No stack allocation and padding for bounds. */ |
2074 | if (POINTER_BOUNDS_P (args[i].tree_value)) | |
2075 | ; | |
cb543c54 | 2076 | /* Compute the stack-size of this argument. */ |
058a1b7a | 2077 | else if (args[i].reg == 0 || args[i].partial != 0 |
2078 | || reg_parm_stack_space > 0 | |
2079 | || args[i].pass_on_stack) | |
cb543c54 | 2080 | locate_and_pad_parm (mode, type, |
2081 | #ifdef STACK_PARMS_IN_REG_PARM_AREA | |
2082 | 1, | |
2083 | #else | |
2084 | args[i].reg != 0, | |
2085 | #endif | |
2e090bf6 | 2086 | reg_parm_stack_space, |
241399f6 | 2087 | args[i].pass_on_stack ? 0 : args[i].partial, |
2088 | fndecl, args_size, &args[i].locate); | |
0fee47f4 | 2089 | #ifdef BLOCK_REG_PADDING |
2090 | else | |
2091 | /* The argument is passed entirely in registers. See at which | |
2092 | end it should be padded. */ | |
2093 | args[i].locate.where_pad = | |
2094 | BLOCK_REG_PADDING (mode, type, | |
2095 | int_size_in_bytes (type) <= UNITS_PER_WORD); | |
2096 | #endif | |
c87678e4 | 2097 | |
cb543c54 | 2098 | /* Update ARGS_SIZE, the total stack space for args so far. */ |
2099 | ||
241399f6 | 2100 | args_size->constant += args[i].locate.size.constant; |
2101 | if (args[i].locate.size.var) | |
2102 | ADD_PARM_SIZE (*args_size, args[i].locate.size.var); | |
cb543c54 | 2103 | |
2104 | /* Increment ARGS_SO_FAR, which has info about which arg-registers | |
2105 | have been used, etc. */ | |
2106 | ||
f387af4f | 2107 | targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type), |
2108 | type, argpos < n_named_args); | |
370e45b9 | 2109 | |
2110 | /* Store argument values for functions decorated with attribute | |
2111 | alloc_size. */ | |
2112 | if (argpos == alloc_idx[0]) | |
2113 | alloc_args[0] = args[i].tree_value; | |
2114 | else if (argpos == alloc_idx[1]) | |
2115 | alloc_args[1] = args[i].tree_value; | |
2116 | } | |
2117 | ||
2118 | if (alloc_args[0]) | |
2119 | { | |
2120 | /* Check the arguments of functions decorated with attribute | |
2121 | alloc_size. */ | |
2122 | maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx); | |
cb543c54 | 2123 | } |
0c45740b | 2124 | |
2125 | /* Detect passing non-string arguments to functions expecting | |
2126 | nul-terminated strings. */ | |
2127 | maybe_warn_nonstring_arg (fndecl, exp); | |
cb543c54 | 2128 | } |
2129 | ||
cc45e5e8 | 2130 | /* Update ARGS_SIZE to contain the total size for the argument block. |
2131 | Return the original constant component of the argument block's size. | |
2132 | ||
2133 | REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved | |
2134 | for arguments passed in registers. */ | |
2135 | ||
2136 | static int | |
4c9e08a4 | 2137 | compute_argument_block_size (int reg_parm_stack_space, |
2138 | struct args_size *args_size, | |
60e2260d | 2139 | tree fndecl ATTRIBUTE_UNUSED, |
fa20f865 | 2140 | tree fntype ATTRIBUTE_UNUSED, |
4c9e08a4 | 2141 | int preferred_stack_boundary ATTRIBUTE_UNUSED) |
cc45e5e8 | 2142 | { |
2143 | int unadjusted_args_size = args_size->constant; | |
2144 | ||
4448f543 | 2145 | /* For accumulate outgoing args mode we don't need to align, since the frame |
2146 | will be already aligned. Align to STACK_BOUNDARY in order to prevent | |
35a3065a | 2147 | backends from generating misaligned frame sizes. */ |
4448f543 | 2148 | if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY) |
2149 | preferred_stack_boundary = STACK_BOUNDARY; | |
4448f543 | 2150 | |
cc45e5e8 | 2151 | /* Compute the actual size of the argument block required. The variable |
2152 | and constant sizes must be combined, the size may have to be rounded, | |
2153 | and there may be a minimum required size. */ | |
2154 | ||
2155 | if (args_size->var) | |
2156 | { | |
2157 | args_size->var = ARGS_SIZE_TREE (*args_size); | |
2158 | args_size->constant = 0; | |
2159 | ||
d0285dd8 | 2160 | preferred_stack_boundary /= BITS_PER_UNIT; |
2161 | if (preferred_stack_boundary > 1) | |
91b70175 | 2162 | { |
2163 | /* We don't handle this case yet. To handle it correctly we have | |
35a3065a | 2164 | to add the delta, round and subtract the delta. |
91b70175 | 2165 | Currently no machine description requires this support. */ |
231bd014 | 2166 | gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1))); |
91b70175 | 2167 | args_size->var = round_up (args_size->var, preferred_stack_boundary); |
2168 | } | |
cc45e5e8 | 2169 | |
2170 | if (reg_parm_stack_space > 0) | |
2171 | { | |
2172 | args_size->var | |
2173 | = size_binop (MAX_EXPR, args_size->var, | |
902de8ed | 2174 | ssize_int (reg_parm_stack_space)); |
cc45e5e8 | 2175 | |
cc45e5e8 | 2176 | /* The area corresponding to register parameters is not to count in |
2177 | the size of the block we need. So make the adjustment. */ | |
fa20f865 | 2178 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
63c68695 | 2179 | args_size->var |
2180 | = size_binop (MINUS_EXPR, args_size->var, | |
2181 | ssize_int (reg_parm_stack_space)); | |
cc45e5e8 | 2182 | } |
2183 | } | |
2184 | else | |
2185 | { | |
d0285dd8 | 2186 | preferred_stack_boundary /= BITS_PER_UNIT; |
60ecc450 | 2187 | if (preferred_stack_boundary < 1) |
2188 | preferred_stack_boundary = 1; | |
e39fae61 | 2189 | args_size->constant = (((args_size->constant |
91b70175 | 2190 | + stack_pointer_delta |
d0285dd8 | 2191 | + preferred_stack_boundary - 1) |
2192 | / preferred_stack_boundary | |
2193 | * preferred_stack_boundary) | |
91b70175 | 2194 | - stack_pointer_delta); |
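/* E.g. with PREFERRED_STACK_BOUNDARY of 128 bits (16 bytes here,
after the division above), args_size->constant of 20 and
stack_pointer_delta of 8, the expression yields
((20 + 8 + 15) / 16) * 16 - 8 == 24, so the sum of the delta
and the argument block remains 16-byte aligned. */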
cc45e5e8 | 2195 | |
2196 | args_size->constant = MAX (args_size->constant, | |
2197 | reg_parm_stack_space); | |
2198 | ||
fa20f865 | 2199 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
63c68695 | 2200 | args_size->constant -= reg_parm_stack_space; |
cc45e5e8 | 2201 | } |
2202 | return unadjusted_args_size; | |
2203 | } | |
2204 | ||
caa1595a | 2205 | /* Precompute parameters as needed for a function call. |
04707f1c | 2206 | |
dfe08167 | 2207 | FLAGS is mask of ECF_* constants. |
04707f1c | 2208 | |
04707f1c | 2209 | NUM_ACTUALS is the number of arguments. |
2210 | ||
c87678e4 | 2211 | ARGS is an array containing information for each argument; this |
2212 | routine fills in the INITIAL_VALUE and VALUE fields for each | |
2213 | precomputed argument. */ | |
04707f1c | 2214 | |
2215 | static void | |
2dd6f9ed | 2216 | precompute_arguments (int num_actuals, struct arg_data *args) |
04707f1c | 2217 | { |
2218 | int i; | |
2219 | ||
8c78c14b | 2220 | /* If this is a libcall, then precompute all arguments so that we do not |
67c155cb | 2221 | get extraneous instructions emitted as part of the libcall sequence. */ |
c5dc094f | 2222 | |
2223 | /* If we preallocated the stack space, and some arguments must be passed | |
2224 | on the stack, then we must precompute any parameter which contains a | |
2225 | function call which will store arguments on the stack. | |
2226 | Otherwise, evaluating the parameter may clobber previous parameters | |
2227 | which have already been stored into the stack. (We have code to avoid | |
2228 | such a case by saving the outgoing stack arguments, but it results in | |
2229 | worse code.) */ | |
2dd6f9ed | 2230 | if (!ACCUMULATE_OUTGOING_ARGS) |
67c155cb | 2231 | return; |
0d568ddf | 2232 | |
04707f1c | 2233 | for (i = 0; i < num_actuals; i++) |
67c155cb | 2234 | { |
3b2411a8 | 2235 | tree type; |
3754d046 | 2236 | machine_mode mode; |
701e46d0 | 2237 | |
2dd6f9ed | 2238 | if (TREE_CODE (args[i].tree_value) != CALL_EXPR) |
c5dc094f | 2239 | continue; |
2240 | ||
67c155cb | 2241 | /* If this is an addressable type, we cannot pre-evaluate it. */ |
3b2411a8 | 2242 | type = TREE_TYPE (args[i].tree_value); |
2243 | gcc_assert (!TREE_ADDRESSABLE (type)); | |
04707f1c | 2244 | |
67c155cb | 2245 | args[i].initial_value = args[i].value |
8ec3c5c2 | 2246 | = expand_normal (args[i].tree_value); |
04707f1c | 2247 | |
3b2411a8 | 2248 | mode = TYPE_MODE (type); |
67c155cb | 2249 | if (mode != args[i].mode) |
2250 | { | |
3b2411a8 | 2251 | int unsignedp = args[i].unsignedp; |
67c155cb | 2252 | args[i].value |
2253 | = convert_modes (args[i].mode, mode, | |
2254 | args[i].value, args[i].unsignedp); | |
3b2411a8 | 2255 | |
67c155cb | 2256 | /* CSE will replace this only if it contains args[i].value |
2257 | pseudo, so convert it down to the declared mode using | |
2258 | a SUBREG. */ | |
2259 | if (REG_P (args[i].value) | |
3b2411a8 | 2260 | && GET_MODE_CLASS (args[i].mode) == MODE_INT |
2261 | && promote_mode (type, mode, &unsignedp) != args[i].mode) | |
67c155cb | 2262 | { |
2263 | args[i].initial_value | |
2264 | = gen_lowpart_SUBREG (mode, args[i].value); | |
2265 | SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1; | |
5a9ccd1b | 2266 | SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp); |
67c155cb | 2267 | } |
67c155cb | 2268 | } |
2269 | } | |
04707f1c | 2270 | } |
2271 | ||
e717ffc2 | 2272 | /* Given the current state of MUST_PREALLOCATE and information about |
2273 | arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE, | |
2274 | compute and return the final value for MUST_PREALLOCATE. */ | |
2275 | ||
2276 | static int | |
48e1416a | 2277 | finalize_must_preallocate (int must_preallocate, int num_actuals, |
c2f47e15 | 2278 | struct arg_data *args, struct args_size *args_size) |
e717ffc2 | 2279 | { |
2280 | /* See if we have or want to preallocate stack space. | |
2281 | ||
2282 | If we would have to push a partially-in-regs parm | |
2283 | before other stack parms, preallocate stack space instead. | |
2284 | ||
2285 | If the size of some parm is not a multiple of the required stack | |
2286 | alignment, we must preallocate. | |
2287 | ||
2288 | If the total size of arguments that would otherwise create a copy in | |
2289 | a temporary (such as a CALL) is more than half the total argument list | |
2290 | size, preallocation is faster. | |
2291 | ||
2292 | Another reason to preallocate is if we have a machine (like the m88k) | |
2293 | where stack alignment is required to be maintained between every | |
2294 | pair of insns, not just when the call is made. However, we assume here | |
2295 | that such machines either do not have push insns (and hence preallocation | |
2296 | would occur anyway) or the problem is taken care of with | |
2297 | PUSH_ROUNDING. */ | |
2298 | ||
2299 | if (! must_preallocate) | |
2300 | { | |
2301 | int partial_seen = 0; | |
2302 | int copy_to_evaluate_size = 0; | |
2303 | int i; | |
2304 | ||
2305 | for (i = 0; i < num_actuals && ! must_preallocate; i++) | |
2306 | { | |
2307 | if (args[i].partial > 0 && ! args[i].pass_on_stack) | |
2308 | partial_seen = 1; | |
2309 | else if (partial_seen && args[i].reg == 0) | |
2310 | must_preallocate = 1; | |
058a1b7a | 2311 | /* We preallocate in case there are bounds passed |
2312 | in the bounds table to have precomputed address | |
2313 | for bounds association. */ | |
2314 | else if (POINTER_BOUNDS_P (args[i].tree_value) | |
2315 | && !args[i].reg) | |
2316 | must_preallocate = 1; | |
e717ffc2 | 2317 | |
2318 | if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode | |
2319 | && (TREE_CODE (args[i].tree_value) == CALL_EXPR | |
2320 | || TREE_CODE (args[i].tree_value) == TARGET_EXPR | |
2321 | || TREE_CODE (args[i].tree_value) == COND_EXPR | |
2322 | || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))) | |
2323 | copy_to_evaluate_size | |
2324 | += int_size_in_bytes (TREE_TYPE (args[i].tree_value)); | |
2325 | } | |
2326 | ||
2327 | if (copy_to_evaluate_size * 2 >= args_size->constant | |
2328 | && args_size->constant > 0) | |
2329 | must_preallocate = 1; | |
2330 | } | |
2331 | return must_preallocate; | |
2332 | } | |
cc45e5e8 | 2333 | |
f3012854 | 2334 | /* If we preallocated stack space, compute the address of each argument |
2335 | and store it into the ARGS array. | |
2336 | ||
c87678e4 | 2337 | We need not ensure it is a valid memory address here; it will be |
f3012854 | 2338 | validized when it is used. |
2339 | ||
2340 | ARGBLOCK is an rtx for the address of the outgoing arguments. */ | |
2341 | ||
2342 | static void | |
4c9e08a4 | 2343 | compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals) |
f3012854 | 2344 | { |
2345 | if (argblock) | |
2346 | { | |
2347 | rtx arg_reg = argblock; | |
2348 | int i, arg_offset = 0; | |
2349 | ||
2350 | if (GET_CODE (argblock) == PLUS) | |
2351 | arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1)); | |
2352 | ||
2353 | for (i = 0; i < num_actuals; i++) | |
2354 | { | |
241399f6 | 2355 | rtx offset = ARGS_SIZE_RTX (args[i].locate.offset); |
2356 | rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset); | |
f3012854 | 2357 | rtx addr; |
c5dc0c32 | 2358 | unsigned int align, boundary; |
c2ca1bab | 2359 | unsigned int units_on_stack = 0; |
3754d046 | 2360 | machine_mode partial_mode = VOIDmode; |
f3012854 | 2361 | |
2362 | /* Skip this parm if it will not be passed on the stack. */ | |
c2ca1bab | 2363 | if (! args[i].pass_on_stack |
2364 | && args[i].reg != 0 | |
2365 | && args[i].partial == 0) | |
f3012854 | 2366 | continue; |
2367 | ||
aed50899 | 2368 | if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value))) |
2369 | continue; | |
2370 | ||
058a1b7a | 2371 | /* Pointer Bounds are never passed on the stack. */ |
2372 | if (POINTER_BOUNDS_P (args[i].tree_value)) | |
2373 | continue; | |
2374 | ||
53fdf12a | 2375 | addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset); |
29c05e22 | 2376 | addr = plus_constant (Pmode, addr, arg_offset); |
c2ca1bab | 2377 | |
2378 | if (args[i].partial != 0) | |
2379 | { | |
2380 | /* Only part of the parameter is being passed on the stack. | |
2381 | Generate a simple memory reference of the correct size. */ | |
2382 | units_on_stack = args[i].locate.size.constant; | |
517be012 | 2383 | unsigned int bits_on_stack = units_on_stack * BITS_PER_UNIT; |
2384 | partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk (); | |
c2ca1bab | 2385 | args[i].stack = gen_rtx_MEM (partial_mode, addr); |
5b2a69fa | 2386 | set_mem_size (args[i].stack, units_on_stack); |
c2ca1bab | 2387 | } |
2388 | else | |
2389 | { | |
2390 | args[i].stack = gen_rtx_MEM (args[i].mode, addr); | |
2391 | set_mem_attributes (args[i].stack, | |
2392 | TREE_TYPE (args[i].tree_value), 1); | |
2393 | } | |
c5dc0c32 | 2394 | align = BITS_PER_UNIT; |
2395 | boundary = args[i].locate.boundary; | |
d7ab0e3d | 2396 | if (args[i].locate.where_pad != PAD_DOWNWARD) |
c5dc0c32 | 2397 | align = boundary; |
971ba038 | 2398 | else if (CONST_INT_P (offset)) |
c5dc0c32 | 2399 | { |
2400 | align = INTVAL (offset) * BITS_PER_UNIT | boundary; | |
ac29ece2 | 2401 | align = least_bit_hwi (align); |
c5dc0c32 | 2402 | } |
2403 | set_mem_align (args[i].stack, align); | |
f3012854 | 2404 | |
53fdf12a | 2405 | addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset); |
29c05e22 | 2406 | addr = plus_constant (Pmode, addr, arg_offset); |
c2ca1bab | 2407 | |
2408 | if (args[i].partial != 0) | |
2409 | { | |
2410 | /* Only part of the parameter is being passed on the stack. | |
2411 | Generate a simple memory reference of the correct size. | |
2412 | */ | |
2413 | args[i].stack_slot = gen_rtx_MEM (partial_mode, addr); | |
5b2a69fa | 2414 | set_mem_size (args[i].stack_slot, units_on_stack); |
c2ca1bab | 2415 | } |
2416 | else | |
2417 | { | |
2418 | args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr); | |
2419 | set_mem_attributes (args[i].stack_slot, | |
2420 | TREE_TYPE (args[i].tree_value), 1); | |
2421 | } | |
c5dc0c32 | 2422 | set_mem_align (args[i].stack_slot, args[i].locate.boundary); |
a9f2963b | 2423 | |
2424 | /* Function incoming arguments may overlap with sibling call | |
2425 | outgoing arguments and we cannot allow reordering of reads | |
2426 | from function arguments with stores to outgoing arguments | |
2427 | of sibling calls. */ | |
ab6ab77e | 2428 | set_mem_alias_set (args[i].stack, 0); |
2429 | set_mem_alias_set (args[i].stack_slot, 0); | |
f3012854 | 2430 | } |
2431 | } | |
2432 | } | |
c87678e4 | 2433 | |
f3012854 | 2434 | /* Given a FNDECL and EXP, return an rtx suitable for use as a target address |
2435 | in a call instruction. | |
2436 | ||
2437 | FNDECL is the tree node for the target function. For an indirect call | |
2438 | FNDECL will be NULL_TREE. | |
2439 | ||
95672afe | 2440 | ADDR is the operand 0 of CALL_EXPR for this call. */ |
f3012854 | 2441 | |
2442 | static rtx | |
4c9e08a4 | 2443 | rtx_for_function_call (tree fndecl, tree addr) |
f3012854 | 2444 | { |
2445 | rtx funexp; | |
2446 | ||
2447 | /* Get the function to call, in the form of RTL. */ | |
2448 | if (fndecl) | |
2449 | { | |
3d053e06 | 2450 | if (!TREE_USED (fndecl) && fndecl != current_function_decl) |
ea259bbe | 2451 | TREE_USED (fndecl) = 1; |
f3012854 | 2452 | |
2453 | /* Get a SYMBOL_REF rtx for the function address. */ | |
2454 | funexp = XEXP (DECL_RTL (fndecl), 0); | |
2455 | } | |
2456 | else | |
2457 | /* Generate an rtx (probably a pseudo-register) for the address. */ | |
2458 | { | |
2459 | push_temp_slots (); | |
8ec3c5c2 | 2460 | funexp = expand_normal (addr); |
c87678e4 | 2461 | pop_temp_slots (); /* FUNEXP can't be BLKmode. */ |
f3012854 | 2462 | } |
2463 | return funexp; | |
2464 | } | |
2465 | ||
3c56e0c1 | 2466 | /* Return the static chain for this function, if any. */ |
2467 | ||
2468 | rtx | |
2469 | rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p) | |
2470 | { | |
2471 | if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type)) | |
2472 | return NULL; | |
2473 | ||
2474 | return targetm.calls.static_chain (fndecl_or_type, incoming_p); | |
2475 | } | |
2476 | ||
74c02416 | 2477 | /* Internal state for internal_arg_pointer_based_exp and its helpers. */ |
2478 | static struct | |
2479 | { | |
2480 | /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan, | |
2481 | or NULL_RTX if none has been scanned yet. */ | |
3663becd | 2482 | rtx_insn *scan_start; |
74c02416 | 2483 | /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is |
2484 | based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the | |
2485 | pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it | |
2486 | with fixed offset, or PC if this is with variable or unknown offset. */ | |
f1f41a6c | 2487 | vec<rtx> cache; |
74c02416 | 2488 | } internal_arg_pointer_exp_state; |
2489 | ||
474ce66a | 2490 | static rtx internal_arg_pointer_based_exp (const_rtx, bool); |
74c02416 | 2491 | |
2492 | /* Helper function for internal_arg_pointer_based_exp. Scan insns in | |
2493 | the tail call sequence, starting with first insn that hasn't been | |
2494 | scanned yet, and note for each pseudo on the LHS whether it is based | |
2495 | on crtl->args.internal_arg_pointer or not, and what offset from that | |
2496 | pointer it has. */ | |
2497 | ||
2498 | static void | |
2499 | internal_arg_pointer_based_exp_scan (void) | |
2500 | { | |
3663becd | 2501 | rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start; |
74c02416 | 2502 | |
2503 | if (scan_start == NULL_RTX) | |
2504 | insn = get_insns (); | |
2505 | else | |
2506 | insn = NEXT_INSN (scan_start); | |
2507 | ||
2508 | while (insn) | |
2509 | { | |
2510 | rtx set = single_set (insn); | |
2511 | if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set))) | |
2512 | { | |
2513 | rtx val = NULL_RTX; | |
2514 | unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER; | |
2515 | /* Punt on pseudos set multiple times. */ | |
f1f41a6c | 2516 | if (idx < internal_arg_pointer_exp_state.cache.length () |
2517 | && (internal_arg_pointer_exp_state.cache[idx] | |
74c02416 | 2518 | != NULL_RTX)) |
2519 | val = pc_rtx; | |
2520 | else | |
2521 | val = internal_arg_pointer_based_exp (SET_SRC (set), false); | |
2522 | if (val != NULL_RTX) | |
2523 | { | |
f1f41a6c | 2524 | if (idx >= internal_arg_pointer_exp_state.cache.length ()) |
9af5ce0c | 2525 | internal_arg_pointer_exp_state.cache |
2526 | .safe_grow_cleared (idx + 1); | |
f1f41a6c | 2527 | internal_arg_pointer_exp_state.cache[idx] = val; |
74c02416 | 2528 | } |
2529 | } | |
2530 | if (NEXT_INSN (insn) == NULL_RTX) | |
2531 | scan_start = insn; | |
2532 | insn = NEXT_INSN (insn); | |
2533 | } | |
2534 | ||
2535 | internal_arg_pointer_exp_state.scan_start = scan_start; | |
2536 | } | |
2537 | ||
74c02416 | 2538 | /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return |
2539 | NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on | |
2540 | it with fixed offset, or PC if this is with variable or unknown offset. | |
2541 | TOPLEVEL is true if the function is invoked at the topmost level. */ | |
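/* For instance, given RTL of the form (plus (reg R) (const_int 16))
where pseudo R was earlier recorded as based on the internal arg
pointer at offset 0, the result is (const_int 16); a pseudo set
more than once yields PC. */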
2542 | ||
2543 | static rtx | |
474ce66a | 2544 | internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel) |
74c02416 | 2545 | { |
2546 | if (CONSTANT_P (rtl)) | |
2547 | return NULL_RTX; | |
2548 | ||
2549 | if (rtl == crtl->args.internal_arg_pointer) | |
2550 | return const0_rtx; | |
2551 | ||
2552 | if (REG_P (rtl) && HARD_REGISTER_P (rtl)) | |
2553 | return NULL_RTX; | |
2554 | ||
2555 | if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1))) | |
2556 | { | |
2557 | rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel); | |
2558 | if (val == NULL_RTX || val == pc_rtx) | |
2559 | return val; | |
29c05e22 | 2560 | return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1))); |
74c02416 | 2561 | } |
2562 | ||
2563 | /* When called at the topmost level, scan pseudo assignments in between the | |
2564 | last scanned instruction in the tail call sequence and the latest insn | |
2565 | in that sequence. */ | |
2566 | if (toplevel) | |
2567 | internal_arg_pointer_based_exp_scan (); | |
2568 | ||
2569 | if (REG_P (rtl)) | |
2570 | { | |
2571 | unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER; | |
f1f41a6c | 2572 | if (idx < internal_arg_pointer_exp_state.cache.length ()) |
2573 | return internal_arg_pointer_exp_state.cache[idx]; | |
74c02416 | 2574 | |
2575 | return NULL_RTX; | |
2576 | } | |
2577 | ||
474ce66a | 2578 | subrtx_iterator::array_type array; |
2579 | FOR_EACH_SUBRTX (iter, array, rtl, NONCONST) | |
2580 | { | |
2581 | const_rtx x = *iter; | |
2582 | if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX) | |
2583 | return pc_rtx; | |
2584 | if (MEM_P (x)) | |
2585 | iter.skip_subrtxes (); | |
2586 | } | |
74c02416 | 2587 | |
2588 | return NULL_RTX; | |
2589 | } | |
2590 | ||
ff6c0ab2 | 2591 | /* Return true if and only if SIZE storage units (usually bytes) |
2592 | starting from address ADDR overlap with the already-clobbered argument | |
2593 | area. This function is used to determine if we should give up a | |
2594 | sibcall. */ | |
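/* For example, if a 4-byte load for an outgoing argument would read
bytes [16, 20) of the incoming argument area and any of those bits
is set in STORED_ARGS_MAP, the load may see a value this call
sequence has already overwritten, so the sibcall must be abandoned. */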
2595 | ||
2596 | static bool | |
2597 | mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size) | |
2598 | { | |
2599 | HOST_WIDE_INT i; | |
74c02416 | 2600 | rtx val; |
ff6c0ab2 | 2601 | |
53c5d9d4 | 2602 | if (bitmap_empty_p (stored_args_map)) |
9ddeff7e | 2603 | return false; |
74c02416 | 2604 | val = internal_arg_pointer_based_exp (addr, true); |
2605 | if (val == NULL_RTX) | |
2606 | return false; | |
2607 | else if (val == pc_rtx) | |
cc0595c0 | 2608 | return true; |
ff6c0ab2 | 2609 | else |
74c02416 | 2610 | i = INTVAL (val); |
a8b58ffb | 2611 | |
2612 | if (STACK_GROWS_DOWNWARD) | |
2613 | i -= crtl->args.pretend_args_size; | |
2614 | else | |
2615 | i += crtl->args.pretend_args_size; | |
2616 | ||
ff6c0ab2 | 2617 | |
ccccd62c | 2618 | if (ARGS_GROW_DOWNWARD) |
2619 | i = -i - size; | |
2620 | ||
ff6c0ab2 | 2621 | if (size > 0) |
2622 | { | |
2623 | unsigned HOST_WIDE_INT k; | |
2624 | ||
2625 | for (k = 0; k < size; k++) | |
156093aa | 2626 | if (i + k < SBITMAP_SIZE (stored_args_map) |
08b7917c | 2627 | && bitmap_bit_p (stored_args_map, i + k)) |
ff6c0ab2 | 2628 | return true; |
2629 | } | |
2630 | ||
2631 | return false; | |
2632 | } | |
2633 | ||
cde25025 | 2634 | /* Do the register loads required for any wholly-register parms or any |
2635 | parms which are passed both on the stack and in a register. Their | |
c87678e4 | 2636 | expressions were already evaluated. |
cde25025 | 2637 | |
2638 | Mark all register-parms as living through the call, putting these USE | |
4c9e08a4 | 2639 | insns in the CALL_INSN_FUNCTION_USAGE field. |
2640 | ||
dc537795 | 2641 | When IS_SIBCALL, perform the check_sibcall_argument_overlap |
42b11544 | 2642 | checking, setting *SIBCALL_FAILURE if appropriate. */ |
cde25025 | 2643 | |
2644 | static void | |
4c9e08a4 | 2645 | load_register_parameters (struct arg_data *args, int num_actuals, |
2646 | rtx *call_fusage, int flags, int is_sibcall, | |
2647 | int *sibcall_failure) | |
cde25025 | 2648 | { |
2649 | int i, j; | |
2650 | ||
cde25025 | 2651 | for (i = 0; i < num_actuals; i++) |
cde25025 | 2652 | { |
0e0be288 | 2653 | rtx reg = ((flags & ECF_SIBCALL) |
2654 | ? args[i].tail_call_reg : args[i].reg); | |
cde25025 | 2655 | if (reg) |
2656 | { | |
5f4cd670 | 2657 | int partial = args[i].partial; |
2658 | int nregs; | |
2659 | int size = 0; | |
3663becd | 2660 | rtx_insn *before_arg = get_last_insn (); |
83272ab4 | 2661 | /* Set non-negative if we must move a word at a time, even if |
2662 | just one word (e.g., partial == 4 && mode == DFmode). Set | |
2663 | to -1 if we just use a normal move insn. This value can be | |
2664 | zero if the argument is a zero size structure. */ | |
5f4cd670 | 2665 | nregs = -1; |
f054eb3c | 2666 | if (GET_CODE (reg) == PARALLEL) |
2667 | ; | |
2668 | else if (partial) | |
2669 | { | |
2670 | gcc_assert (partial % UNITS_PER_WORD == 0); | |
2671 | nregs = partial / UNITS_PER_WORD; | |
2672 | } | |
5f4cd670 | 2673 | else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode) |
2674 | { | |
2675 | size = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); | |
2676 | nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; | |
2677 | } | |
2678 | else | |
2679 | size = GET_MODE_SIZE (args[i].mode); | |
cde25025 | 2680 | |
2681 | /* Handle calls that pass values in multiple non-contiguous | |
2682 | locations. The Irix 6 ABI has examples of this. */ | |
2683 | ||
2684 | if (GET_CODE (reg) == PARALLEL) | |
b600a907 | 2685 | emit_group_move (reg, args[i].parallel_value); |
cde25025 | 2686 | |
2687 | /* If simple case, just do move. If normal partial, store_one_arg | |
2688 | has already loaded the register for us. In all other cases, | |
2689 | load the register(s) from memory. */ | |
2690 | ||
8e67abab | 2691 | else if (nregs == -1) |
2692 | { | |
2693 | emit_move_insn (reg, args[i].value); | |
5f4cd670 | 2694 | #ifdef BLOCK_REG_PADDING |
8e67abab | 2695 | /* Handle case where we have a value that needs shifting |
2696 | up to the msb, e.g. a QImode value and we're padding | |
2697 | upward on a BYTES_BIG_ENDIAN machine. */ | |
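/* E.g. with UNITS_PER_WORD == 4 and a QImode argument, SHIFT
below is 24, moving the byte into the most significant
position of the word, where such a target expects it. */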
2698 | if (size < UNITS_PER_WORD | |
2699 | && (args[i].locate.where_pad | |
d7ab0e3d | 2700 | == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) |
8e67abab | 2701 | { |
8e67abab | 2702 | rtx x; |
2703 | int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; | |
b2abd798 | 2704 | |
2705 | /* Assigning REG here rather than a temp makes CALL_FUSAGE | |
2706 | report the whole reg as used. Strictly speaking, the | |
2707 | call only uses SIZE bytes at the msb end, but it doesn't | |
2708 | seem worth generating rtl to say that. */ | |
2709 | reg = gen_rtx_REG (word_mode, REGNO (reg)); | |
f5ff0b21 | 2710 | x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1); |
b2abd798 | 2711 | if (x != reg) |
2712 | emit_move_insn (reg, x); | |
8e67abab | 2713 | } |
5f4cd670 | 2714 | #endif |
8e67abab | 2715 | } |
cde25025 | 2716 | |
2717 | /* If we have pre-computed the values to put in the registers in | |
2718 | the case of non-aligned structures, copy them in now. */ | |
2719 | ||
2720 | else if (args[i].n_aligned_regs != 0) | |
2721 | for (j = 0; j < args[i].n_aligned_regs; j++) | |
2722 | emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j), | |
2723 | args[i].aligned_regs[j]); | |
2724 | ||
e2e0ef92 | 2725 | else if (partial == 0 || args[i].pass_on_stack) |
5f4cd670 | 2726 | { |
d2b9158b | 2727 | rtx mem = validize_mem (copy_rtx (args[i].value)); |
5f4cd670 | 2728 | |
e2e0ef92 | 2729 | /* Check for overlap with already clobbered argument area, |
2730 | provided that this has non-zero size. */ | |
ff6c0ab2 | 2731 | if (is_sibcall |
77478042 | 2732 | && size != 0 |
2733 | && (mem_overlaps_already_clobbered_arg_p | |
2734 | (XEXP (args[i].value, 0), size))) | |
ff6c0ab2 | 2735 | *sibcall_failure = 1; |
2736 | ||
72f2d6cc | 2737 | if (size % UNITS_PER_WORD == 0 |
2738 | || MEM_ALIGN (mem) % BITS_PER_WORD == 0) | |
2739 | move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); | |
2740 | else | |
2741 | { | |
2742 | if (nregs > 1) | |
2743 | move_block_to_reg (REGNO (reg), mem, nregs - 1, | |
2744 | args[i].mode); | |
2745 | rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1); | |
2746 | unsigned int bitoff = (nregs - 1) * BITS_PER_WORD; | |
2747 | unsigned int bitsize = size * BITS_PER_UNIT - bitoff; | |
292237f3 | 2748 | rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest, |
5d77cce2 | 2749 | word_mode, word_mode, false, |
2750 | NULL); | |
72f2d6cc | 2751 | if (BYTES_BIG_ENDIAN) |
2752 | x = expand_shift (LSHIFT_EXPR, word_mode, x, | |
2753 | BITS_PER_WORD - bitsize, dest, 1); | |
2754 | if (x != dest) | |
2755 | emit_move_insn (dest, x); | |
2756 | } | |
2757 | ||
5f4cd670 | 2758 | /* Handle a BLKmode that needs shifting. */ |
8e67abab | 2759 | if (nregs == 1 && size < UNITS_PER_WORD |
2c267f1a | 2760 | #ifdef BLOCK_REG_PADDING |
d7ab0e3d | 2761 | && args[i].locate.where_pad == PAD_DOWNWARD |
2c267f1a | 2762 | #else |
2763 | && BYTES_BIG_ENDIAN | |
2764 | #endif | |
72f2d6cc | 2765 | ) |
5f4cd670 | 2766 | { |
72f2d6cc | 2767 | rtx dest = gen_rtx_REG (word_mode, REGNO (reg)); |
5f4cd670 | 2768 | int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; |
72f2d6cc | 2769 | enum tree_code dir = (BYTES_BIG_ENDIAN |
2770 | ? RSHIFT_EXPR : LSHIFT_EXPR); | |
2771 | rtx x; | |
5f4cd670 | 2772 | |
72f2d6cc | 2773 | x = expand_shift (dir, word_mode, dest, shift, dest, 1); |
2774 | if (x != dest) | |
2775 | emit_move_insn (dest, x); | |
5f4cd670 | 2776 | } |
5f4cd670 | 2777 | } |
cde25025 | 2778 | |
42b11544 | 2779 | /* When a parameter is a block, and perhaps in other cases, it is |
2780 | possible that it did a load from an argument slot that was | |
6a8fa8e2 | 2781 | already clobbered. */ |
42b11544 | 2782 | if (is_sibcall |
2783 | && check_sibcall_argument_overlap (before_arg, &args[i], 0)) | |
2784 | *sibcall_failure = 1; | |
2785 | ||
cde25025 | 2786 | /* Handle calls that pass values in multiple non-contiguous |
2787 | locations. The Irix 6 ABI has examples of this. */ | |
2788 | if (GET_CODE (reg) == PARALLEL) | |
2789 | use_group_regs (call_fusage, reg); | |
2790 | else if (nregs == -1) | |
b4eeceb9 | 2791 | use_reg_mode (call_fusage, reg, |
2792 | TYPE_MODE (TREE_TYPE (args[i].tree_value))); | |
c75d013c | 2793 | else if (nregs > 0) |
2794 | use_regs (call_fusage, REGNO (reg), nregs); | |
cde25025 | 2795 | } |
2796 | } | |
2797 | } | |
2798 | ||
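A standalone sketch (hypothetical 4-byte-word target, not part of the original file) of the BLKmode arithmetic above, for a 6-byte argument:

#include <stdio.h>

int
main (void)
{
  const int units_per_word = 4, bits_per_unit = 8;
  const int bits_per_word = units_per_word * bits_per_unit;

  int size = 6;                             /* int_size_in_bytes result */
  int nregs = (size + units_per_word - 1) / units_per_word;   /* 2 words */
  int bitoff = (nregs - 1) * bits_per_word;                   /* 32 bits */
  int bitsize = size * bits_per_unit - bitoff;   /* 16 meaningful bits in
                                                    the last, partial word */
  /* On a big-endian target the partial word is then shifted left by
     bits_per_word - bitsize so the data sits at the msb end.  */
  printf ("nregs=%d bitsize=%d shift=%d\n",
          nregs, bitsize, bits_per_word - bitsize);
  return 0;
}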
92e1ef5b | 2799 | /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments |
2800 | wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY | |
2801 | bytes, then we would need to push some additional bytes to pad the | |
481feae3 | 2802 | arguments. So, we compute an adjustment to the stack pointer for an |
2803 | amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE | |
2804 | bytes. Then, when the arguments are pushed the stack will be perfectly | |
2805 | aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should | |
2806 | be popped after the call. Returns the adjustment. */ | |
92e1ef5b | 2807 | |
481feae3 | 2808 | static int |
4c9e08a4 | 2809 | combine_pending_stack_adjustment_and_call (int unadjusted_args_size, |
2810 | struct args_size *args_size, | |
38413c80 | 2811 | unsigned int preferred_unit_stack_boundary) |
92e1ef5b | 2812 | { |
2813 | /* The number of bytes to pop so that the stack will be | |
2814 | under-aligned by UNADJUSTED_ARGS_SIZE bytes. */ | |
2815 | HOST_WIDE_INT adjustment; | |
2816 | /* The alignment of the stack after the arguments are pushed, if we | |
2817 | just pushed the arguments without adjusting the stack here. */ | |
38413c80 | 2818 | unsigned HOST_WIDE_INT unadjusted_alignment; |
92e1ef5b | 2819 | |
c87678e4 | 2820 | unadjusted_alignment |
92e1ef5b | 2821 | = ((stack_pointer_delta + unadjusted_args_size) |
2822 | % preferred_unit_stack_boundary); | |
2823 | ||
2824 | /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes | |
2825 | as possible -- leaving just enough left to cancel out the | |
2826 | UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the | |
2827 | PENDING_STACK_ADJUST is non-negative, and congruent to | |
2828 | -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */ | |
2829 | ||
2830 | /* Begin by trying to pop all the bytes. */ | |
c87678e4 | 2831 | unadjusted_alignment |
2832 | = (unadjusted_alignment | |
92e1ef5b | 2833 | - (pending_stack_adjust % preferred_unit_stack_boundary)); |
2834 | adjustment = pending_stack_adjust; | |
2835 | /* Push enough additional bytes that the stack will be aligned | |
2836 | after the arguments are pushed. */ | |
b47bf174 | 2837 | if (preferred_unit_stack_boundary > 1 && unadjusted_alignment) |
2838 | adjustment -= preferred_unit_stack_boundary - unadjusted_alignment; | |
c87678e4 | 2839 | |
92e1ef5b | 2840 | /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of |
2841 | bytes after the call. The right number is the entire | |
2842 | PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required | |
2843 | by the arguments in the first place. */ | |
c87678e4 | 2844 | args_size->constant |
92e1ef5b | 2845 | = pending_stack_adjust - adjustment + unadjusted_args_size; |
2846 | ||
481feae3 | 2847 | return adjustment; |
92e1ef5b | 2848 | } |
2849 | ||
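A standalone sketch of the computation above under assumed values (hypothetical, not part of the original file). With stack_pointer_delta = 8, pending_stack_adjust = 24, 4 bytes of arguments and a 16-byte boundary, only 12 bytes are popped now; the stack is then under-aligned by exactly the argument size, and the remaining 16 bytes are popped after the call:

#include <stdio.h>

/* Mirrors the adjustment logic above with plain signed arithmetic
   (the real code uses unsigned HOST_WIDE_INTs).  */
static long
sketch_adjustment (long stack_pointer_delta, long pending_stack_adjust,
                   long unadjusted_args_size, long boundary)
{
  /* Misalignment that pushing the arguments alone would leave.  */
  long misalign = (stack_pointer_delta + unadjusted_args_size) % boundary;
  /* Begin by trying to pop all the pending bytes...  */
  misalign -= pending_stack_adjust % boundary;
  if (misalign < 0)
    misalign += boundary;
  long adjustment = pending_stack_adjust;
  /* ... then push back just enough for post-push alignment.  */
  if (boundary > 1 && misalign)
    adjustment -= boundary - misalign;
  return adjustment;
}

int
main (void)
{
  long adj = sketch_adjustment (8, 24, 4, 16);
  /* Prints "pop 12 now, 16 after the call".  */
  printf ("pop %ld now, %ld after the call\n", adj, 24 - adj + 4);
  return 0;
}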
7ecc63d3 | 2850 | /* Scan the expression X to see whether it dereferences any argument |
2851 | slots that we already clobbered by tail call arguments (as noted in | |
2852 | the stored_args_map bitmap). | |
d10cfa8d | 2853 | Return nonzero if X dereferences such argument slots, |
7ecc63d3 | 2854 | zero otherwise. */ |
2855 | ||
2856 | static int | |
4c9e08a4 | 2857 | check_sibcall_argument_overlap_1 (rtx x) |
7ecc63d3 | 2858 | { |
2859 | RTX_CODE code; | |
2860 | int i, j; | |
7ecc63d3 | 2861 | const char *fmt; |
2862 | ||
2863 | if (x == NULL_RTX) | |
2864 | return 0; | |
2865 | ||
2866 | code = GET_CODE (x); | |
2867 | ||
cc0595c0 | 2868 | /* We need not check the operands of the CALL expression itself. */ |
2869 | if (code == CALL) | |
2870 | return 0; | |
2871 | ||
7ecc63d3 | 2872 | if (code == MEM) |
ff6c0ab2 | 2873 | return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0), |
2874 | GET_MODE_SIZE (GET_MODE (x))); | |
7ecc63d3 | 2875 | |
c87678e4 | 2876 | /* Scan all subexpressions. */ |
7ecc63d3 | 2877 | fmt = GET_RTX_FORMAT (code); |
2878 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) | |
2879 | { | |
2880 | if (*fmt == 'e') | |
c87678e4 | 2881 | { |
2882 | if (check_sibcall_argument_overlap_1 (XEXP (x, i))) | |
2883 | return 1; | |
2884 | } | |
7ecc63d3 | 2885 | else if (*fmt == 'E') |
c87678e4 | 2886 | { |
2887 | for (j = 0; j < XVECLEN (x, i); j++) | |
2888 | if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j))) | |
2889 | return 1; | |
2890 | } | |
7ecc63d3 | 2891 | } |
2892 | return 0; | |
7ecc63d3 | 2893 | }
2894 | ||
2895 | /* Scan the sequence after INSN to see whether it dereferences any | |
2896 | argument slots that we already clobbered by tail call arguments (as | |
42b11544 | 2897 | noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add |
2898 | the stack slots for ARG to the stored_args_map bitmap afterwards | |
2899 | (when ARG is a register, MARK_STORED_ARGS_MAP should be 0). Return | |
2900 | nonzero if the sequence after INSN dereferences such argument slots, zero otherwise. */ | |
7ecc63d3 | 2901 | |
2902 | static int | |
3663becd | 2903 | check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg, |
2904 | int mark_stored_args_map) | |
c87678e4 | 2905 | { |
7ecc63d3 | 2906 | int low, high; |
2907 | ||
2908 | if (insn == NULL_RTX) | |
2909 | insn = get_insns (); | |
2910 | else | |
2911 | insn = NEXT_INSN (insn); | |
2912 | ||
2913 | for (; insn; insn = NEXT_INSN (insn)) | |
c87678e4 | 2914 | if (INSN_P (insn) |
2915 | && check_sibcall_argument_overlap_1 (PATTERN (insn))) | |
7ecc63d3 | 2916 | break; |
2917 | ||
42b11544 | 2918 | if (mark_stored_args_map) |
2919 | { | |
ccccd62c | 2920 | if (ARGS_GROW_DOWNWARD) |
2921 | low = -arg->locate.slot_offset.constant - arg->locate.size.constant; | |
2922 | else | |
2923 | low = arg->locate.slot_offset.constant; | |
db10eec8 | 2924 | |
241399f6 | 2925 | for (high = low + arg->locate.size.constant; low < high; low++) |
08b7917c | 2926 | bitmap_set_bit (stored_args_map, low); |
42b11544 | 2927 | } |
7ecc63d3 | 2928 | return insn != NULL_RTX; |
2929 | } | |
2930 | ||
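A minimal standalone sketch of the stored_args_map bookkeeping used by the two functions above, with a plain byte array standing in for the sbitmap (hypothetical, not part of the original file). Every byte of an argument slot that has been written is marked, and a later load that touches a marked byte makes the sibcall fail:

#include <stdbool.h>
#include <stdio.h>

static char slot_written[64];   /* stand-in for stored_args_map */

static void
mark_stored (int low, int size)
{
  for (int i = low; i < low + size; i++)
    slot_written[i] = 1;
}

static bool
overlaps_stored (int low, int size)
{
  for (int i = low; i < low + size; i++)
    if (slot_written[i])
      return true;
  return false;
}

int
main (void)
{
  mark_stored (0, 8);                        /* first slot written */
  printf ("%d\n", overlaps_stored (4, 8));   /* 1: a load from bytes 4..11
                                                would see clobbered data */
  return 0;
}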
05d18e8b | 2931 | /* Given that a function returns a value of mode MODE at the most |
2932 | significant end of hard register VALUE, shift VALUE left or right | |
2933 | as specified by LEFT_P. Return true if some action was needed. */ | |
2c8ff1ed | 2934 | |
05d18e8b | 2935 | bool |
3754d046 | 2936 | shift_return_value (machine_mode mode, bool left_p, rtx value) |
2c8ff1ed | 2937 | { |
05d18e8b | 2938 | HOST_WIDE_INT shift; |
2939 | ||
2940 | gcc_assert (REG_P (value) && HARD_REGISTER_P (value)); | |
bd39703a | 2941 | machine_mode value_mode = GET_MODE (value); |
2942 | shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode); | |
05d18e8b | 2943 | if (shift == 0) |
2944 | return false; | |
2945 | ||
2946 | /* Use ashr rather than lshr for right shifts. This is for the benefit | |
2947 | of the MIPS port, which requires SImode values to be sign-extended | |
2948 | when stored in 64-bit registers. */ | |
bd39703a | 2949 | if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab, |
2950 | value, gen_int_shift_amount (value_mode, shift), | |
2951 | value, 1, OPTAB_WIDEN)) | |
05d18e8b | 2952 | gcc_unreachable (); |
2953 | return true; | |
2c8ff1ed | 2954 | } |
2955 | ||
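A standalone sketch of the shift above in plain integer arithmetic (hypothetical, not part of the original file): a 32-bit (SImode) value held at the most significant end of a 64-bit register wants shift = 64 - 32, and the arithmetic right shift also sign-extends, matching the MIPS note above:

#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  /* -10 as a 32-bit value, stored in the high half of a 64-bit "reg".  */
  uint64_t reg = (uint64_t) (uint32_t) -10 << 32;
  int shift = 64 - 32;                     /* GET_MODE_BITSIZE difference */
  int64_t value = (int64_t) reg >> shift;  /* ashr rather than lshr;
                                              arithmetic on the usual
                                              two's-complement targets */
  printf ("%lld\n", (long long) value);    /* prints -10 */
  return 0;
}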
90af1361 | 2956 | /* If X is a likely-spilled register value, copy it to a pseudo |
2957 | register and return that register. Return X otherwise. */ | |
2958 | ||
2959 | static rtx | |
2960 | avoid_likely_spilled_reg (rtx x) | |
2961 | { | |
f4e36c33 | 2962 | rtx new_rtx; |
90af1361 | 2963 | |
2964 | if (REG_P (x) | |
2965 | && HARD_REGISTER_P (x) | |
24dd0668 | 2966 | && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x)))) |
90af1361 | 2967 | { |
2968 | /* Make sure that we generate a REG rather than a CONCAT. | |
2969 | Moves into CONCATs can need nontrivial instructions, | |
2970 | and the whole point of this function is to avoid | |
2971 | using the hard register directly in such a situation. */ | |
2972 | generating_concat_p = 0; | |
f4e36c33 | 2973 | new_rtx = gen_reg_rtx (GET_MODE (x)); |
90af1361 | 2974 | generating_concat_p = 1; |
f4e36c33 | 2975 | emit_move_insn (new_rtx, x); |
2976 | return new_rtx; | |
90af1361 | 2977 | } |
2978 | return x; | |
2979 | } | |
2980 | ||
80e11038 | 2981 | /* Helper function for expand_call. |
2982 | Return false if EXP is not implementable as a sibling call. */ | |
2983 | ||
2984 | static bool | |
2985 | can_implement_as_sibling_call_p (tree exp, | |
2986 | rtx structure_value_addr, | |
2987 | tree funtype, | |
869bb2b6 | 2988 | int reg_parm_stack_space ATTRIBUTE_UNUSED, |
80e11038 | 2989 | tree fndecl, |
2990 | int flags, | |
2991 | tree addr, | |
2992 | const args_size &args_size) | |
2993 | { | |
2994 | if (!targetm.have_sibcall_epilogue ()) | |
b4a61e77 | 2995 | { |
2996 | maybe_complain_about_tail_call | |
2997 | (exp, | |
2998 | "machine description does not have" | |
2999 | " a sibcall_epilogue instruction pattern"); | |
3000 | return false; | |
3001 | } | |
80e11038 | 3002 | |
3003 | /* Doing sibling call optimization needs some work, since | |
3004 | structure_value_addr can be allocated on the stack. | |
3005 | It does not seem worth the effort since few optimizable | |
3006 | sibling calls will return a structure. */ | |
3007 | if (structure_value_addr != NULL_RTX) | |
b4a61e77 | 3008 | { |
3009 | maybe_complain_about_tail_call (exp, "callee returns a structure"); | |
3010 | return false; | |
3011 | } | |
80e11038 | 3012 | |
3013 | #ifdef REG_PARM_STACK_SPACE | |
3014 | /* If outgoing reg parm stack space changes, we cannot do a sibcall. */ | |
3015 | if (OUTGOING_REG_PARM_STACK_SPACE (funtype) | |
3016 | != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)) | |
3017 | || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl))) | |
b4a61e77 | 3018 | { |
3019 | maybe_complain_about_tail_call (exp, | |
3020 | "inconsistent size of stack space" | |
3021 | " allocated for arguments which are" | |
3022 | " passed in registers"); | |
3023 | return false; | |
3024 | } | |
80e11038 | 3025 | #endif |
3026 | ||
3027 | /* Check whether the target is able to optimize the call | |
3028 | into a sibcall. */ | |
3029 | if (!targetm.function_ok_for_sibcall (fndecl, exp)) | |
b4a61e77 | 3030 | { |
3031 | maybe_complain_about_tail_call (exp, | |
3032 | "target is not able to optimize the" | |
3033 | " call into a sibling call"); | |
3034 | return false; | |
3035 | } | |
80e11038 | 3036 | |
3037 | /* Functions that do not return exactly once may not be sibcall | |
3038 | optimized. */ | |
b4a61e77 | 3039 | if (flags & ECF_RETURNS_TWICE) |
3040 | { | |
3041 | maybe_complain_about_tail_call (exp, "callee returns twice"); | |
3042 | return false; | |
3043 | } | |
3044 | if (flags & ECF_NORETURN) | |
3045 | { | |
3046 | maybe_complain_about_tail_call (exp, "callee does not return"); | |
3047 | return false; | |
3048 | } | |
80e11038 | 3049 | |
3050 | if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))) | |
b4a61e77 | 3051 | { |
3052 | maybe_complain_about_tail_call (exp, "volatile function type"); | |
3053 | return false; | |
3054 | } | |
80e11038 | 3055 | |
3056 | /* If the called function is nested in the current one, it might access | |
3057 | some of the caller's arguments, but could clobber them beforehand if | |
3058 | the argument areas are shared. */ | |
3059 | if (fndecl && decl_function_context (fndecl) == current_function_decl) | |
b4a61e77 | 3060 | { |
3061 | maybe_complain_about_tail_call (exp, "nested function"); | |
3062 | return false; | |
3063 | } | |
80e11038 | 3064 | |
3065 | /* If this function requires more stack slots than the current | |
3066 | function, we cannot change it into a sibling call. | |
3067 | crtl->args.pretend_args_size is not part of the | |
3068 | stack allocated by our caller. */ | |
3069 | if (args_size.constant > (crtl->args.size - crtl->args.pretend_args_size)) | |
b4a61e77 | 3070 | { |
3071 | maybe_complain_about_tail_call (exp, | |
3072 | "callee required more stack slots" | |
3073 | " than the caller"); | |
3074 | return false; | |
3075 | } | |
80e11038 | 3076 | |
3077 | /* If the callee pops its own arguments, then it must pop exactly | |
3078 | the same number of arguments as the current function. */ | |
3079 | if (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant) | |
3080 | != targetm.calls.return_pops_args (current_function_decl, | |
3081 | TREE_TYPE (current_function_decl), | |
3082 | crtl->args.size)) | |
b4a61e77 | 3083 | { |
3084 | maybe_complain_about_tail_call (exp, | |
3085 | "inconsistent number of" | |
3086 | " popped arguments"); | |
3087 | return false; | |
3088 | } | |
80e11038 | 3089 | |
3090 | if (!lang_hooks.decls.ok_for_sibcall (fndecl)) | |
b4a61e77 | 3091 | { |
3092 | maybe_complain_about_tail_call (exp, "frontend does not support" | |
3093 | " sibling call"); | |
3094 | return false; | |
3095 | } | |
80e11038 | 3096 | |
3097 | /* All checks passed. */ | |
3098 | return true; | |
3099 | } | |
3100 | ||
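At the source level the checks above amount to familiar restrictions. A hypothetical example (not from the original file), assuming -O2 -foptimize-sibling-calls and a target that returns `struct big' in memory:

struct big { int a[8]; };

extern int add1 (int x);
extern struct big make_big (void);

int
call_add1 (int x)
{
  return add1 (x);        /* tail position: a sibcall candidate */
}

struct big
call_make_big (void)
{
  return make_big ();     /* structure_value_addr is set, so the
                             "callee returns a structure" check above
                             rejects the sibcall */
}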
c2f47e15 | 3101 | /* Generate all the code for a CALL_EXPR exp |
66d433c7 | 3102 | and return an rtx for its value. |
3103 | Store the value in TARGET (specified as an rtx) if convenient. | |
3104 | If the value is stored in TARGET then TARGET is returned. | |
3105 | If IGNORE is nonzero, then we ignore the value of the function call. */ | |
3106 | ||
3107 | rtx | |
4c9e08a4 | 3108 | expand_call (tree exp, rtx target, int ignore) |
66d433c7 | 3109 | { |
60ecc450 | 3110 | /* Nonzero if we are currently expanding a call. */ |
3111 | static int currently_expanding_call = 0; | |
3112 | ||
66d433c7 | 3113 | /* RTX for the function to be called. */ |
3114 | rtx funexp; | |
60ecc450 | 3115 | /* Sequence of insns to perform a normal "call". */ |
3663becd | 3116 | rtx_insn *normal_call_insns = NULL; |
4ee9c684 | 3117 | /* Sequence of insns to perform a tail "call". */ |
3663becd | 3118 | rtx_insn *tail_call_insns = NULL; |
66d433c7 | 3119 | /* Data type of the function. */ |
3120 | tree funtype; | |
915e81b8 | 3121 | tree type_arg_types; |
16c9337c | 3122 | tree rettype; |
66d433c7 | 3123 | /* Declaration of the function being called, |
3124 | or 0 if the function is computed (not known by name). */ | |
3125 | tree fndecl = 0; | |
e100aadc | 3126 | /* The type of the function being called. */ |
3127 | tree fntype; | |
4ee9c684 | 3128 | bool try_tail_call = CALL_EXPR_TAILCALL (exp); |
b4a61e77 | 3129 | bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp); |
60ecc450 | 3130 | int pass; |
66d433c7 | 3131 | |
3132 | /* Register in which non-BLKmode value will be returned, | |
3133 | or 0 if no value or if value is BLKmode. */ | |
3134 | rtx valreg; | |
058a1b7a | 3135 | /* Register(s) in which bounds are returned. */ |
3136 | rtx valbnd = NULL; | |
66d433c7 | 3137 | /* Address where we should return a BLKmode value; |
3138 | 0 if value not BLKmode. */ | |
3139 | rtx structure_value_addr = 0; | |
3140 | /* Nonzero if that address is being passed by treating it as | |
3141 | an extra, implicit first parameter. Otherwise, | |
3142 | it is passed by being copied directly into struct_value_rtx. */ | |
3143 | int structure_value_addr_parm = 0; | |
cd46caee | 3144 | /* Holds the value of implicit argument for the struct value. */ |
3145 | tree structure_value_addr_value = NULL_TREE; | |
66d433c7 | 3146 | /* Size of aggregate value wanted, or zero if none wanted |
3147 | or if we are using the non-reentrant PCC calling convention | |
3148 | or expecting the value in registers. */ | |
997d68fe | 3149 | HOST_WIDE_INT struct_value_size = 0; |
66d433c7 | 3150 | /* Nonzero if called function returns an aggregate in memory PCC style, |
3151 | by returning the address of where to find it. */ | |
3152 | int pcc_struct_value = 0; | |
45550790 | 3153 | rtx struct_value = 0; |
66d433c7 | 3154 | |
3155 | /* Number of actual parameters in this call, including struct value addr. */ | |
3156 | int num_actuals; | |
3157 | /* Number of named args. Args after this are anonymous ones | |
3158 | and they must all go on the stack. */ | |
3159 | int n_named_args; | |
cd46caee | 3160 | /* Number of complex actual arguments that need to be split. */ |
3161 | int num_complex_actuals = 0; | |
66d433c7 | 3162 | |
3163 | /* Vector of information about each argument. | |
3164 | Arguments are numbered in the order they will be pushed, | |
3165 | not the order they are written. */ | |
3166 | struct arg_data *args; | |
3167 | ||
3168 | /* Total size in bytes of all the stack-parms scanned so far. */ | |
3169 | struct args_size args_size; | |
0e0be288 | 3170 | struct args_size adjusted_args_size; |
66d433c7 | 3171 | /* Size of arguments before any adjustments (such as rounding). */ |
cc45e5e8 | 3172 | int unadjusted_args_size; |
66d433c7 | 3173 | /* Data on reg parms scanned so far. */ |
39cba157 | 3174 | CUMULATIVE_ARGS args_so_far_v; |
3175 | cumulative_args_t args_so_far; | |
66d433c7 | 3176 | /* Nonzero if a reg parm has been scanned. */ |
3177 | int reg_parm_seen; | |
a50ca374 | 3178 | /* Nonzero if this is an indirect function call. */ |
66d433c7 | 3179 | |
c87678e4 | 3180 | /* Nonzero if we must avoid push-insns in the args for this call. |
66d433c7 | 3181 | If stack space is allocated for register parameters, but not by the |
3182 | caller, then it is preallocated in the fixed part of the stack frame. | |
3183 | So the entire argument block must then be preallocated (i.e., we | |
3184 | ignore PUSH_ROUNDING in that case). */ | |
3185 | ||
4448f543 | 3186 | int must_preallocate = !PUSH_ARGS; |
66d433c7 | 3187 | |
eb2f80f3 | 3188 | /* Size of the stack reserved for parameter registers. */ |
2d7187c2 | 3189 | int reg_parm_stack_space = 0; |
3190 | ||
66d433c7 | 3191 | /* Address of space preallocated for stack parms |
3192 | (on machines that lack push insns), or 0 if space not preallocated. */ | |
3193 | rtx argblock = 0; | |
3194 | ||
c8010b80 | 3195 | /* Mask of ECF_ and ERF_ flags. */ |
dfe08167 | 3196 | int flags = 0; |
c8010b80 | 3197 | int return_flags = 0; |
4448f543 | 3198 | #ifdef REG_PARM_STACK_SPACE |
66d433c7 | 3199 | /* Define the boundary of the register parm stack space that needs to be |
6e96b626 | 3200 | saved, if any. */ |
3201 | int low_to_save, high_to_save; | |
66d433c7 | 3202 | rtx save_area = 0; /* Place that it is saved */ |
3203 | #endif | |
3204 | ||
66d433c7 | 3205 | int initial_highest_arg_in_use = highest_outgoing_arg_in_use; |
3206 | char *initial_stack_usage_map = stack_usage_map; | |
a331ea1b | 3207 | char *stack_usage_map_buf = NULL; |
66d433c7 | 3208 | |
9069face | 3209 | int old_stack_allocated; |
3210 | ||
3211 | /* State variables to track stack modifications. */ | |
66d433c7 | 3212 | rtx old_stack_level = 0; |
9069face | 3213 | int old_stack_arg_under_construction = 0; |
65dccdb1 | 3214 | int old_pending_adj = 0; |
66d433c7 | 3215 | int old_inhibit_defer_pop = inhibit_defer_pop; |
9069face | 3216 | |
3217 | /* Some stack pointer alterations we make are performed via | |
3218 | allocate_dynamic_stack_space. This modifies the stack_pointer_delta, | |
3219 | which we then also need to save/restore along the way. */ | |
31d035ca | 3220 | int old_stack_pointer_delta = 0; |
9069face | 3221 | |
60ecc450 | 3222 | rtx call_fusage; |
c2f47e15 | 3223 | tree addr = CALL_EXPR_FN (exp); |
19cb6b50 | 3224 | int i; |
92e1ef5b | 3225 | /* The alignment of the stack, in bits. */ |
38413c80 | 3226 | unsigned HOST_WIDE_INT preferred_stack_boundary; |
92e1ef5b | 3227 | /* The alignment of the stack, in bytes. */ |
38413c80 | 3228 | unsigned HOST_WIDE_INT preferred_unit_stack_boundary; |
4ee9c684 | 3229 | /* The static chain value to use for this call. */ |
3230 | rtx static_chain_value; | |
dfe08167 | 3231 | /* See if this is "nothrow" function call. */ |
3232 | if (TREE_NOTHROW (exp)) | |
3233 | flags |= ECF_NOTHROW; | |
3234 | ||
4ee9c684 | 3235 | /* See if we can find a DECL-node for the actual function, and get the |
3236 | function attributes (flags) from the function decl or type node. */ | |
97a1590b | 3237 | fndecl = get_callee_fndecl (exp); |
3238 | if (fndecl) | |
66d433c7 | 3239 | { |
e100aadc | 3240 | fntype = TREE_TYPE (fndecl); |
97a1590b | 3241 | flags |= flags_from_decl_or_type (fndecl); |
c8010b80 | 3242 | return_flags |= decl_return_flags (fndecl); |
66d433c7 | 3243 | } |
97a1590b | 3244 | else |
8a8cdb8d | 3245 | { |
16c9337c | 3246 | fntype = TREE_TYPE (TREE_TYPE (addr)); |
e100aadc | 3247 | flags |= flags_from_decl_or_type (fntype); |
a27e3913 | 3248 | if (CALL_EXPR_BY_DESCRIPTOR (exp)) |
3249 | flags |= ECF_BY_DESCRIPTOR; | |
8a8cdb8d | 3250 | } |
16c9337c | 3251 | rettype = TREE_TYPE (exp); |
d490e2f2 | 3252 | |
e100aadc | 3253 | struct_value = targetm.calls.struct_value_rtx (fntype, 0); |
45550790 | 3254 | |
4a081ddd | 3255 | /* Warn if this value is an aggregate type, |
3256 | regardless of which calling convention we are using for it. */ | |
16c9337c | 3257 | if (AGGREGATE_TYPE_P (rettype)) |
efb9d9ee | 3258 | warning (OPT_Waggregate_return, "function call has aggregate value"); |
4a081ddd | 3259 | |
9c2a0c05 | 3260 | /* If the result of a non looping pure or const function call is |
3261 | ignored (or void), and none of its arguments are volatile, we can | |
3262 | avoid expanding the call and just evaluate the arguments for | |
3263 | side-effects. */ | |
4a081ddd | 3264 | if ((flags & (ECF_CONST | ECF_PURE)) |
9c2a0c05 | 3265 | && (!(flags & ECF_LOOPING_CONST_OR_PURE)) |
4a081ddd | 3266 | && (ignore || target == const0_rtx |
16c9337c | 3267 | || TYPE_MODE (rettype) == VOIDmode)) |
4a081ddd | 3268 | { |
3269 | bool volatilep = false; | |
3270 | tree arg; | |
cd46caee | 3271 | call_expr_arg_iterator iter; |
4a081ddd | 3272 | |
cd46caee | 3273 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
3274 | if (TREE_THIS_VOLATILE (arg)) | |
4a081ddd | 3275 | { |
3276 | volatilep = true; | |
3277 | break; | |
3278 | } | |
3279 | ||
3280 | if (! volatilep) | |
3281 | { | |
cd46caee | 3282 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) |
3283 | expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL); | |
4a081ddd | 3284 | return const0_rtx; |
3285 | } | |
3286 | } | |
3287 | ||
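  /* Hypothetical illustration (not in the original source): given
     __attribute__ ((const)) int sq (int);  the statement  sq (x++);
     with its value unused takes the path above -- the call itself is
     dropped, and only x++ is expanded, for its side effect.  */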
2d7187c2 | 3288 | #ifdef REG_PARM_STACK_SPACE |
fa20f865 | 3289 | reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl); |
2d7187c2 | 3290 | #endif |
2d7187c2 | 3291 | |
fa20f865 | 3292 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) |
22c61100 | 3293 | && reg_parm_stack_space > 0 && PUSH_ARGS) |
997d68fe | 3294 | must_preallocate = 1; |
997d68fe | 3295 | |
66d433c7 | 3296 | /* Set up a place to return a structure. */ |
3297 | ||
3298 | /* Cater to broken compilers. */ | |
4cd5bb61 | 3299 | if (aggregate_value_p (exp, fntype)) |
66d433c7 | 3300 | { |
3301 | /* This call returns a big structure. */ | |
2dd6f9ed | 3302 | flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); |
66d433c7 | 3303 | |
3304 | #ifdef PCC_STATIC_STRUCT_RETURN | |
f49c64ba | 3305 | { |
3306 | pcc_struct_value = 1; | |
f49c64ba | 3307 | } |
3308 | #else /* not PCC_STATIC_STRUCT_RETURN */ | |
3309 | { | |
16c9337c | 3310 | struct_value_size = int_size_in_bytes (rettype); |
66d433c7 | 3311 | |
e012cdc7 | 3312 | /* Even if it is semantically safe to use the target as the return |
3313 | slot, it may be not sufficiently aligned for the return type. */ | |
3314 | if (CALL_EXPR_RETURN_SLOT_OPT (exp) | |
3315 | && target | |
3316 | && MEM_P (target) | |
3317 | && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype) | |
dfdced85 | 3318 | && targetm.slow_unaligned_access (TYPE_MODE (rettype), |
3319 | MEM_ALIGN (target)))) | |
f49c64ba | 3320 | structure_value_addr = XEXP (target, 0); |
3321 | else | |
3322 | { | |
f49c64ba | 3323 | /* For variable-sized objects, we must be called with a target |
3324 | specified. If we were to allocate space on the stack here, | |
3325 | we would have no way of knowing when to free it. */ | |
0ab48139 | 3326 | rtx d = assign_temp (rettype, 1, 1); |
930f0e87 | 3327 | structure_value_addr = XEXP (d, 0); |
f49c64ba | 3328 | target = 0; |
3329 | } | |
3330 | } | |
3331 | #endif /* not PCC_STATIC_STRUCT_RETURN */ | |
66d433c7 | 3332 | } |
3333 | ||
0e0be288 | 3334 | /* Figure out the amount to which the stack should be aligned. */ |
0e0be288 | 3335 | preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; |
28992b23 | 3336 | if (fndecl) |
3337 | { | |
35ee1c66 | 3338 | struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl); |
9a27561f | 3339 | /* Without automatic stack alignment, we can't increase preferred |
3340 | stack boundary. With automatic stack alignment, it is | |
3341 | unnecessary since unless we can guarantee that all callers will | |
3342 | align the outgoing stack properly, callee has to align its | |
3343 | stack anyway. */ | |
3344 | if (i | |
3345 | && i->preferred_incoming_stack_boundary | |
3346 | && i->preferred_incoming_stack_boundary < preferred_stack_boundary) | |
28992b23 | 3347 | preferred_stack_boundary = i->preferred_incoming_stack_boundary; |
3348 | } | |
0e0be288 | 3349 | |
3350 | /* Operand 0 is a pointer-to-function; get the type of the function. */ | |
95672afe | 3351 | funtype = TREE_TYPE (addr); |
231bd014 | 3352 | gcc_assert (POINTER_TYPE_P (funtype)); |
0e0be288 | 3353 | funtype = TREE_TYPE (funtype); |
3354 | ||
cd46caee | 3355 | /* Count whether there are actual complex arguments that need to be split |
3356 | into their real and imaginary parts. Munge the type_arg_types | |
3357 | appropriately here as well. */ | |
92d40bc4 | 3358 | if (targetm.calls.split_complex_arg) |
915e81b8 | 3359 | { |
cd46caee | 3360 | call_expr_arg_iterator iter; |
3361 | tree arg; | |
3362 | FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) | |
3363 | { | |
3364 | tree type = TREE_TYPE (arg); | |
3365 | if (type && TREE_CODE (type) == COMPLEX_TYPE | |
3366 | && targetm.calls.split_complex_arg (type)) | |
3367 | num_complex_actuals++; | |
3368 | } | |
915e81b8 | 3369 | type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype)); |
915e81b8 | 3370 | } |
3371 | else | |
3372 | type_arg_types = TYPE_ARG_TYPES (funtype); | |
3373 | ||
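  /* Hypothetical illustration (not in the original source): if the
     target's split_complex_arg hook accepts _Complex double, a call
     f (1.0 + 2.0i) is treated from here on as f (1.0, 2.0);
     num_complex_actuals gains one and the COMPLEX_TYPE entry in
     type_arg_types is replaced by two scalar entries.  */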
0e0be288 | 3374 | if (flags & ECF_MAY_BE_ALLOCA) |
18d50ae6 | 3375 | cfun->calls_alloca = 1; |
0e0be288 | 3376 | |
3377 | /* If struct_value_rtx is 0, it means pass the address | |
cd46caee | 3378 | as if it were an extra parameter. Put the argument expression |
3379 | in structure_value_addr_value. */ | |
45550790 | 3380 | if (structure_value_addr && struct_value == 0) |
0e0be288 | 3381 | { |
3382 | /* If structure_value_addr is a REG other than | |
3383 | virtual_outgoing_args_rtx, we can always use it. If it | |
3384 | is not a REG, we must always copy it into a register. | |
3385 | If it is virtual_outgoing_args_rtx, we must copy it to another | |
3386 | register in some cases. */ | |
8ad4c111 | 3387 | rtx temp = (!REG_P (structure_value_addr) |
0e0be288 | 3388 | || (ACCUMULATE_OUTGOING_ARGS |
3389 | && stack_arg_under_construction | |
3390 | && structure_value_addr == virtual_outgoing_args_rtx) | |
0d568ddf | 3391 | ? copy_addr_to_reg (convert_memory_address |
e100aadc | 3392 | (Pmode, structure_value_addr)) |
0e0be288 | 3393 | : structure_value_addr); |
3394 | ||
cd46caee | 3395 | structure_value_addr_value = |
3396 | make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); | |
058a1b7a | 3397 | structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1; |
0e0be288 | 3398 | } |
3399 | ||
3400 | /* Count the arguments and set NUM_ACTUALS. */ | |
cd46caee | 3401 | num_actuals = |
3402 | call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm; | |
0e0be288 | 3403 | |
3404 | /* Compute number of named args. | |
30a10006 | 3405 | First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */ |
3406 | ||
3407 | if (type_arg_types != 0) | |
3408 | n_named_args | |
3409 | = (list_length (type_arg_types) | |
3410 | /* Count the struct value address, if it is passed as a parm. */ | |
3411 | + structure_value_addr_parm); | |
3412 | else | |
3413 | /* If we know nothing, treat all args as named. */ | |
3414 | n_named_args = num_actuals; | |
3415 | ||
3416 | /* Start updating where the next arg would go. | |
3417 | ||
3418 | On some machines (such as the PA) indirect calls have a different | |
3419 | calling convention than normal calls. The fourth argument in | |
3420 | INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call | |
3421 | or not. */ | |
39cba157 | 3422 | INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args); |
3423 | args_so_far = pack_cumulative_args (&args_so_far_v); | |
30a10006 | 3424 | |
3425 | /* Now possibly adjust the number of named args. | |
0e0be288 | 3426 | Normally, don't include the last named arg if anonymous args follow. |
8bdddbd1 | 3427 | We do include the last named arg if |
3428 | targetm.calls.strict_argument_naming() returns nonzero. | |
0e0be288 | 3429 | (If no anonymous args follow, the result of list_length is actually |
3430 | one too large. This is harmless.) | |
3431 | ||
a107cd89 | 3432 | If targetm.calls.pretend_outgoing_varargs_named() returns |
8bdddbd1 | 3433 | nonzero, and targetm.calls.strict_argument_naming() returns zero, |
3434 | this machine will be able to place unnamed args that were passed | |
3435 | in registers into the stack. So treat all args as named. This | |
3436 | allows the insns emitting for a specific argument list to be | |
3437 | independent of the function declaration. | |
a107cd89 | 3438 | |
3439 | If targetm.calls.pretend_outgoing_varargs_named() returns zero, | |
3440 | we do not have any reliable way to pass unnamed args in | |
3441 | registers, so we must force them into memory. */ | |
0e0be288 | 3442 | |
30a10006 | 3443 | if (type_arg_types != 0 |
39cba157 | 3444 | && targetm.calls.strict_argument_naming (args_so_far)) |
30a10006 | 3445 | ; |
3446 | else if (type_arg_types != 0 | |
39cba157 | 3447 | && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far)) |
30a10006 | 3448 | /* Don't include the last named arg. */ |
3449 | --n_named_args; | |
0e0be288 | 3450 | else |
30a10006 | 3451 | /* Treat all args as named. */ |
0e0be288 | 3452 | n_named_args = num_actuals; |
3453 | ||
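  /* Hypothetical illustration (not in the original source): for
     int f (int a, int b, ...)  called as f (1, 2, 3), the raw count
     gives n_named_args == 2.  It stays 2 under strict_argument_naming,
     becomes 3 (all args treated as named) when
     pretend_outgoing_varargs_named returns nonzero, and drops to 1
     when that hook returns zero.  */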
0e0be288 | 3454 | /* Make a vector to hold all the information about each arg. */ |
1f303606 | 3455 | args = XCNEWVEC (struct arg_data, num_actuals); |
0e0be288 | 3456 | |
00dddcf2 | 3457 | /* Build up entries in the ARGS array, compute the size of the |
3458 | arguments into ARGS_SIZE, etc. */ | |
0e0be288 | 3459 | initialize_argument_information (num_actuals, args, &args_size, |
cd46caee | 3460 | n_named_args, exp, |
d8b9c828 | 3461 | structure_value_addr_value, fndecl, fntype, |
39cba157 | 3462 | args_so_far, reg_parm_stack_space, |
0e0be288 | 3463 | &old_stack_level, &old_pending_adj, |
eaa112a0 | 3464 | &must_preallocate, &flags, |
4ee9c684 | 3465 | &try_tail_call, CALL_FROM_THUNK_P (exp)); |
0e0be288 | 3466 | |
3467 | if (args_size.var) | |
2dd6f9ed | 3468 | must_preallocate = 1; |
0e0be288 | 3469 | |
3470 | /* Now make final decision about preallocating stack space. */ | |
3471 | must_preallocate = finalize_must_preallocate (must_preallocate, | |
3472 | num_actuals, args, | |
3473 | &args_size); | |
3474 | ||
3475 | /* If the structure value address will reference the stack pointer, we | |
3476 | must stabilize it. We don't need to do this if we know that we are | |
3477 | not going to adjust the stack pointer in processing this call. */ | |
3478 | ||
3479 | if (structure_value_addr | |
3480 | && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr) | |
3481 | || reg_mentioned_p (virtual_outgoing_args_rtx, | |
3482 | structure_value_addr)) | |
3483 | && (args_size.var | |
3484 | || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant))) | |
3485 | structure_value_addr = copy_to_reg (structure_value_addr); | |
60ecc450 | 3486 | |
0d568ddf | 3487 | /* Tail calls can make things harder to debug, and we've traditionally |
4f8af819 | 3488 | pushed these optimizations into -O2. Don't try if we're already |
fdf2b689 | 3489 | expanding a call, as that means we're an argument. Don't try if |
011e6b51 | 3490 | there are cleanups, as we know there's code to follow the call. */ |
60ecc450 | 3491 | |
0e0be288 | 3492 | if (currently_expanding_call++ != 0 |
3493 | || !flag_optimize_sibling_calls | |
4ee9c684 | 3494 | || args_size.var |
3072d30e | 3495 | || dbg_cnt (tail_call) == false) |
4ee9c684 | 3496 | try_tail_call = 0; |
0e0be288 | 3497 | |
b4a61e77 | 3498 | /* If the user has marked the function as requiring tail-call |
3499 | optimization, attempt it. */ | |
3500 | if (must_tail_call) | |
3501 | try_tail_call = 1; | |
3502 | ||
0e0be288 | 3503 | /* Check the remaining reasons for tail call optimization to fail. */ |
80e11038 | 3504 | if (try_tail_call) |
b4a61e77 | 3505 | try_tail_call = can_implement_as_sibling_call_p (exp, |
3506 | structure_value_addr, | |
3507 | funtype, | |
3508 | reg_parm_stack_space, | |
3509 | fndecl, | |
80e11038 | 3510 | flags, addr, args_size); |
4b066641 | 3511 | |
4681dd41 | 3512 | /* Check if caller and callee disagree in promotion of function |
3513 | return value. */ | |
3514 | if (try_tail_call) | |
3515 | { | |
3754d046 | 3516 | machine_mode caller_mode, caller_promoted_mode; |
3517 | machine_mode callee_mode, callee_promoted_mode; | |
4681dd41 | 3518 | int caller_unsignedp, callee_unsignedp; |
3519 | tree caller_res = DECL_RESULT (current_function_decl); | |
3520 | ||
3521 | caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res)); | |
3b2411a8 | 3522 | caller_mode = DECL_MODE (caller_res); |
4681dd41 | 3523 | callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype)); |
3b2411a8 | 3524 | callee_mode = TYPE_MODE (TREE_TYPE (funtype)); |
3525 | caller_promoted_mode | |
3526 | = promote_function_mode (TREE_TYPE (caller_res), caller_mode, | |
3527 | &caller_unsignedp, | |
3528 | TREE_TYPE (current_function_decl), 1); | |
3529 | callee_promoted_mode | |
c879dbcf | 3530 | = promote_function_mode (TREE_TYPE (funtype), callee_mode, |
3b2411a8 | 3531 | &callee_unsignedp, |
c879dbcf | 3532 | funtype, 1); |
4681dd41 | 3533 | if (caller_mode != VOIDmode |
3534 | && (caller_promoted_mode != callee_promoted_mode | |
3535 | || ((caller_mode != caller_promoted_mode | |
3536 | || callee_mode != callee_promoted_mode) | |
3537 | && (caller_unsignedp != callee_unsignedp | |
974534ab | 3538 | || partial_subreg_p (caller_mode, callee_mode))))) |
b4a61e77 | 3539 | { |
3540 | try_tail_call = 0; | |
3541 | maybe_complain_about_tail_call (exp, | |
3542 | "caller and callee disagree in" | |
3543 | " promotion of function" | |
3544 | " return value"); | |
3545 | } | |
4681dd41 | 3546 | } |
3547 | ||
755ece1f | 3548 | /* Ensure current function's preferred stack boundary is at least |
3549 | what we need. Stack alignment may also increase preferred stack | |
3550 | boundary. */ | |
54d759e3 | 3551 | if (crtl->preferred_stack_boundary < preferred_stack_boundary) |
edb7afe8 | 3552 | crtl->preferred_stack_boundary = preferred_stack_boundary; |
755ece1f | 3553 | else |
3554 | preferred_stack_boundary = crtl->preferred_stack_boundary; | |
d0285dd8 | 3555 | |
0e0be288 | 3556 | preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT; |
4b066641 | 3557 | |
60ecc450 | 3558 | /* We want to make two insn chains; one for a sibling call, the other |
3559 | for a normal call. We will select one of the two chains after | |
3560 | initial RTL generation is complete. */ | |
6e96b626 | 3561 | for (pass = try_tail_call ? 0 : 1; pass < 2; pass++) |
60ecc450 | 3562 | { |
3563 | int sibcall_failure = 0; | |
35a3065a | 3564 | /* We want to emit any pending stack adjustments before the tail |
60ecc450 | 3565 | recursion "call". That way we know any adjustment after the tail |
0d568ddf | 3566 | recursion call can be ignored if we indeed use the tail |
60ecc450 | 3567 | call expansion. */ |
b6d206a2 | 3568 | saved_pending_stack_adjust save; |
3663becd | 3569 | rtx_insn *insns, *before_call, *after_args; |
3570 | rtx next_arg_reg; | |
1e2b2ab3 | 3571 | |
60ecc450 | 3572 | if (pass == 0) |
3573 | { | |
60ecc450 | 3574 | /* State variables we need to save and restore between |
3575 | iterations. */ | |
b6d206a2 | 3576 | save_pending_stack_adjust (&save); |
60ecc450 | 3577 | } |
dfe08167 | 3578 | if (pass) |
3579 | flags &= ~ECF_SIBCALL; | |
3580 | else | |
3581 | flags |= ECF_SIBCALL; | |
66d433c7 | 3582 | |
60ecc450 | 3583 | /* Other state variables that we must reinitialize each time |
dfe08167 | 3584 | through the loop (that are not initialized by the loop itself). */ |
60ecc450 | 3585 | argblock = 0; |
3586 | call_fusage = 0; | |
2f921ec9 | 3587 | |
c87678e4 | 3588 | /* Start a new sequence for the normal call case. |
66d433c7 | 3589 | |
60ecc450 | 3590 | From this point on, if the sibling call fails, we want to set |
3591 | sibcall_failure instead of continuing the loop. */ | |
3592 | start_sequence (); | |
412321ce | 3593 | |
60ecc450 | 3594 | /* Don't let pending stack adjusts add up to too much. |
3595 | Also, do all pending adjustments now if there is any chance | |
3596 | this might be a call to alloca or if we are expanding a sibling | |
ff3ae375 | 3597 | call sequence. |
82e95be3 | 3598 | Also do the adjustments before a throwing call, otherwise |
3599 | exception handling can fail; PR 19225. */ | |
60ecc450 | 3600 | if (pending_stack_adjust >= 32 |
5edaabad | 3601 | || (pending_stack_adjust > 0 |
ff3ae375 | 3602 | && (flags & ECF_MAY_BE_ALLOCA)) |
82e95be3 | 3603 | || (pending_stack_adjust > 0 |
3604 | && flag_exceptions && !(flags & ECF_NOTHROW)) | |
60ecc450 | 3605 | || pass == 0) |
3606 | do_pending_stack_adjust (); | |
66d433c7 | 3607 | |
60ecc450 | 3608 | /* Precompute any arguments as needed. */ |
02510658 | 3609 | if (pass) |
2dd6f9ed | 3610 | precompute_arguments (num_actuals, args); |
66d433c7 | 3611 | |
60ecc450 | 3612 | /* Now we are about to start emitting insns that can be deleted |
3613 | if a libcall is deleted. */ | |
2dd6f9ed | 3614 | if (pass && (flags & ECF_MALLOC)) |
60ecc450 | 3615 | start_sequence (); |
66d433c7 | 3616 | |
783f362b | 3617 | if (pass == 0 |
3618 | && crtl->stack_protect_guard | |
3619 | && targetm.stack_protect_runtime_enabled_p ()) | |
71d89928 | 3620 | stack_protect_epilogue (); |
3621 | ||
0e0be288 | 3622 | adjusted_args_size = args_size; |
481feae3 | 3623 | /* Compute the actual size of the argument block required. The variable |
3624 | and constant sizes must be combined, the size may have to be rounded, | |
3625 | and there may be a minimum required size. When generating a sibcall | |
3626 | pattern, do not round up, since we'll be re-using whatever space our | |
3627 | caller provided. */ | |
3628 | unadjusted_args_size | |
c87678e4 | 3629 | = compute_argument_block_size (reg_parm_stack_space, |
3630 | &adjusted_args_size, | |
fa20f865 | 3631 | fndecl, fntype, |
481feae3 | 3632 | (pass == 0 ? 0 |
3633 | : preferred_stack_boundary)); | |
3634 | ||
c87678e4 | 3635 | old_stack_allocated = stack_pointer_delta - pending_stack_adjust; |
481feae3 | 3636 | |
02510658 | 3637 | /* The argument block when performing a sibling call is the |
a0c938f0 | 3638 | incoming argument block. */ |
02510658 | 3639 | if (pass == 0) |
7ecc63d3 | 3640 | { |
27a7a23a | 3641 | argblock = crtl->args.internal_arg_pointer; |
a8b58ffb | 3642 | if (STACK_GROWS_DOWNWARD) |
3643 | argblock | |
3644 | = plus_constant (Pmode, argblock, crtl->args.pretend_args_size); | |
3645 | else | |
3646 | argblock | |
3647 | = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size); | |
3648 | ||
7ecc63d3 | 3649 | stored_args_map = sbitmap_alloc (args_size.constant); |
53c5d9d4 | 3650 | bitmap_clear (stored_args_map); |
7ecc63d3 | 3651 | } |
481feae3 | 3652 | |
60ecc450 | 3653 | /* If we have no actual push instructions, or shouldn't use them, |
3654 | make space for all args right now. */ | |
0e0be288 | 3655 | else if (adjusted_args_size.var != 0) |
66d433c7 | 3656 | { |
60ecc450 | 3657 | if (old_stack_level == 0) |
3658 | { | |
e9c97615 | 3659 | emit_stack_save (SAVE_BLOCK, &old_stack_level); |
9069face | 3660 | old_stack_pointer_delta = stack_pointer_delta; |
60ecc450 | 3661 | old_pending_adj = pending_stack_adjust; |
3662 | pending_stack_adjust = 0; | |
60ecc450 | 3663 | /* stack_arg_under_construction says whether a stack arg is |
3664 | being constructed at the old stack level. Pushing the stack | |
3665 | gets a clean outgoing argument block. */ | |
3666 | old_stack_arg_under_construction = stack_arg_under_construction; | |
3667 | stack_arg_under_construction = 0; | |
60ecc450 | 3668 | } |
0e0be288 | 3669 | argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0); |
8c0dd614 | 3670 | if (flag_stack_usage_info) |
990495a7 | 3671 | current_function_has_unbounded_dynamic_stack_size = 1; |
66d433c7 | 3672 | } |
60ecc450 | 3673 | else |
3674 | { | |
3675 | /* Note that we must go through the motions of allocating an argument | |
3676 | block even if the size is zero because we may be storing args | |
3677 | in the area reserved for register arguments, which may be part of | |
3678 | the stack frame. */ | |
7221f864 | 3679 | |
0e0be288 | 3680 | int needed = adjusted_args_size.constant; |
66d433c7 | 3681 | |
60ecc450 | 3682 | /* Store the maximum argument space used. It will be pushed by |
3683 | the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow | |
3684 | checking). */ | |
66d433c7 | 3685 | |
abe32cce | 3686 | if (needed > crtl->outgoing_args_size) |
3687 | crtl->outgoing_args_size = needed; | |
66d433c7 | 3688 | |
60ecc450 | 3689 | if (must_preallocate) |
3690 | { | |
4448f543 | 3691 | if (ACCUMULATE_OUTGOING_ARGS) |
3692 | { | |
02510658 | 3693 | /* Since the stack pointer will never be pushed, it is |
3694 | possible for the evaluation of a parm to clobber | |
3695 | something we have already written to the stack. | |
3696 | Since most function calls on RISC machines do not use | |
3697 | the stack, this is uncommon, but must work correctly. | |
7221f864 | 3698 | |
4448f543 | 3699 | Therefore, we save any area of the stack that was already |
02510658 | 3700 | written and that we are using. Here we set up to do this |
3701 | by making a new stack usage map from the old one. The | |
c87678e4 | 3702 | actual save will be done by store_one_arg. |
7221f864 | 3703 | |
4448f543 | 3704 | Another approach might be to try to reorder the argument |
3705 | evaluations to avoid this conflicting stack usage. */ | |
7221f864 | 3706 | |
02510658 | 3707 | /* Since we will be writing into the entire argument area, |
3708 | the map must be allocated for its entire size, not just | |
3709 | the part that is the responsibility of the caller. */ | |
fa20f865 | 3710 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
63c68695 | 3711 | needed += reg_parm_stack_space; |
66d433c7 | 3712 | |
ccccd62c | 3713 | if (ARGS_GROW_DOWNWARD) |
3714 | highest_outgoing_arg_in_use | |
3715 | = MAX (initial_highest_arg_in_use, needed + 1); | |
3716 | else | |
3717 | highest_outgoing_arg_in_use | |
3718 | = MAX (initial_highest_arg_in_use, needed); | |
3719 | ||
dd045aee | 3720 | free (stack_usage_map_buf); |
4c36ffe6 | 3721 | stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); |
a331ea1b | 3722 | stack_usage_map = stack_usage_map_buf; |
66d433c7 | 3723 | |
4448f543 | 3724 | if (initial_highest_arg_in_use) |
8e547276 | 3725 | memcpy (stack_usage_map, initial_stack_usage_map, |
3726 | initial_highest_arg_in_use); | |
d1b03b62 | 3727 | |
4448f543 | 3728 | if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
93d3b7de | 3729 | memset (&stack_usage_map[initial_highest_arg_in_use], 0, |
4448f543 | 3730 | (highest_outgoing_arg_in_use |
3731 | - initial_highest_arg_in_use)); | |
3732 | needed = 0; | |
d1b03b62 | 3733 | |
02510658 | 3734 | /* The address of the outgoing argument list must not be |
3735 | copied to a register here, because argblock would be left | |
3736 | pointing to the wrong place after the call to | |
c87678e4 | 3737 | allocate_dynamic_stack_space below. */ |
d1b03b62 | 3738 | |
4448f543 | 3739 | argblock = virtual_outgoing_args_rtx; |
c87678e4 | 3740 | } |
4448f543 | 3741 | else |
7221f864 | 3742 | { |
4448f543 | 3743 | if (inhibit_defer_pop == 0) |
60ecc450 | 3744 | { |
4448f543 | 3745 | /* Try to reuse some or all of the pending_stack_adjust |
481feae3 | 3746 | to get this space. */ |
3747 | needed | |
c87678e4 | 3748 | = (combine_pending_stack_adjustment_and_call |
481feae3 | 3749 | (unadjusted_args_size, |
0e0be288 | 3750 | &adjusted_args_size, |
481feae3 | 3751 | preferred_unit_stack_boundary)); |
3752 | ||
3753 | /* combine_pending_stack_adjustment_and_call computes | |
3754 | an adjustment before the arguments are allocated. | |
3755 | Account for them and see whether or not the stack | |
3756 | needs to go up or down. */ | |
3757 | needed = unadjusted_args_size - needed; | |
3758 | ||
3759 | if (needed < 0) | |
4448f543 | 3760 | { |
481feae3 | 3761 | /* We're releasing stack space. */ |
3762 | /* ??? We can avoid any adjustment at all if we're | |
3763 | already aligned. FIXME. */ | |
3764 | pending_stack_adjust = -needed; | |
3765 | do_pending_stack_adjust (); | |
4448f543 | 3766 | needed = 0; |
3767 | } | |
c87678e4 | 3768 | else |
481feae3 | 3769 | /* We need to allocate space. We'll do that in |
3770 | push_block below. */ | |
3771 | pending_stack_adjust = 0; | |
60ecc450 | 3772 | } |
481feae3 | 3773 | |
3774 | /* Special case this because overhead of `push_block' in | |
3775 | this case is non-trivial. */ | |
4448f543 | 3776 | if (needed == 0) |
3777 | argblock = virtual_outgoing_args_rtx; | |
60ecc450 | 3778 | else |
ad3b56f3 | 3779 | { |
3780 | argblock = push_block (GEN_INT (needed), 0, 0); | |
ccccd62c | 3781 | if (ARGS_GROW_DOWNWARD) |
3782 | argblock = plus_constant (Pmode, argblock, needed); | |
ad3b56f3 | 3783 | } |
4448f543 | 3784 | |
02510658 | 3785 | /* We only really need to call `copy_to_reg' in the case |
3786 | where push insns are going to be used to pass ARGBLOCK | |
3787 | to a function call in ARGS. In that case, the stack | |
3788 | pointer changes value from the allocation point to the | |
3789 | call point, and hence the value of | |
3790 | VIRTUAL_OUTGOING_ARGS_RTX changes as well. But we might | |
3791 | as well always do it. */ | |
4448f543 | 3792 | argblock = copy_to_reg (argblock); |
9069face | 3793 | } |
3794 | } | |
3795 | } | |
60ecc450 | 3796 | |
9069face | 3797 | if (ACCUMULATE_OUTGOING_ARGS) |
3798 | { | |
3799 | /* The save/restore code in store_one_arg handles all | |
3800 | cases except one: a constructor call (including a C | |
3801 | function returning a BLKmode struct) to initialize | |
3802 | an argument. */ | |
3803 | if (stack_arg_under_construction) | |
3804 | { | |
63c68695 | 3805 | rtx push_size |
3806 | = GEN_INT (adjusted_args_size.constant | |
fa20f865 | 3807 | + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype |
22c61100 | 3808 | : TREE_TYPE (fndecl))) ? 0 |
63c68695 | 3809 | : reg_parm_stack_space)); |
9069face | 3810 | if (old_stack_level == 0) |
3811 | { | |
e9c97615 | 3812 | emit_stack_save (SAVE_BLOCK, &old_stack_level); |
9069face | 3813 | old_stack_pointer_delta = stack_pointer_delta; |
3814 | old_pending_adj = pending_stack_adjust; | |
3815 | pending_stack_adjust = 0; | |
3816 | /* stack_arg_under_construction says whether a stack | |
3817 | arg is being constructed at the old stack level. | |
3818 | Pushing the stack gets a clean outgoing argument | |
3819 | block. */ | |
3820 | old_stack_arg_under_construction | |
3821 | = stack_arg_under_construction; | |
3822 | stack_arg_under_construction = 0; | |
3823 | /* Make a new map for the new argument list. */ | |
dd045aee | 3824 | free (stack_usage_map_buf); |
43959b95 | 3825 | stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use); |
a331ea1b | 3826 | stack_usage_map = stack_usage_map_buf; |
9069face | 3827 | highest_outgoing_arg_in_use = 0; |
4448f543 | 3828 | } |
990495a7 | 3829 | /* We can pass TRUE as the 4th argument because we just |
3830 | saved the stack pointer and will restore it right after | |
3831 | the call. */ | |
2b34677f | 3832 | allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT, |
3833 | -1, true); | |
60ecc450 | 3834 | } |
a3585b90 | 3835 | |
9069face | 3836 | /* If argument evaluation might modify the stack pointer, |
3837 | copy the address of the argument list to a register. */ | |
3838 | for (i = 0; i < num_actuals; i++) | |
3839 | if (args[i].pass_on_stack) | |
3840 | { | |
3841 | argblock = copy_addr_to_reg (argblock); | |
3842 | break; | |
3843 | } | |
3844 | } | |
4c9e08a4 | 3845 | |
60ecc450 | 3846 | compute_argument_addresses (args, argblock, num_actuals); |
a3585b90 | 3847 | |
2d298c93 | 3848 | /* Stack is properly aligned, pops can't safely be deferred during |
3849 | the evaluation of the arguments. */ | |
3850 | NO_DEFER_POP; | |
3851 | ||
3a12804f | 3852 | /* Precompute all register parameters. It isn't safe to compute |
3853 | anything once we have started filling any specific hard regs. | |
3854 | TLS symbols sometimes need a call to resolve. Precompute | |
3855 | register parameters before any stack pointer manipulation | |
3856 | to avoid unaligned stack in the called function. */ | |
3857 | precompute_register_parameters (num_actuals, args, ®_parm_seen); | |
3858 | ||
2d298c93 | 3859 | OK_DEFER_POP; |
3860 | ||
bf29c577 | 3861 | /* Perform stack alignment before the first push (the last arg). */ |
3862 | if (argblock == 0 | |
85c35fbc | 3863 | && adjusted_args_size.constant > reg_parm_stack_space |
0e0be288 | 3864 | && adjusted_args_size.constant != unadjusted_args_size) |
ff92623c | 3865 | { |
60ecc450 | 3866 | /* When the stack adjustment is pending, we get better code |
3867 | by combining the adjustments. */ | |
c87678e4 | 3868 | if (pending_stack_adjust |
60ecc450 | 3869 | && ! inhibit_defer_pop) |
481feae3 | 3870 | { |
3871 | pending_stack_adjust | |
c87678e4 | 3872 | = (combine_pending_stack_adjustment_and_call |
481feae3 | 3873 | (unadjusted_args_size, |
0e0be288 | 3874 | &adjusted_args_size, |
481feae3 | 3875 | preferred_unit_stack_boundary)); |
3876 | do_pending_stack_adjust (); | |
3877 | } | |
60ecc450 | 3878 | else if (argblock == 0) |
0e0be288 | 3879 | anti_adjust_stack (GEN_INT (adjusted_args_size.constant |
60ecc450 | 3880 | - unadjusted_args_size)); |
60ecc450 | 3881 | } |
fa4f1f09 | 3882 | /* Now that the stack is properly aligned, pops can't safely |
3883 | be deferred during the evaluation of the arguments. */ | |
3884 | NO_DEFER_POP; | |
66d433c7 | 3885 | |
990495a7 | 3886 | /* Record the maximum pushed stack space size. We need to delay |
3887 | doing it this far to take into account the optimization done | |
3888 | by combine_pending_stack_adjustment_and_call. */ | |
8c0dd614 | 3889 | if (flag_stack_usage_info |
990495a7 | 3890 | && !ACCUMULATE_OUTGOING_ARGS |
3891 | && pass | |
3892 | && adjusted_args_size.var == 0) | |
3893 | { | |
3894 | int pushed = adjusted_args_size.constant + pending_stack_adjust; | |
3895 | if (pushed > current_function_pushed_stack_size) | |
3896 | current_function_pushed_stack_size = pushed; | |
3897 | } | |
3898 | ||
95672afe | 3899 | funexp = rtx_for_function_call (fndecl, addr); |
66d433c7 | 3900 | |
c2f47e15 | 3901 | if (CALL_EXPR_STATIC_CHAIN (exp)) |
3902 | static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp)); | |
4ee9c684 | 3903 | else |
3904 | static_chain_value = 0; | |
3905 | ||
4448f543 | 3906 | #ifdef REG_PARM_STACK_SPACE |
60ecc450 | 3907 | /* Save the fixed argument area if it's part of the caller's frame and |
3908 | is clobbered by argument setup for this call. */ | |
02510658 | 3909 | if (ACCUMULATE_OUTGOING_ARGS && pass) |
4448f543 | 3910 | save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, |
3911 | &low_to_save, &high_to_save); | |
41332f48 | 3912 | #endif |
66d433c7 | 3913 | |
60ecc450 | 3914 | /* Now store (and compute if necessary) all non-register parms. |
3915 | These come before register parms, since they can require block-moves, | |
3916 | which could clobber the registers used for register parms. | |
3917 | Parms which have partial registers are not stored here, | |
3918 | but we do preallocate space here if they want that. */ | |
66d433c7 | 3919 | |
60ecc450 | 3920 | for (i = 0; i < num_actuals; i++) |
eb940a48 | 3921 | { |
058a1b7a | 3922 | /* Delay bounds until all other args are stored. */ |
3923 | if (POINTER_BOUNDS_P (args[i].tree_value)) | |
3924 | continue; | |
3925 | else if (args[i].reg == 0 || args[i].pass_on_stack) | |
eb940a48 | 3926 | { |
3663becd | 3927 | rtx_insn *before_arg = get_last_insn (); |
eb940a48 | 3928 | |
ba83222c | 3929 | /* We don't allow passing huge (> 2^30 B) arguments |
3930 | by value. It would cause an overflow later on. */ | |
3931 | if (adjusted_args_size.constant | |
3932 | >= (1 << (HOST_BITS_PER_INT - 2))) | |
3933 | { | |
3934 | sorry ("passing too large argument on stack"); | |
3935 | continue; | |
3936 | } | |
3937 | ||
eb940a48 | 3938 | if (store_one_arg (&args[i], argblock, flags, |
3939 | adjusted_args_size.var != 0, | |
3940 | reg_parm_stack_space) | |
3941 | || (pass == 0 | |
3942 | && check_sibcall_argument_overlap (before_arg, | |
3943 | &args[i], 1))) | |
3944 | sibcall_failure = 1; | |
3945 | } | |
3946 | ||
4143d08b | 3947 | if (args[i].stack) |
b4eeceb9 | 3948 | call_fusage |
3949 | = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)), | |
3950 | gen_rtx_USE (VOIDmode, args[i].stack), | |
3951 | call_fusage); | |
eb940a48 | 3952 | } |
60ecc450 | 3953 | |
3954 | /* If we have a parm that is passed in registers but not in memory | |
3955 | and whose alignment does not permit a direct copy into registers, | |
3956 | make a group of pseudos that correspond to each register that we | |
3957 | will later fill. */ | |
3958 | if (STRICT_ALIGNMENT) | |
3959 | store_unaligned_arguments_into_pseudos (args, num_actuals); | |
3960 | ||
3961 | /* Now store any partially-in-registers parm. | |
3962 | This is the last place a block-move can happen. */ | |
3963 | if (reg_parm_seen) | |
3964 | for (i = 0; i < num_actuals; i++) | |
3965 | if (args[i].partial != 0 && ! args[i].pass_on_stack) | |
7ecc63d3 | 3966 | { |
3663becd | 3967 | rtx_insn *before_arg = get_last_insn (); |
7ecc63d3 | 3968 | |
a95e5776 | 3969 | /* On targets with weird calling conventions (e.g. PA) it's |
3970 | hard to ensure that all cases of argument overlap between | |
3971 | stack and registers work. Play it safe and bail out. */ | |
3972 | if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD) | |
3973 | { | |
3974 | sibcall_failure = 1; | |
3975 | break; | |
3976 | } | |
3977 | ||
57679d39 | 3978 | if (store_one_arg (&args[i], argblock, flags, |
3979 | adjusted_args_size.var != 0, | |
3980 | reg_parm_stack_space) | |
3981 | || (pass == 0 | |
3982 | && check_sibcall_argument_overlap (before_arg, | |
42b11544 | 3983 | &args[i], 1))) |
7ecc63d3 | 3984 | sibcall_failure = 1; |
3985 | } | |
66d433c7 | 3986 | |
53597a55 | 3987 | bool any_regs = false; |
3988 | for (i = 0; i < num_actuals; i++) | |
3989 | if (args[i].reg != NULL_RTX) | |
3990 | { | |
3991 | any_regs = true; | |
3992 | targetm.calls.call_args (args[i].reg, funtype); | |
3993 | } | |
3994 | if (!any_regs) | |
3995 | targetm.calls.call_args (pc_rtx, funtype); | |
3996 | ||
3997 | /* Figure out the register where the value, if any, will come back. */ | |
3998 | valreg = 0; | |
3999 | valbnd = 0; | |
4000 | if (TYPE_MODE (rettype) != VOIDmode | |
4001 | && ! structure_value_addr) | |
4002 | { | |
4003 | if (pcc_struct_value) | |
4004 | { | |
4005 | valreg = hard_function_value (build_pointer_type (rettype), | |
4006 | fndecl, NULL, (pass == 0)); | |
4007 | if (CALL_WITH_BOUNDS_P (exp)) | |
4008 | valbnd = targetm.calls. | |
4009 | chkp_function_value_bounds (build_pointer_type (rettype), | |
4010 | fndecl, (pass == 0)); | |
4011 | } | |
4012 | else | |
4013 | { | |
4014 | valreg = hard_function_value (rettype, fndecl, fntype, | |
4015 | (pass == 0)); | |
4016 | if (CALL_WITH_BOUNDS_P (exp)) | |
4017 | valbnd = targetm.calls.chkp_function_value_bounds (rettype, | |
4018 | fndecl, | |
4019 | (pass == 0)); | |
4020 | } | |
4021 | ||
4022 | /* If VALREG is a PARALLEL whose first member has a zero | |
4023 | offset, use that. This is for targets such as m68k that | |
4024 | return the same value in multiple places. */ | |
4025 | if (GET_CODE (valreg) == PARALLEL) | |
4026 | { | |
4027 | rtx elem = XVECEXP (valreg, 0, 0); | |
4028 | rtx where = XEXP (elem, 0); | |
4029 | rtx offset = XEXP (elem, 1); | |
4030 | if (offset == const0_rtx | |
4031 | && GET_MODE (where) == GET_MODE (valreg)) | |
4032 | valreg = where; | |
4033 | } | |
4034 | } | |
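/* Illustration (shape hedged): on a target like m68k that returns a
   pointer in both %d0 and %a0, VALREG arrives as something like

     (parallel:SI [(expr_list (reg:SI %d0) (const_int 0))
                   (expr_list (reg:SI %a0) (const_int 0))])

   The first element has offset 0 and the mode of the whole PARALLEL,
   so the code above simply takes (reg:SI %d0) as the value.  */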
4035 | ||
058a1b7a | 4036 | /* Store all bounds not passed in registers. */ |
4037 | for (i = 0; i < num_actuals; i++) | |
4038 | { | |
4039 | if (POINTER_BOUNDS_P (args[i].tree_value) | |
4040 | && !args[i].reg) | |
4041 | store_bounds (&args[i], | |
4042 | args[i].pointer_arg == -1 | |
4043 | ? NULL | |
4044 | : &args[args[i].pointer_arg]); | |
4045 | } | |
4046 | ||
60ecc450 | 4047 | /* If register arguments require space on the stack and stack space |
4048 | was not preallocated, allocate stack space here for arguments | |
4049 | passed in registers. */ | |
fa20f865 | 4050 | if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))) |
22c61100 | 4051 | && !ACCUMULATE_OUTGOING_ARGS |
c87678e4 | 4052 | && must_preallocate == 0 && reg_parm_stack_space > 0) |
60ecc450 | 4053 | anti_adjust_stack (GEN_INT (reg_parm_stack_space)); |
985adbca | 4054 | |
60ecc450 | 4055 | /* Pass the function the address in which to return a |
4056 | structure value. */ | |
4057 | if (pass != 0 && structure_value_addr && ! structure_value_addr_parm) | |
4058 | { | |
0d568ddf | 4059 | structure_value_addr |
85d654dd | 4060 | = convert_memory_address (Pmode, structure_value_addr); |
45550790 | 4061 | emit_move_insn (struct_value, |
60ecc450 | 4062 | force_reg (Pmode, |
4063 | force_operand (structure_value_addr, | |
4064 | NULL_RTX))); | |
4065 | ||
8ad4c111 | 4066 | if (REG_P (struct_value)) |
45550790 | 4067 | use_reg (&call_fusage, struct_value); |
60ecc450 | 4068 | } |
02c736f4 | 4069 | |
c0e7e9f7 | 4070 | after_args = get_last_insn (); |
88f80691 | 4071 | funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp, |
4072 | static_chain_value, &call_fusage, | |
4073 | reg_parm_seen, flags); | |
e726704c | 4074 | |
42b11544 | 4075 | load_register_parameters (args, num_actuals, &call_fusage, flags, |
4076 | pass == 0, &sibcall_failure); | |
c87678e4 | 4077 | |
60ecc450 | 4078 | /* Save a pointer to the last insn before the call, so that we can |
4079 | later safely search backwards to find the CALL_INSN. */ | |
4080 | before_call = get_last_insn (); | |
66d433c7 | 4081 | |
7a8d641b | 4082 | /* Set up next argument register. For sibling calls on machines |
4083 | with register windows this should be the incoming register. */ | |
7a8d641b | 4084 | if (pass == 0) |
39cba157 | 4085 | next_arg_reg = targetm.calls.function_incoming_arg (args_so_far, |
f387af4f | 4086 | VOIDmode, |
4087 | void_type_node, | |
4088 | true); | |
7a8d641b | 4089 | else |
39cba157 | 4090 | next_arg_reg = targetm.calls.function_arg (args_so_far, |
f387af4f | 4091 | VOIDmode, void_type_node, |
4092 | true); | |
7a8d641b | 4093 | |
c8010b80 | 4094 | if (pass == 1 && (return_flags & ERF_RETURNS_ARG)) |
4095 | { | |
4096 | int arg_nr = return_flags & ERF_RETURN_ARG_MASK; | |
bf29c577 | 4097 | arg_nr = num_actuals - arg_nr - 1; |
3d38d682 | 4098 | if (arg_nr >= 0 |
4099 | && arg_nr < num_actuals | |
4100 | && args[arg_nr].reg | |
c8010b80 | 4101 | && valreg |
4102 | && REG_P (valreg) | |
4103 | && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg)) | |
4104 | call_fusage | |
4105 | = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)), | |
d1f9b275 | 4106 | gen_rtx_SET (valreg, args[arg_nr].reg), |
c8010b80 | 4107 | call_fusage); |
4108 | } | |
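/* A runnable sketch of the index flip above: ARGS is laid out in
   reverse of source order, so the "returns its Kth argument"
   annotation must be remapped before indexing the array.  */
#include <stdio.h>

int main (void)
{
  int num_actuals = 3;
  for (int arg_nr = 0; arg_nr < num_actuals; arg_nr++)
    printf ("source arg %d -> args[%d]\n",
            arg_nr, num_actuals - arg_nr - 1);  /* 0->2, 1->1, 2->0 */
  return 0;
}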
60ecc450 | 4109 | /* All arguments and registers used for the call must be set up by |
4110 | now! */ | |
4111 | ||
481feae3 | 4112 | /* Stack must be properly aligned now. */ |
231bd014 | 4113 | gcc_assert (!pass |
4114 | || !(stack_pointer_delta % preferred_unit_stack_boundary)); | |
fa4f1f09 | 4115 | |
60ecc450 | 4116 | /* Generate the actual call instruction. */ |
4ee9c684 | 4117 | emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size, |
0e0be288 | 4118 | adjusted_args_size.constant, struct_value_size, |
7a8d641b | 4119 | next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, |
39cba157 | 4120 | flags, args_so_far); |
60ecc450 | 4121 | |
fcf56aaf | 4122 | if (flag_ipa_ra) |
2e3b0d0f | 4123 | { |
3663becd | 4124 | rtx_call_insn *last; |
4125 | rtx datum = NULL_RTX; | |
2e3b0d0f | 4126 | if (fndecl != NULL_TREE) |
4127 | { | |
4128 | datum = XEXP (DECL_RTL (fndecl), 0); | |
4129 | gcc_assert (datum != NULL_RTX | |
4130 | && GET_CODE (datum) == SYMBOL_REF); | |
4131 | } | |
4132 | last = last_call_insn (); | |
4133 | add_reg_note (last, REG_CALL_DECL, datum); | |
4134 | } | |
4135 | ||
c0e7e9f7 | 4136 | /* If the call setup or the call itself overlaps with any part |
4137 | of the argument setup, we probably clobbered our call address. |
4138 | In that case we can't do sibcalls. */ | |
4139 | if (pass == 0 | |
4140 | && check_sibcall_argument_overlap (after_args, 0, 0)) | |
4141 | sibcall_failure = 1; | |
4142 | ||
05d18e8b | 4143 | /* If a non-BLKmode value is returned at the most significant end |
4144 | of a register, shift the register right by the appropriate amount | |
4145 | and update VALREG accordingly. BLKmode values are handled by the | |
4146 | group load/store machinery below. */ | |
4147 | if (!structure_value_addr | |
4148 | && !pcc_struct_value | |
d8ef55fc | 4149 | && TYPE_MODE (rettype) != VOIDmode |
16c9337c | 4150 | && TYPE_MODE (rettype) != BLKmode |
d8ef55fc | 4151 | && REG_P (valreg) |
16c9337c | 4152 | && targetm.calls.return_in_msb (rettype)) |
05d18e8b | 4153 | { |
16c9337c | 4154 | if (shift_return_value (TYPE_MODE (rettype), false, valreg)) |
05d18e8b | 4155 | sibcall_failure = 1; |
16c9337c | 4156 | valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg)); |
05d18e8b | 4157 | } |
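/* A runnable sketch of the shift that return_in_msb implies: an
   N-byte value sitting at the top of a W-byte register must move
   right by (W - N) * 8 bits.  An 8-byte word size is assumed.  */
#include <stdio.h>

int main (void)
{
  unsigned int word_bytes = 8;   /* e.g. a 64-bit return register */
  unsigned int value_bytes = 2;  /* a short returned in the MSB */
  printf ("shift right by %u bits\n",
          (word_bytes - value_bytes) * 8);  /* 48 */
  return 0;
}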
4158 | ||
2dd6f9ed | 4159 | if (pass && (flags & ECF_MALLOC)) |
60ecc450 | 4160 | { |
4161 | rtx temp = gen_reg_rtx (GET_MODE (valreg)); | |
3663becd | 4162 | rtx_insn *last, *insns; |
60ecc450 | 4163 | |
c87678e4 | 4164 | /* The return value from a malloc-like function is a pointer. */ |
16c9337c | 4165 | if (TREE_CODE (rettype) == POINTER_TYPE) |
10836fcc | 4166 | mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT); |
60ecc450 | 4167 | |
4168 | emit_move_insn (temp, valreg); | |
4169 | ||
4170 | /* The return value from a malloc-like function cannot alias |
4171 | anything else. */ | |
4172 | last = get_last_insn (); | |
a1ddb869 | 4173 | add_reg_note (last, REG_NOALIAS, temp); |
60ecc450 | 4174 | |
4175 | /* Write out the sequence. */ | |
4176 | insns = get_insns (); | |
4177 | end_sequence (); | |
31d3e01c | 4178 | emit_insn (insns); |
60ecc450 | 4179 | valreg = temp; |
4180 | } | |
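/* Why the REG_NOALIAS note is safe: a malloc-like function returns
   fresh storage, so its result cannot alias any pre-existing pointer.
   A runnable source-level picture of the guarantee:  */
#include <stdlib.h>

static int noalias_demo (int *p)
{
  int *q = malloc (sizeof *q);  /* q aliases nothing pre-existing */
  if (!q)
    return 1;
  *p = 1;
  *q = 2;
  int r = *p;  /* can be folded to 1: the store to *q cannot hit *p */
  free (q);
  return r;
}

int main (void)
{
  int x = 0;
  return noalias_demo (&x) - 1;  /* exits 0 */
}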
66d433c7 | 4181 | |
3072d30e | 4182 | /* For calls to `setjmp', etc., inform |
4183 | function.c:setjmp_warnings that it should complain if | |
4184 | nonvolatile values are live. For functions that cannot | |
4185 | return, inform flow that control does not fall through. */ | |
66d433c7 | 4186 | |
4fec1d6c | 4187 | if ((flags & ECF_NORETURN) || pass == 0) |
02c736f4 | 4188 | { |
9239aee6 | 4189 | /* The barrier must be emitted |
60ecc450 | 4190 | immediately after the CALL_INSN. Some ports emit more |
4191 | than just a CALL_INSN above, so we must search for it here. */ | |
66d433c7 | 4192 | |
3663becd | 4193 | rtx_insn *last = get_last_insn (); |
6d7dc5b9 | 4194 | while (!CALL_P (last)) |
60ecc450 | 4195 | { |
4196 | last = PREV_INSN (last); | |
4197 | /* There was no CALL_INSN? */ | |
231bd014 | 4198 | gcc_assert (last != before_call); |
60ecc450 | 4199 | } |
66d433c7 | 4200 | |
9239aee6 | 4201 | emit_barrier_after (last); |
20f5f6d0 | 4202 | |
b494d193 | 4203 | /* Stack adjustments after a noreturn call are dead code. |
4204 | However when NO_DEFER_POP is in effect, we must preserve | |
4205 | stack_pointer_delta. */ | |
4206 | if (inhibit_defer_pop == 0) | |
4207 | { | |
4208 | stack_pointer_delta = old_stack_allocated; | |
4209 | pending_stack_adjust = 0; | |
4210 | } | |
60ecc450 | 4211 | } |
66d433c7 | 4212 | |
60ecc450 | 4213 | /* If value type not void, return an rtx for the value. */ |
66d433c7 | 4214 | |
16c9337c | 4215 | if (TYPE_MODE (rettype) == VOIDmode |
60ecc450 | 4216 | || ignore) |
5edaabad | 4217 | target = const0_rtx; |
60ecc450 | 4218 | else if (structure_value_addr) |
4219 | { | |
e16ceb8e | 4220 | if (target == 0 || !MEM_P (target)) |
60ecc450 | 4221 | { |
f7c44134 | 4222 | target |
16c9337c | 4223 | = gen_rtx_MEM (TYPE_MODE (rettype), |
4224 | memory_address (TYPE_MODE (rettype), | |
f7c44134 | 4225 | structure_value_addr)); |
16c9337c | 4226 | set_mem_attributes (target, rettype, 1); |
60ecc450 | 4227 | } |
4228 | } | |
4229 | else if (pcc_struct_value) | |
566d850a | 4230 | { |
60ecc450 | 4231 | /* This is the special C++ case where we need to |
4232 | know what the true target was. We take care to | |
4233 | never use this value more than once in one expression. */ | |
16c9337c | 4234 | target = gen_rtx_MEM (TYPE_MODE (rettype), |
60ecc450 | 4235 | copy_to_reg (valreg)); |
16c9337c | 4236 | set_mem_attributes (target, rettype, 1); |
566d850a | 4237 | } |
60ecc450 | 4238 | /* Handle calls that return values in multiple non-contiguous locations. |
4239 | The Irix 6 ABI has examples of this. */ | |
4240 | else if (GET_CODE (valreg) == PARALLEL) | |
4241 | { | |
4ee9c684 | 4242 | if (target == 0) |
2d0fd66d | 4243 | target = emit_group_move_into_temps (valreg); |
5bd5c1c2 | 4244 | else if (rtx_equal_p (target, valreg)) |
4245 | ; | |
4246 | else if (GET_CODE (target) == PARALLEL) | |
4247 | /* Handle the result of an emit_group_move_into_temps |
4248 | call in the previous pass. */ | |
4249 | emit_group_move (target, valreg); | |
4250 | else | |
16c9337c | 4251 | emit_group_store (target, valreg, rettype, |
4252 | int_size_in_bytes (rettype)); | |
60ecc450 | 4253 | } |
4254 | else if (target | |
16c9337c | 4255 | && GET_MODE (target) == TYPE_MODE (rettype) |
60ecc450 | 4256 | && GET_MODE (target) == GET_MODE (valreg)) |
4257 | { | |
aadbaa40 | 4258 | bool may_overlap = false; |
4259 | ||
360738f1 | 4260 | /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard |
4261 | reg to a plain register. */ | |
90af1361 | 4262 | if (!REG_P (target) || HARD_REGISTER_P (target)) |
4263 | valreg = avoid_likely_spilled_reg (valreg); | |
360738f1 | 4264 | |
aadbaa40 | 4265 | /* If TARGET is a MEM in the argument area, and we have |
4266 | saved part of the argument area, then we can't store | |
4267 | directly into TARGET as it may get overwritten when we | |
4268 | restore the argument save area below. Don't work too | |
4269 | hard though and simply force TARGET to a register if it | |
4270 | is a MEM; the optimizer is quite likely to sort it out. */ | |
4271 | if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target)) | |
4272 | for (i = 0; i < num_actuals; i++) | |
4273 | if (args[i].save_area) | |
4274 | { | |
4275 | may_overlap = true; | |
4276 | break; | |
4277 | } | |
dbe1f550 | 4278 | |
aadbaa40 | 4279 | if (may_overlap) |
4280 | target = copy_to_reg (valreg); | |
4281 | else | |
4282 | { | |
4283 | /* TARGET and VALREG cannot be equal at this point | |
4284 | because the latter would not have | |
4285 | REG_FUNCTION_VALUE_P true, while the former would if | |
4286 | it were referring to the same register. | |
4287 | ||
4288 | If they refer to the same register, this move will be | |
4289 | a no-op, except when function inlining is being | |
4290 | done. */ | |
4291 | emit_move_insn (target, valreg); | |
4292 | ||
4293 | /* If we are setting a MEM, this code must be executed. | |
4294 | Since it is emitted after the call insn, sibcall | |
4295 | optimization cannot be performed in that case. */ | |
4296 | if (MEM_P (target)) | |
4297 | sibcall_failure = 1; | |
4298 | } | |
60ecc450 | 4299 | } |
60ecc450 | 4300 | else |
90af1361 | 4301 | target = copy_to_reg (avoid_likely_spilled_reg (valreg)); |
66d433c7 | 4302 | |
3b2411a8 | 4303 | /* If we promoted this return value, make the proper SUBREG. |
4304 | TARGET might be const0_rtx here, so be careful. */ | |
4305 | if (REG_P (target) | |
16c9337c | 4306 | && TYPE_MODE (rettype) != BLKmode |
4307 | && GET_MODE (target) != TYPE_MODE (rettype)) | |
45550790 | 4308 | { |
16c9337c | 4309 | tree type = rettype; |
3b2411a8 | 4310 | int unsignedp = TYPE_UNSIGNED (type); |
3754d046 | 4311 | machine_mode pmode; |
3b2411a8 | 4312 | |
4313 | /* Ensure we promote as expected, and get the new unsignedness. */ | |
4314 | pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, | |
4315 | funtype, 1); | |
4316 | gcc_assert (GET_MODE (target) == pmode); | |
4317 | ||
9edf7ea8 | 4318 | poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type), |
4319 | GET_MODE (target)); | |
3b2411a8 | 4320 | target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset); |
4321 | SUBREG_PROMOTED_VAR_P (target) = 1; | |
e8629f9e | 4322 | SUBREG_PROMOTED_SET (target, unsignedp); |
45550790 | 4323 | } |
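/* A runnable sketch of subreg_lowpart_offset's arithmetic for scalar
   modes (assuming byte and word endianness agree): the low part of a
   wider register is at byte 0 on little-endian targets and at
   (outer size - inner size) on big-endian ones.  */
#include <stdio.h>

static unsigned int lowpart_offset (unsigned int inner_bytes,
                                    unsigned int outer_bytes,
                                    int big_endian)
{
  return big_endian ? outer_bytes - inner_bytes : 0;
}

int main (void)
{
  printf ("%u\n", lowpart_offset (1, 4, 0));  /* QImode in SImode, LE: 0 */
  printf ("%u\n", lowpart_offset (1, 4, 1));  /* QImode in SImode, BE: 3 */
  return 0;
}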
23eb5fa6 | 4324 | |
60ecc450 | 4325 | /* If size of args is variable or this was a constructor call for a stack |
4326 | argument, restore saved stack-pointer value. */ | |
66d433c7 | 4327 | |
ff3ae375 | 4328 | if (old_stack_level) |
60ecc450 | 4329 | { |
3663becd | 4330 | rtx_insn *prev = get_last_insn (); |
dfe00a8f | 4331 | |
e9c97615 | 4332 | emit_stack_restore (SAVE_BLOCK, old_stack_level); |
9069face | 4333 | stack_pointer_delta = old_stack_pointer_delta; |
dfe00a8f | 4334 | |
897445c7 | 4335 | fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta); |
dfe00a8f | 4336 | |
60ecc450 | 4337 | pending_stack_adjust = old_pending_adj; |
80f06481 | 4338 | old_stack_allocated = stack_pointer_delta - pending_stack_adjust; |
60ecc450 | 4339 | stack_arg_under_construction = old_stack_arg_under_construction; |
4340 | highest_outgoing_arg_in_use = initial_highest_arg_in_use; | |
4341 | stack_usage_map = initial_stack_usage_map; | |
60ecc450 | 4342 | sibcall_failure = 1; |
4343 | } | |
02510658 | 4344 | else if (ACCUMULATE_OUTGOING_ARGS && pass) |
60ecc450 | 4345 | { |
66d433c7 | 4346 | #ifdef REG_PARM_STACK_SPACE |
60ecc450 | 4347 | if (save_area) |
6e96b626 | 4348 | restore_fixed_argument_area (save_area, argblock, |
4349 | high_to_save, low_to_save); | |
41332f48 | 4350 | #endif |
66d433c7 | 4351 | |
60ecc450 | 4352 | /* If we saved any argument areas, restore them. */ |
4353 | for (i = 0; i < num_actuals; i++) | |
4354 | if (args[i].save_area) | |
4355 | { | |
3754d046 | 4356 | machine_mode save_mode = GET_MODE (args[i].save_area); |
60ecc450 | 4357 | rtx stack_area |
4358 | = gen_rtx_MEM (save_mode, | |
4359 | memory_address (save_mode, | |
4360 | XEXP (args[i].stack_slot, 0))); | |
4361 | ||
4362 | if (save_mode != BLKmode) | |
4363 | emit_move_insn (stack_area, args[i].save_area); | |
4364 | else | |
0378dbdc | 4365 | emit_block_move (stack_area, args[i].save_area, |
241399f6 | 4366 | GEN_INT (args[i].locate.size.constant), |
0378dbdc | 4367 | BLOCK_OP_CALL_PARM); |
60ecc450 | 4368 | } |
66d433c7 | 4369 | |
60ecc450 | 4370 | highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
4371 | stack_usage_map = initial_stack_usage_map; | |
4372 | } | |
66d433c7 | 4373 | |
97354ae4 | 4374 | /* If this was alloca, record the new stack level. */ |
4375 | if (flags & ECF_MAY_BE_ALLOCA) | |
4376 | record_new_stack_level (); | |
66d433c7 | 4377 | |
60ecc450 | 4378 | /* Free up storage we no longer need. */ |
4379 | for (i = 0; i < num_actuals; ++i) | |
dd045aee | 4380 | free (args[i].aligned_regs); |
60ecc450 | 4381 | |
53597a55 | 4382 | targetm.calls.end_call_args (); |
4383 | ||
60ecc450 | 4384 | insns = get_insns (); |
4385 | end_sequence (); | |
4386 | ||
4387 | if (pass == 0) | |
4388 | { | |
4389 | tail_call_insns = insns; | |
4390 | ||
60ecc450 | 4391 | /* Restore the pending stack adjustment now that we have |
4392 | finished generating the sibling call sequence. */ | |
91b70175 | 4393 | |
b6d206a2 | 4394 | restore_pending_stack_adjust (&save); |
0e0be288 | 4395 | |
4396 | /* Prepare arg structure for next iteration. */ | |
c87678e4 | 4397 | for (i = 0; i < num_actuals; i++) |
0e0be288 | 4398 | { |
4399 | args[i].value = 0; | |
4400 | args[i].aligned_regs = 0; | |
4401 | args[i].stack = 0; | |
4402 | } | |
7ecc63d3 | 4403 | |
4404 | sbitmap_free (stored_args_map); | |
3663becd | 4405 | internal_arg_pointer_exp_state.scan_start = NULL; |
f1f41a6c | 4406 | internal_arg_pointer_exp_state.cache.release (); |
60ecc450 | 4407 | } |
4408 | else | |
9069face | 4409 | { |
4410 | normal_call_insns = insns; | |
4411 | ||
4412 | /* Verify that we've deallocated all the stack we used. */ | |
4fec1d6c | 4413 | gcc_assert ((flags & ECF_NORETURN) |
231bd014 | 4414 | || (old_stack_allocated |
4415 | == stack_pointer_delta - pending_stack_adjust)); | |
9069face | 4416 | } |
ae8d6151 | 4417 | |
4418 | /* If something prevents making this a sibling call, | |
4419 | zero out the sequence. */ | |
4420 | if (sibcall_failure) | |
3663becd | 4421 | tail_call_insns = NULL; |
4ee9c684 | 4422 | else |
4423 | break; | |
60ecc450 | 4424 | } |
4425 | ||
365db11e | 4426 | /* If tail call production succeeded, we need to remove REG_EQUIV notes on |
4ee9c684 | 4427 | arguments too, as the argument area is now clobbered by the call. */ |
4428 | if (tail_call_insns) | |
60ecc450 | 4429 | { |
4ee9c684 | 4430 | emit_insn (tail_call_insns); |
18d50ae6 | 4431 | crtl->tail_call_emit = true; |
60ecc450 | 4432 | } |
4433 | else | |
b4a61e77 | 4434 | { |
4435 | emit_insn (normal_call_insns); | |
4436 | if (try_tail_call) | |
4437 | /* Ideally we'd emit a message for all of the ways that it could | |
4438 | have failed. */ | |
4439 | maybe_complain_about_tail_call (exp, "tail call production failed"); | |
4440 | } | |
66d433c7 | 4441 | |
60ecc450 | 4442 | currently_expanding_call--; |
6d801f27 | 4443 | |
dd045aee | 4444 | free (stack_usage_map_buf); |
1f303606 | 4445 | free (args); |
a331ea1b | 4446 | |
058a1b7a | 4447 | /* Join result with returned bounds so caller may use them if needed. */ |
4448 | target = chkp_join_splitted_slot (target, valbnd); | |
4449 | ||
66d433c7 | 4450 | return target; |
4451 | } | |
915e81b8 | 4452 | |
4ee9c684 | 4453 | /* A sibling call sequence invalidates any REG_EQUIV notes made for |
4454 | this function's incoming arguments. | |
4455 | ||
4456 | At the start of RTL generation we know the only REG_EQUIV notes | |
0a227ed5 | 4457 | in the rtl chain are those for incoming arguments, so we can look |
4458 | for REG_EQUIV notes between the start of the function and the | |
4459 | NOTE_INSN_FUNCTION_BEG. | |
4ee9c684 | 4460 | |
4461 | This is (slight) overkill. We could keep track of the highest | |
4462 | argument we clobber and be more selective in removing notes, but it | |
4463 | does not seem to be worth the effort. */ | |
0a227ed5 | 4464 | |
4ee9c684 | 4465 | void |
4466 | fixup_tail_calls (void) | |
4467 | { | |
3663becd | 4468 | rtx_insn *insn; |
0a227ed5 | 4469 | |
4470 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
4471 | { | |
750a330e | 4472 | rtx note; |
4473 | ||
0a227ed5 | 4474 | /* There are never REG_EQUIV notes for the incoming arguments |
4475 | after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ | |
4476 | if (NOTE_P (insn) | |
ad4583d9 | 4477 | && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) |
0a227ed5 | 4478 | break; |
4479 | ||
750a330e | 4480 | note = find_reg_note (insn, REG_EQUIV, 0); |
4481 | if (note) | |
4482 | remove_note (insn, note); | |
4483 | note = find_reg_note (insn, REG_EQUIV, 0); | |
4484 | gcc_assert (!note); | |
0a227ed5 | 4485 | } |
4ee9c684 | 4486 | } |
4487 | ||
915e81b8 | 4488 | /* Traverse a list of TYPES and expand all complex types into their |
4489 | components. */ | |
5ab29745 | 4490 | static tree |
915e81b8 | 4491 | split_complex_types (tree types) |
4492 | { | |
4493 | tree p; | |
4494 | ||
92d40bc4 | 4495 | /* Before allocating memory, check for the common case of no complex. */ |
4496 | for (p = types; p; p = TREE_CHAIN (p)) | |
4497 | { | |
4498 | tree type = TREE_VALUE (p); | |
4499 | if (TREE_CODE (type) == COMPLEX_TYPE | |
4500 | && targetm.calls.split_complex_arg (type)) | |
a0c938f0 | 4501 | goto found; |
92d40bc4 | 4502 | } |
4503 | return types; | |
4504 | ||
4505 | found: | |
915e81b8 | 4506 | types = copy_list (types); |
4507 | ||
4508 | for (p = types; p; p = TREE_CHAIN (p)) | |
4509 | { | |
4510 | tree complex_type = TREE_VALUE (p); | |
4511 | ||
92d40bc4 | 4512 | if (TREE_CODE (complex_type) == COMPLEX_TYPE |
4513 | && targetm.calls.split_complex_arg (complex_type)) | |
915e81b8 | 4514 | { |
4515 | tree next, imag; | |
4516 | ||
4517 | /* Rewrite complex type with component type. */ | |
4518 | TREE_VALUE (p) = TREE_TYPE (complex_type); | |
4519 | next = TREE_CHAIN (p); | |
4520 | ||
4521 | /* Add another component type for the imaginary part. */ | |
4522 | imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); | |
4523 | TREE_CHAIN (p) = imag; | |
4524 | TREE_CHAIN (imag) = next; | |
4525 | ||
4526 | /* Skip the newly created node. */ | |
4527 | p = TREE_CHAIN (p); | |
4528 | } | |
4529 | } | |
4530 | ||
4531 | return types; | |
4532 | } | |
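/* Example of the rewrite above: when the target splits complex
   arguments, the type list for
       void f (_Complex double, int);
   is rewritten, in effect, into the list for
       void f (double, double, int);
   with the two doubles being the real and imaginary components.  */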
66d433c7 | 4533 | \f |
9e9e5c15 | 4534 | /* Output a library call to function ORGFUN (a SYMBOL_REF rtx) |
4535 | for a value of mode OUTMODE, | |
4536 | with NARGS different arguments, passed as ARGS. | |
4537 | Store the return value if RETVAL is nonzero: store it in VALUE if | |
4538 | VALUE is nonnull, otherwise pick a convenient location. In either | |
4539 | case return the location of the stored value. | |
2a631e19 | 4540 | |
9e9e5c15 | 4541 | FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for |
4542 | `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for | |
4543 | other types of library calls. */ | |
4544 | ||
4545 | rtx | |
4c9e08a4 | 4546 | emit_library_call_value_1 (int retval, rtx orgfun, rtx value, |
4547 | enum libcall_type fn_type, | |
9e9e5c15 | 4548 | machine_mode outmode, int nargs, rtx_mode_t *args) |
b39693dd | 4549 | { |
9bdaf1ba | 4550 | /* Total size in bytes of all the stack-parms scanned so far. */ |
4551 | struct args_size args_size; | |
4552 | /* Size of arguments before any adjustments (such as rounding). */ | |
4553 | struct args_size original_args_size; | |
19cb6b50 | 4554 | int argnum; |
9bdaf1ba | 4555 | rtx fun; |
22c61100 | 4556 | /* TODO: choose the correct decl type of orgfun. Sadly this information |
4557 | isn't present here, so we default to the native calling ABI. */ |
60e2260d | 4558 | tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */ |
fa20f865 | 4559 | tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */ |
9bdaf1ba | 4560 | int count; |
9bdaf1ba | 4561 | rtx argblock = 0; |
39cba157 | 4562 | CUMULATIVE_ARGS args_so_far_v; |
4563 | cumulative_args_t args_so_far; | |
c87678e4 | 4564 | struct arg |
4565 | { | |
4566 | rtx value; | |
3754d046 | 4567 | machine_mode mode; |
c87678e4 | 4568 | rtx reg; |
4569 | int partial; | |
241399f6 | 4570 | struct locate_and_pad_arg_data locate; |
c87678e4 | 4571 | rtx save_area; |
4572 | }; | |
9bdaf1ba | 4573 | struct arg *argvec; |
4574 | int old_inhibit_defer_pop = inhibit_defer_pop; | |
4575 | rtx call_fusage = 0; | |
4576 | rtx mem_value = 0; | |
16204096 | 4577 | rtx valreg; |
9bdaf1ba | 4578 | int pcc_struct_value = 0; |
4579 | int struct_value_size = 0; | |
df4b504c | 4580 | int flags; |
9bdaf1ba | 4581 | int reg_parm_stack_space = 0; |
9bdaf1ba | 4582 | int needed; |
3663becd | 4583 | rtx_insn *before_call; |
8700bf9e | 4584 | bool have_push_fusage; |
771d21fa | 4585 | tree tfom; /* type_for_mode (outmode, 0) */ |
9bdaf1ba | 4586 | |
4448f543 | 4587 | #ifdef REG_PARM_STACK_SPACE |
9bdaf1ba | 4588 | /* Define the boundary of the register parm stack space that needs to be |
4589 | saved, if any. */ |
75a70cf9 | 4590 | int low_to_save = 0, high_to_save = 0; |
c87678e4 | 4591 | rtx save_area = 0; /* Place that it is saved. */ |
9bdaf1ba | 4592 | #endif |
4593 | ||
9bdaf1ba | 4594 | /* Size of the stack reserved for parameter registers. */ |
4595 | int initial_highest_arg_in_use = highest_outgoing_arg_in_use; | |
4596 | char *initial_stack_usage_map = stack_usage_map; | |
a331ea1b | 4597 | char *stack_usage_map_buf = NULL; |
9bdaf1ba | 4598 | |
45550790 | 4599 | rtx struct_value = targetm.calls.struct_value_rtx (0, 0); |
4600 | ||
9bdaf1ba | 4601 | #ifdef REG_PARM_STACK_SPACE |
9bdaf1ba | 4602 | reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0); |
9bdaf1ba | 4603 | #endif |
4604 | ||
1c1a1b9a | 4605 | /* By default, library functions cannot throw. */ |
df4b504c | 4606 | flags = ECF_NOTHROW; |
4607 | ||
ab7ccfa2 | 4608 | switch (fn_type) |
4609 | { | |
4610 | case LCT_NORMAL: | |
2a0c81bf | 4611 | break; |
ab7ccfa2 | 4612 | case LCT_CONST: |
2a0c81bf | 4613 | flags |= ECF_CONST; |
4614 | break; | |
ab7ccfa2 | 4615 | case LCT_PURE: |
2a0c81bf | 4616 | flags |= ECF_PURE; |
ab7ccfa2 | 4617 | break; |
ab7ccfa2 | 4618 | case LCT_NORETURN: |
4619 | flags |= ECF_NORETURN; | |
4620 | break; | |
4621 | case LCT_THROW: | |
1c1a1b9a | 4622 | flags &= ~ECF_NOTHROW; |
ab7ccfa2 | 4623 | break; |
0ff18307 | 4624 | case LCT_RETURNS_TWICE: |
4625 | flags = ECF_RETURNS_TWICE; | |
4626 | break; | |
ab7ccfa2 | 4627 | } |
9bdaf1ba | 4628 | fun = orgfun; |
4629 | ||
9bdaf1ba | 4630 | /* Ensure current function's preferred stack boundary is at least |
4631 | what we need. */ | |
edb7afe8 | 4632 | if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) |
4633 | crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
9bdaf1ba | 4634 | |
4635 | /* If this kind of value comes back in memory, | |
4636 | decide where in memory it should come back. */ | |
771d21fa | 4637 | if (outmode != VOIDmode) |
9bdaf1ba | 4638 | { |
dc24ddbd | 4639 | tfom = lang_hooks.types.type_for_mode (outmode, 0); |
45550790 | 4640 | if (aggregate_value_p (tfom, 0)) |
771d21fa | 4641 | { |
9bdaf1ba | 4642 | #ifdef PCC_STATIC_STRUCT_RETURN |
771d21fa | 4643 | rtx pointer_reg |
46b3ff29 | 4644 | = hard_function_value (build_pointer_type (tfom), 0, 0, 0); |
771d21fa | 4645 | mem_value = gen_rtx_MEM (outmode, pointer_reg); |
4646 | pcc_struct_value = 1; | |
4647 | if (value == 0) | |
4648 | value = gen_reg_rtx (outmode); | |
9bdaf1ba | 4649 | #else /* not PCC_STATIC_STRUCT_RETURN */ |
771d21fa | 4650 | struct_value_size = GET_MODE_SIZE (outmode); |
e16ceb8e | 4651 | if (value != 0 && MEM_P (value)) |
771d21fa | 4652 | mem_value = value; |
4653 | else | |
0ab48139 | 4654 | mem_value = assign_temp (tfom, 1, 1); |
9bdaf1ba | 4655 | #endif |
771d21fa | 4656 | /* This call returns a big structure. */ |
2dd6f9ed | 4657 | flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE); |
771d21fa | 4658 | } |
9bdaf1ba | 4659 | } |
771d21fa | 4660 | else |
4661 | tfom = void_type_node; | |
9bdaf1ba | 4662 | |
4663 | /* ??? Unfinished: must pass the memory address as an argument. */ | |
4664 | ||
4665 | /* Copy all the libcall-arguments out of the varargs data | |
4666 | and into a vector ARGVEC. | |
4667 | ||
4668 | Compute how to pass each argument. We only support a very small subset | |
4669 | of the full argument passing conventions to limit complexity here since | |
4670 | library functions shouldn't have many args. */ | |
4671 | ||
364c0c59 | 4672 | argvec = XALLOCAVEC (struct arg, nargs + 1); |
f0af5a88 | 4673 | memset (argvec, 0, (nargs + 1) * sizeof (struct arg)); |
9bdaf1ba | 4674 | |
e1efd914 | 4675 | #ifdef INIT_CUMULATIVE_LIBCALL_ARGS |
39cba157 | 4676 | INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun); |
e1efd914 | 4677 | #else |
39cba157 | 4678 | INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs); |
e1efd914 | 4679 | #endif |
39cba157 | 4680 | args_so_far = pack_cumulative_args (&args_so_far_v); |
9bdaf1ba | 4681 | |
4682 | args_size.constant = 0; | |
4683 | args_size.var = 0; | |
4684 | ||
4685 | count = 0; | |
4686 | ||
4687 | push_temp_slots (); | |
4688 | ||
4689 | /* If there's a structure value address to be passed, | |
4690 | either pass it in the special place, or pass it as an extra argument. */ | |
45550790 | 4691 | if (mem_value && struct_value == 0 && ! pcc_struct_value) |
9bdaf1ba | 4692 | { |
4693 | rtx addr = XEXP (mem_value, 0); | |
a0c938f0 | 4694 | |
9bdaf1ba | 4695 | nargs++; |
4696 | ||
a56c46d2 | 4697 | /* Make sure it is a reasonable operand for a move or push insn. */ |
4698 | if (!REG_P (addr) && !MEM_P (addr) | |
ca316360 | 4699 | && !(CONSTANT_P (addr) |
4700 | && targetm.legitimate_constant_p (Pmode, addr))) | |
a56c46d2 | 4701 | addr = force_operand (addr, NULL_RTX); |
4702 | ||
9bdaf1ba | 4703 | argvec[count].value = addr; |
4704 | argvec[count].mode = Pmode; | |
4705 | argvec[count].partial = 0; | |
4706 | ||
39cba157 | 4707 | argvec[count].reg = targetm.calls.function_arg (args_so_far, |
f387af4f | 4708 | Pmode, NULL_TREE, true); |
39cba157 | 4709 | gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode, |
f054eb3c | 4710 | NULL_TREE, 1) == 0); |
9bdaf1ba | 4711 | |
4712 | locate_and_pad_parm (Pmode, NULL_TREE, | |
2e735c0d | 4713 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
a0c938f0 | 4714 | 1, |
2e735c0d | 4715 | #else |
4716 | argvec[count].reg != 0, | |
4717 | #endif | |
2e090bf6 | 4718 | reg_parm_stack_space, 0, |
4719 | NULL_TREE, &args_size, &argvec[count].locate); | |
9bdaf1ba | 4720 | |
9bdaf1ba | 4721 | if (argvec[count].reg == 0 || argvec[count].partial != 0 |
4722 | || reg_parm_stack_space > 0) | |
241399f6 | 4723 | args_size.constant += argvec[count].locate.size.constant; |
9bdaf1ba | 4724 | |
39cba157 | 4725 | targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true); |
9bdaf1ba | 4726 | |
4727 | count++; | |
4728 | } | |
4729 | ||
9e9e5c15 | 4730 | for (unsigned int i = 0; count < nargs; i++, count++) |
9bdaf1ba | 4731 | { |
9e9e5c15 | 4732 | rtx val = args[i].first; |
4733 | machine_mode mode = args[i].second; | |
adaf4ef0 | 4734 | int unsigned_p = 0; |
9bdaf1ba | 4735 | |
4736 | /* We cannot convert the arg value to the mode the library wants here; | |
4737 | we must do it earlier, where we know the signedness of the arg. */ |
231bd014 | 4738 | gcc_assert (mode != BLKmode |
4739 | && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode)); | |
9bdaf1ba | 4740 | |
a56c46d2 | 4741 | /* Make sure it is a reasonable operand for a move or push insn. */ |
4742 | if (!REG_P (val) && !MEM_P (val) | |
ca316360 | 4743 | && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val))) |
a56c46d2 | 4744 | val = force_operand (val, NULL_RTX); |
4745 | ||
39cba157 | 4746 | if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1)) |
9bdaf1ba | 4747 | { |
ddaf7ad3 | 4748 | rtx slot; |
13f08ee7 | 4749 | int must_copy |
39cba157 | 4750 | = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1); |
ddaf7ad3 | 4751 | |
9c2a0c05 | 4752 | /* If this was a CONST function, it is now PURE since it |
4753 | reads memory. */ | |
5096b8b0 | 4754 | if (flags & ECF_CONST) |
4755 | { | |
4756 | flags &= ~ECF_CONST; | |
4757 | flags |= ECF_PURE; | |
4758 | } | |
4759 | ||
590c3166 | 4760 | if (MEM_P (val) && !must_copy) |
006e2d5a | 4761 | { |
4762 | tree val_expr = MEM_EXPR (val); | |
4763 | if (val_expr) | |
4764 | mark_addressable (val_expr); | |
4765 | slot = val; | |
4766 | } | |
41dc12b4 | 4767 | else |
ddaf7ad3 | 4768 | { |
dc24ddbd | 4769 | slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0), |
0ab48139 | 4770 | 1, 1); |
ddaf7ad3 | 4771 | emit_move_insn (slot, val); |
4772 | } | |
387bc205 | 4773 | |
a683e787 | 4774 | call_fusage = gen_rtx_EXPR_LIST (VOIDmode, |
4775 | gen_rtx_USE (VOIDmode, slot), | |
4776 | call_fusage); | |
ddaf7ad3 | 4777 | if (must_copy) |
4778 | call_fusage = gen_rtx_EXPR_LIST (VOIDmode, | |
4779 | gen_rtx_CLOBBER (VOIDmode, | |
4780 | slot), | |
4781 | call_fusage); | |
4782 | ||
9bdaf1ba | 4783 | mode = Pmode; |
ddaf7ad3 | 4784 | val = force_operand (XEXP (slot, 0), NULL_RTX); |
9bdaf1ba | 4785 | } |
9bdaf1ba | 4786 | |
adaf4ef0 | 4787 | mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0); |
9bdaf1ba | 4788 | argvec[count].mode = mode; |
adaf4ef0 | 4789 | argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p); |
39cba157 | 4790 | argvec[count].reg = targetm.calls.function_arg (args_so_far, mode, |
f387af4f | 4791 | NULL_TREE, true); |
9bdaf1ba | 4792 | |
9bdaf1ba | 4793 | argvec[count].partial |
39cba157 | 4794 | = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1); |
9bdaf1ba | 4795 | |
11fb947f | 4796 | if (argvec[count].reg == 0 |
4797 | || argvec[count].partial != 0 | |
4798 | || reg_parm_stack_space > 0) | |
4799 | { | |
4800 | locate_and_pad_parm (mode, NULL_TREE, | |
2e735c0d | 4801 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
11fb947f | 4802 | 1, |
2e735c0d | 4803 | #else |
11fb947f | 4804 | argvec[count].reg != 0, |
4805 | #endif | |
2e090bf6 | 4806 | reg_parm_stack_space, argvec[count].partial, |
11fb947f | 4807 | NULL_TREE, &args_size, &argvec[count].locate); |
4808 | args_size.constant += argvec[count].locate.size.constant; | |
4809 | gcc_assert (!argvec[count].locate.size.var); | |
4810 | } | |
4811 | #ifdef BLOCK_REG_PADDING | |
4812 | else | |
4813 | /* The argument is passed entirely in registers. See at which | |
4814 | end it should be padded. */ | |
4815 | argvec[count].locate.where_pad = | |
4816 | BLOCK_REG_PADDING (mode, NULL_TREE, | |
4817 | GET_MODE_SIZE (mode) <= UNITS_PER_WORD); | |
2e735c0d | 4818 | #endif |
9bdaf1ba | 4819 | |
39cba157 | 4820 | targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true); |
9bdaf1ba | 4821 | } |
9bdaf1ba | 4822 | |
9bdaf1ba | 4823 | /* If this machine requires an external definition for library |
4824 | functions, write one out. */ | |
4825 | assemble_external_libcall (fun); | |
4826 | ||
4827 | original_args_size = args_size; | |
91b70175 | 4828 | args_size.constant = (((args_size.constant |
4829 | + stack_pointer_delta | |
4830 | + STACK_BYTES - 1) | |
4831 | / STACK_BYTES | |
4832 | * STACK_BYTES) | |
4833 | - stack_pointer_delta); | |
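/* A runnable check of the alignment arithmetic above: round
   (args + delta) up to STACK_BYTES, then take delta back out, so the
   stack pointer ends up aligned once the arguments are pushed.  The
   numbers below are made up for illustration.  */
#include <stdio.h>

int main (void)
{
  int stack_bytes = 16;      /* an assumed PREFERRED_STACK_BOUNDARY / 8 */
  int args = 13, delta = 4;  /* raw argument bytes, pending adjustment */
  int rounded = ((args + delta + stack_bytes - 1)
                 / stack_bytes * stack_bytes) - delta;
  printf ("%d\n", rounded);  /* 28: delta + 28 == 32, a multiple of 16 */
  return 0;
}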
9bdaf1ba | 4834 | |
4835 | args_size.constant = MAX (args_size.constant, | |
4836 | reg_parm_stack_space); | |
4837 | ||
fa20f865 | 4838 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
63c68695 | 4839 | args_size.constant -= reg_parm_stack_space; |
9bdaf1ba | 4840 | |
abe32cce | 4841 | if (args_size.constant > crtl->outgoing_args_size) |
4842 | crtl->outgoing_args_size = args_size.constant; | |
9bdaf1ba | 4843 | |
8c0dd614 | 4844 | if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS) |
990495a7 | 4845 | { |
4846 | int pushed = args_size.constant + pending_stack_adjust; | |
4847 | if (pushed > current_function_pushed_stack_size) | |
4848 | current_function_pushed_stack_size = pushed; | |
4849 | } | |
4850 | ||
4448f543 | 4851 | if (ACCUMULATE_OUTGOING_ARGS) |
4852 | { | |
4853 | /* Since the stack pointer will never be pushed, it is possible for | |
4854 | the evaluation of a parm to clobber something we have already | |
4855 | written to the stack. Since most function calls on RISC machines | |
4856 | do not use the stack, this is uncommon, but must work correctly. | |
9bdaf1ba | 4857 | |
4448f543 | 4858 | Therefore, we save any area of the stack that was already written |
4859 | and that we are using. Here we set up to do this by making a new | |
4860 | stack usage map from the old one. | |
9bdaf1ba | 4861 | |
4448f543 | 4862 | Another approach might be to try to reorder the argument |
4863 | evaluations to avoid this conflicting stack usage. */ | |
9bdaf1ba | 4864 | |
4448f543 | 4865 | needed = args_size.constant; |
9bdaf1ba | 4866 | |
4448f543 | 4867 | /* Since we will be writing into the entire argument area, the |
4868 | map must be allocated for its entire size, not just the part that | |
4869 | is the responsibility of the caller. */ | |
fa20f865 | 4870 | if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))) |
63c68695 | 4871 | needed += reg_parm_stack_space; |
9bdaf1ba | 4872 | |
ccccd62c | 4873 | if (ARGS_GROW_DOWNWARD) |
4874 | highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, | |
4875 | needed + 1); | |
4876 | else | |
4877 | highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed); | |
4878 | ||
4c36ffe6 | 4879 | stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use); |
a331ea1b | 4880 | stack_usage_map = stack_usage_map_buf; |
9bdaf1ba | 4881 | |
4448f543 | 4882 | if (initial_highest_arg_in_use) |
8e547276 | 4883 | memcpy (stack_usage_map, initial_stack_usage_map, |
4884 | initial_highest_arg_in_use); | |
9bdaf1ba | 4885 | |
4448f543 | 4886 | if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
93d3b7de | 4887 | memset (&stack_usage_map[initial_highest_arg_in_use], 0, |
4448f543 | 4888 | highest_outgoing_arg_in_use - initial_highest_arg_in_use); |
4889 | needed = 0; | |
9bdaf1ba | 4890 | |
9c0a756f | 4891 | /* We must be careful to use virtual regs before they're instantiated, |
a0c938f0 | 4892 | and real regs afterwards. Loop optimization, for example, can create |
9c0a756f | 4893 | new libcalls after we've instantiated the virtual regs, and if we |
4894 | use virtuals anyway, they won't match the rtl patterns. */ | |
9bdaf1ba | 4895 | |
9c0a756f | 4896 | if (virtuals_instantiated) |
29c05e22 | 4897 | argblock = plus_constant (Pmode, stack_pointer_rtx, |
4898 | STACK_POINTER_OFFSET); | |
9c0a756f | 4899 | else |
4900 | argblock = virtual_outgoing_args_rtx; | |
4448f543 | 4901 | } |
4902 | else | |
4903 | { | |
4904 | if (!PUSH_ARGS) | |
4905 | argblock = push_block (GEN_INT (args_size.constant), 0, 0); | |
4906 | } | |
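/* A tiny model of the stack_usage_map logic above: one char per byte
   of the outgoing-argument area, nonzero meaning "already written".
   A save area is needed only when a new argument overlaps a byte that
   is already in use.  Sizes below are illustrative.  */
#include <stdio.h>
#include <string.h>

int main (void)
{
  char map[32];
  memset (map, 0, sizeof map);
  memset (map, 1, 8);          /* an earlier argument wrote bytes 0..7 */
  int lo = 4, hi = 12, i = lo; /* the next argument wants bytes 4..11 */
  while (i < hi && map[i] == 0)
    i++;
  if (i < hi)
    printf ("overlap at byte %d: save needed\n", i);  /* prints 4 */
  else
    printf ("no overlap\n");
  return 0;
}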
9bdaf1ba | 4907 | |
bf29c577 | 4908 | /* We push args individually in reverse order, perform stack alignment |
9bdaf1ba | 4909 | before the first push (the last arg). */ |
bf29c577 | 4910 | if (argblock == 0) |
9bdaf1ba | 4911 | anti_adjust_stack (GEN_INT (args_size.constant |
4912 | - original_args_size.constant)); | |
9bdaf1ba | 4913 | |
bf29c577 | 4914 | argnum = nargs - 1; |
9bdaf1ba | 4915 | |
4448f543 | 4916 | #ifdef REG_PARM_STACK_SPACE |
4917 | if (ACCUMULATE_OUTGOING_ARGS) | |
4918 | { | |
4919 | /* The argument list is the property of the called routine and it | |
4920 | may clobber it. If the fixed area has been used for previous | |
6e96b626 | 4921 | parameters, we must save and restore it. */ |
4922 | save_area = save_fixed_argument_area (reg_parm_stack_space, argblock, | |
4923 | &low_to_save, &high_to_save); | |
9bdaf1ba | 4924 | } |
4925 | #endif | |
c87678e4 | 4926 | |
53597a55 | 4927 | /* When expanding a normal call, args are stored in push order, |
4928 | which is the reverse of what we have here. */ | |
4929 | bool any_regs = false; | |
4930 | for (int i = nargs; i-- > 0; ) | |
4931 | if (argvec[i].reg != NULL_RTX) | |
4932 | { | |
4933 | targetm.calls.call_args (argvec[i].reg, NULL_TREE); | |
4934 | any_regs = true; | |
4935 | } | |
4936 | if (!any_regs) | |
4937 | targetm.calls.call_args (pc_rtx, NULL_TREE); | |
4938 | ||
9bdaf1ba | 4939 | /* Push the args that need to be pushed. */ |
4940 | ||
8700bf9e | 4941 | have_push_fusage = false; |
4942 | ||
9bdaf1ba | 4943 | /* ARGNUM indexes the ARGVEC array in the order in which the arguments |
4944 | are to be pushed. */ | |
bf29c577 | 4945 | for (count = 0; count < nargs; count++, argnum--) |
9bdaf1ba | 4946 | { |
3754d046 | 4947 | machine_mode mode = argvec[argnum].mode; |
19cb6b50 | 4948 | rtx val = argvec[argnum].value; |
9bdaf1ba | 4949 | rtx reg = argvec[argnum].reg; |
4950 | int partial = argvec[argnum].partial; | |
c2fd5e89 | 4951 | unsigned int parm_align = argvec[argnum].locate.boundary; |
4448f543 | 4952 | int lower_bound = 0, upper_bound = 0, i; |
9bdaf1ba | 4953 | |
4954 | if (! (reg != 0 && partial == 0)) | |
4955 | { | |
4143d08b | 4956 | rtx use; |
4957 | ||
4448f543 | 4958 | if (ACCUMULATE_OUTGOING_ARGS) |
4959 | { | |
02510658 | 4960 | /* If this is being stored into a pre-allocated, fixed-size, |
4961 | stack area, save any previous data at that location. */ | |
9bdaf1ba | 4962 | |
ccccd62c | 4963 | if (ARGS_GROW_DOWNWARD) |
4964 | { | |
4965 | /* stack_slot is negative, but we want to index stack_usage_map | |
4966 | with positive values. */ | |
4967 | upper_bound = -argvec[argnum].locate.slot_offset.constant + 1; | |
4968 | lower_bound = upper_bound - argvec[argnum].locate.size.constant; | |
4969 | } | |
4970 | else | |
4971 | { | |
4972 | lower_bound = argvec[argnum].locate.slot_offset.constant; | |
4973 | upper_bound = lower_bound + argvec[argnum].locate.size.constant; | |
4974 | } | |
9bdaf1ba | 4975 | |
fd2c0c1d | 4976 | i = lower_bound; |
4977 | /* Don't worry about things in the fixed argument area; | |
4978 | it has already been saved. */ | |
4979 | if (i < reg_parm_stack_space) | |
4980 | i = reg_parm_stack_space; | |
4981 | while (i < upper_bound && stack_usage_map[i] == 0) | |
4982 | i++; | |
9bdaf1ba | 4983 | |
fd2c0c1d | 4984 | if (i < upper_bound) |
4448f543 | 4985 | { |
241399f6 | 4986 | /* We need to make a save area. */ |
4987 | unsigned int size | |
4988 | = argvec[argnum].locate.size.constant * BITS_PER_UNIT; | |
3754d046 | 4989 | machine_mode save_mode |
517be012 | 4990 | = int_mode_for_size (size, 1).else_blk (); |
241399f6 | 4991 | rtx adr |
29c05e22 | 4992 | = plus_constant (Pmode, argblock, |
241399f6 | 4993 | argvec[argnum].locate.offset.constant); |
4448f543 | 4994 | rtx stack_area |
241399f6 | 4995 | = gen_rtx_MEM (save_mode, memory_address (save_mode, adr)); |
4448f543 | 4996 | |
f9c6a9c3 | 4997 | if (save_mode == BLKmode) |
4998 | { | |
4999 | argvec[argnum].save_area | |
5000 | = assign_stack_temp (BLKmode, | |
0ab48139 | 5001 | argvec[argnum].locate.size.constant |
5002 | ); | |
f9c6a9c3 | 5003 | |
d2b9158b | 5004 | emit_block_move (validize_mem |
5005 | (copy_rtx (argvec[argnum].save_area)), | |
a0c938f0 | 5006 | stack_area, |
f9c6a9c3 | 5007 | GEN_INT (argvec[argnum].locate.size.constant), |
5008 | BLOCK_OP_CALL_PARM); | |
5009 | } | |
5010 | else | |
5011 | { | |
5012 | argvec[argnum].save_area = gen_reg_rtx (save_mode); | |
5013 | ||
5014 | emit_move_insn (argvec[argnum].save_area, stack_area); | |
5015 | } | |
4448f543 | 5016 | } |
9bdaf1ba | 5017 | } |
325d1c45 | 5018 | |
c2fd5e89 | 5019 | emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align, |
0378dbdc | 5020 | partial, reg, 0, argblock, |
241399f6 | 5021 | GEN_INT (argvec[argnum].locate.offset.constant), |
5022 | reg_parm_stack_space, | |
a95e5776 | 5023 | ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false); |
9bdaf1ba | 5024 | |
9bdaf1ba | 5025 | /* Now mark the segment we just used. */ |
4448f543 | 5026 | if (ACCUMULATE_OUTGOING_ARGS) |
5027 | for (i = lower_bound; i < upper_bound; i++) | |
5028 | stack_usage_map[i] = 1; | |
9bdaf1ba | 5029 | |
5030 | NO_DEFER_POP; | |
2eb9302a | 5031 | |
4143d08b | 5032 | /* Indicate argument access so that alias.c knows that these |
5033 | values are live. */ | |
5034 | if (argblock) | |
29c05e22 | 5035 | use = plus_constant (Pmode, argblock, |
4143d08b | 5036 | argvec[argnum].locate.offset.constant); |
8700bf9e | 5037 | else if (have_push_fusage) |
5038 | continue; | |
4143d08b | 5039 | else |
8700bf9e | 5040 | { |
5041 | /* When arguments are pushed, trying to tell alias.c where | |
5042 | exactly this argument is won't work, because the | |
5043 | auto-increment causes confusion. So we merely indicate | |
5044 | that we access something with a known mode somewhere on | |
5045 | the stack. */ | |
5046 | use = gen_rtx_PLUS (Pmode, stack_pointer_rtx, | |
5047 | gen_rtx_SCRATCH (Pmode)); | |
5048 | have_push_fusage = true; | |
5049 | } | |
4143d08b | 5050 | use = gen_rtx_MEM (argvec[argnum].mode, use); |
5051 | use = gen_rtx_USE (VOIDmode, use); | |
5052 | call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage); | |
9bdaf1ba | 5053 | } |
5054 | } | |
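/* Illustration (shape hedged): for a pushed argument there is no
   stable offset to describe, so the USE recorded above looks roughly
   like

     (use (mem:SI (plus (reg sp) (scratch))))

   i.e. "an SImode slot at some unknown offset from the stack
   pointer", which is all alias.c needs to keep the value live.  */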
5055 | ||
bf29c577 | 5056 | argnum = nargs - 1; |
9bdaf1ba | 5057 | |
82c7907c | 5058 | fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0); |
9bdaf1ba | 5059 | |
5060 | /* Now load any reg parms into their regs. */ | |
5061 | ||
5062 | /* ARGNUM indexes the ARGVEC array in the order in which the arguments | |
5063 | are to be pushed. */ | |
bf29c577 | 5064 | for (count = 0; count < nargs; count++, argnum--) |
9bdaf1ba | 5065 | { |
3754d046 | 5066 | machine_mode mode = argvec[argnum].mode; |
19cb6b50 | 5067 | rtx val = argvec[argnum].value; |
9bdaf1ba | 5068 | rtx reg = argvec[argnum].reg; |
5069 | int partial = argvec[argnum].partial; | |
ab6e3ce0 | 5070 | #ifdef BLOCK_REG_PADDING |
37cd19a4 | 5071 | int size = 0; |
ab6e3ce0 | 5072 | #endif |
37cd19a4 | 5073 | |
9bdaf1ba | 5074 | /* Handle calls that pass values in multiple non-contiguous |
5075 | locations. The PA64 has examples of this for library calls. */ | |
5076 | if (reg != 0 && GET_CODE (reg) == PARALLEL) | |
bec917cc | 5077 | emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode)); |
9bdaf1ba | 5078 | else if (reg != 0 && partial == 0) |
37cd19a4 | 5079 | { |
5080 | emit_move_insn (reg, val); | |
5081 | #ifdef BLOCK_REG_PADDING | |
5082 | size = GET_MODE_SIZE (argvec[argnum].mode); | |
5083 | ||
5084 | /* Copied from load_register_parameters. */ | |
5085 | ||
5086 | /* Handle case where we have a value that needs shifting | |
5087 | up to the msb. eg. a QImode value and we're padding | |
5088 | upward on a BYTES_BIG_ENDIAN machine. */ | |
5089 | if (size < UNITS_PER_WORD | |
5090 | && (argvec[argnum].locate.where_pad | |
d7ab0e3d | 5091 | == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) |
37cd19a4 | 5092 | { |
5093 | rtx x; | |
5094 | int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; | |
5095 | ||
5096 | /* Assigning REG here rather than a temp makes CALL_FUSAGE | |
5097 | report the whole reg as used. Strictly speaking, the | |
5098 | call only uses SIZE bytes at the msb end, but it doesn't | |
5099 | seem worth generating rtl to say that. */ | |
5100 | reg = gen_rtx_REG (word_mode, REGNO (reg)); | |
5101 | x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1); | |
5102 | if (x != reg) | |
5103 | emit_move_insn (reg, x); | |
5104 | } | |
5105 | #endif | |
5106 | } | |
9bdaf1ba | 5107 | |
5108 | NO_DEFER_POP; | |
5109 | } | |
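/* A runnable sketch of the padding shift above: a QImode value in a
   big-endian word register must sit at the most-significant end, so
   it is shifted left by (UNITS_PER_WORD - size) * 8 bits.  A 4-byte
   word is assumed here.  */
#include <stdio.h>

int main (void)
{
  unsigned int val = 0xAB;            /* the QImode argument value */
  unsigned int shift = (4 - 1) * 8;   /* 24 bits */
  printf ("0x%08X\n", val << shift);  /* 0xAB000000: value at the msb */
  return 0;
}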
5110 | ||
9bdaf1ba | 5111 | /* Any regs containing parms remain in use through the call. */ |
5112 | for (count = 0; count < nargs; count++) | |
5113 | { | |
5114 | rtx reg = argvec[count].reg; | |
5115 | if (reg != 0 && GET_CODE (reg) == PARALLEL) | |
5116 | use_group_regs (&call_fusage, reg); | |
5117 | else if (reg != 0) | |
6c6f16e5 | 5118 | { |
5119 | int partial = argvec[count].partial; | |
5120 | if (partial) | |
5121 | { | |
5122 | int nregs; | |
5123 | gcc_assert (partial % UNITS_PER_WORD == 0); | |
5124 | nregs = partial / UNITS_PER_WORD; | |
5125 | use_regs (&call_fusage, REGNO (reg), nregs); | |
5126 | } | |
5127 | else | |
5128 | use_reg (&call_fusage, reg); | |
5129 | } | |
9bdaf1ba | 5130 | } |
5131 | ||
5132 | /* Pass the function the address in which to return a structure value. */ | |
45550790 | 5133 | if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value) |
9bdaf1ba | 5134 | { |
45550790 | 5135 | emit_move_insn (struct_value, |
9bdaf1ba | 5136 | force_reg (Pmode, |
5137 | force_operand (XEXP (mem_value, 0), | |
5138 | NULL_RTX))); | |
8ad4c111 | 5139 | if (REG_P (struct_value)) |
45550790 | 5140 | use_reg (&call_fusage, struct_value); |
9bdaf1ba | 5141 | } |
5142 | ||
5143 | /* Don't allow popping to be deferred, since then | |
5144 | cse'ing of library calls could delete a call and leave the pop. */ | |
5145 | NO_DEFER_POP; | |
16204096 | 5146 | valreg = (mem_value == 0 && outmode != VOIDmode |
578d1295 | 5147 | ? hard_libcall_value (outmode, orgfun) : NULL_RTX); |
9bdaf1ba | 5148 | |
481feae3 | 5149 | /* Stack must be properly aligned now. */ |
231bd014 | 5150 | gcc_assert (!(stack_pointer_delta |
5151 | & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))); | |
fa4f1f09 | 5152 | |
644c283b | 5153 | before_call = get_last_insn (); |
5154 | ||
9bdaf1ba | 5155 | /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which |
5156 | will set inhibit_defer_pop to that value. */ | |
20f7032f | 5157 | /* The return type is needed to decide how many bytes the function pops. |
5158 | Signedness plays no role in that, so for simplicity, we pretend it's | |
5159 | always signed. We also assume that the list of arguments passed has | |
5160 | no impact, so we pretend it is unknown. */ | |
9bdaf1ba | 5161 | |
4ee9c684 | 5162 | emit_call_1 (fun, NULL, |
c87678e4 | 5163 | get_identifier (XSTR (orgfun, 0)), |
771d21fa | 5164 | build_function_type (tfom, NULL_TREE), |
c87678e4 | 5165 | original_args_size.constant, args_size.constant, |
9bdaf1ba | 5166 | struct_value_size, |
39cba157 | 5167 | targetm.calls.function_arg (args_so_far, |
f387af4f | 5168 | VOIDmode, void_type_node, true), |
16204096 | 5169 | valreg, |
39cba157 | 5170 | old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far); |
9bdaf1ba | 5171 | |
fcf56aaf | 5172 | if (flag_ipa_ra) |
2e3b0d0f | 5173 | { |
9ed997be | 5174 | rtx datum = orgfun; |
2e3b0d0f | 5175 | gcc_assert (GET_CODE (datum) == SYMBOL_REF); |
9ed997be | 5176 | rtx_call_insn *last = last_call_insn (); |
2e3b0d0f | 5177 | add_reg_note (last, REG_CALL_DECL, datum); |
5178 | } | |
5179 | ||
37cd19a4 | 5180 | /* Right-shift returned value if necessary. */ |
5181 | if (!pcc_struct_value | |
5182 | && TYPE_MODE (tfom) != BLKmode | |
5183 | && targetm.calls.return_in_msb (tfom)) | |
5184 | { | |
5185 | shift_return_value (TYPE_MODE (tfom), false, valreg); | |
5186 | valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg)); | |
5187 | } | |
5188 | ||
53597a55 | 5189 | targetm.calls.end_call_args (); |
5190 | ||
3072d30e | 5191 | /* For calls to `setjmp', etc., inform function.c:setjmp_warnings |
5192 | that it should complain if nonvolatile values are live. For | |
5193 | functions that cannot return, inform flow that control does not | |
5194 | fall through. */ | |
4fec1d6c | 5195 | if (flags & ECF_NORETURN) |
644c283b | 5196 | { |
9239aee6 | 5197 | /* The barrier note must be emitted |
644c283b | 5198 | immediately after the CALL_INSN. Some ports emit more than |
5199 | just a CALL_INSN above, so we must search for it here. */ | |
3663becd | 5200 | rtx_insn *last = get_last_insn (); |
6d7dc5b9 | 5201 | while (!CALL_P (last)) |
644c283b | 5202 | { |
5203 | last = PREV_INSN (last); | |
5204 | /* There was no CALL_INSN? */ | |
231bd014 | 5205 | gcc_assert (last != before_call); |
644c283b | 5206 | } |
5207 | ||
9239aee6 | 5208 | emit_barrier_after (last); |
644c283b | 5209 | } |
5210 | ||
43926c6a | 5211 | /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW |
5212 | and LCT_RETURNS_TWICE, cannot perform non-local gotos. */ | |
5213 | if (flags & ECF_NOTHROW) | |
5214 | { | |
3663becd | 5215 | rtx_insn *last = get_last_insn (); |
43926c6a | 5216 | while (!CALL_P (last)) |
5217 | { | |
5218 | last = PREV_INSN (last); | |
5219 | /* There was no CALL_INSN? */ | |
5220 | gcc_assert (last != before_call); | |
5221 | } | |
5222 | ||
5223 | make_reg_eh_region_note_nothrow_nononlocal (last); | |
5224 | } | |
5225 | ||
9bdaf1ba | 5226 | /* Now restore inhibit_defer_pop to its actual original value. */ |
5227 | OK_DEFER_POP; | |
5228 | ||
5229 | pop_temp_slots (); | |
5230 | ||
5231 | /* Copy the value to the right place. */ | |
20f7032f | 5232 | if (outmode != VOIDmode && retval) |
9bdaf1ba | 5233 | { |
5234 | if (mem_value) | |
5235 | { | |
5236 | if (value == 0) | |
5237 | value = mem_value; | |
5238 | if (value != mem_value) | |
5239 | emit_move_insn (value, mem_value); | |
5240 | } | |
40651bac | 5241 | else if (GET_CODE (valreg) == PARALLEL) |
5242 | { | |
5243 | if (value == 0) | |
5244 | value = gen_reg_rtx (outmode); | |
4c3a0ea5 | 5245 | emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode)); |
40651bac | 5246 | } |
9bdaf1ba | 5247 | else |
4e1a3169 | 5248 | { |
3b2411a8 | 5249 | /* Convert to the proper mode if a promotion has been active. */ |
4e1a3169 | 5250 | if (GET_MODE (valreg) != outmode) |
5251 | { | |
5252 | int unsignedp = TYPE_UNSIGNED (tfom); | |
5253 | ||
3b2411a8 | 5254 | gcc_assert (promote_function_mode (tfom, outmode, &unsignedp, |
5255 | fndecl ? TREE_TYPE (fndecl) : fntype, 1) | |
4e1a3169 | 5256 | == GET_MODE (valreg)); |
4e1a3169 | 5257 | valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0); |
5258 | } | |
5259 | ||
5260 | if (value != 0) | |
5261 | emit_move_insn (value, valreg); | |
5262 | else | |
5263 | value = valreg; | |
5264 | } | |
9bdaf1ba | 5265 | } |
5266 | ||
4448f543 | 5267 | if (ACCUMULATE_OUTGOING_ARGS) |
9bdaf1ba | 5268 | { |
4448f543 | 5269 | #ifdef REG_PARM_STACK_SPACE |
5270 | if (save_area) | |
6e96b626 | 5271 | restore_fixed_argument_area (save_area, argblock, |
5272 | high_to_save, low_to_save); | |
9bdaf1ba | 5273 | #endif |
c87678e4 | 5274 | |
4448f543 | 5275 | /* If we saved any argument areas, restore them. */ |
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (Pmode, argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem
                                 (copy_rtx (argvec[count].save_area)),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  free (stack_usage_map_buf);

  return value;
}
\f

/* Store pointer bounds argument ARG into Bounds Table entry
   associated with PARM.  */
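/* The three cases below mirror how the pointer itself is passed: on the
   stack (bounds stored through ADDR), in a single register (a special
   SLOT plus the pointer value or its address), or as a PARALLEL (SLOT
   plus the relevant piece extracted from the parallel).  */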
static void
store_bounds (struct arg_data *arg, struct arg_data *parm)
{
  rtx slot = NULL, ptr = NULL, addr = NULL;

  /* We may pass bounds not associated with any pointer.  */
  if (!parm)
    {
      gcc_assert (arg->special_slot);
      slot = arg->special_slot;
      ptr = const0_rtx;
    }
  /* Find pointer associated with bounds and where it is
     passed.  */
  else
    {
      if (!parm->reg)
        {
          gcc_assert (!arg->special_slot);

          addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
        }
      else if (REG_P (parm->reg))
        {
          gcc_assert (arg->special_slot);
          slot = arg->special_slot;

          if (MEM_P (parm->value))
            addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
          else if (REG_P (parm->value))
            ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
          else
            {
              gcc_assert (!arg->pointer_offset);
              ptr = parm->value;
            }
        }
      else
        {
          gcc_assert (GET_CODE (parm->reg) == PARALLEL);

          gcc_assert (arg->special_slot);
          slot = arg->special_slot;

          if (parm->parallel_value)
            ptr = chkp_get_value_with_offs (parm->parallel_value,
                                            GEN_INT (arg->pointer_offset));
          else
            gcc_unreachable ();
        }
    }

  /* Expand bounds.  */
  if (!arg->value)
    arg->value = expand_normal (arg->tree_value);

  targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
}

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is a bitmask of ECF_* flags for the call.  If ECF_MAY_BE_ALLOCA
   is set, this could be a call to `alloca', so we must be careful about
   how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is set, to
   indicate that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments that are also passed in registers.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */
66d433c7 | 5385 | |
57679d39 | 5386 | static int |
4c9e08a4 | 5387 | store_one_arg (struct arg_data *arg, rtx argblock, int flags, |
5388 | int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space) | |
66d433c7 | 5389 | { |
19cb6b50 | 5390 | tree pval = arg->tree_value; |
66d433c7 | 5391 | rtx reg = 0; |
5392 | int partial = 0; | |
5393 | int used = 0; | |
df9f2bb6 | 5394 | int i, lower_bound = 0, upper_bound = 0; |
57679d39 | 5395 | int sibcall_failure = 0; |
66d433c7 | 5396 | |
5397 | if (TREE_CODE (pval) == ERROR_MARK) | |
57679d39 | 5398 | return 1; |
66d433c7 | 5399 | |
1b117c60 | 5400 | /* Push a new temporary level for any temporaries we make for |
5401 | this argument. */ | |
5402 | push_temp_slots (); | |
5403 | ||
02510658 | 5404 | if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)) |
66d433c7 | 5405 | { |
      /* If this is being stored into a pre-allocated, fixed-size stack
         area, save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
          if (ARGS_GROW_DOWNWARD)
            {
              /* stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  */
              if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
                upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
              else
                upper_bound = 0;

              lower_bound = upper_bound - arg->locate.size.constant;
            }
          else
            {
              if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
                lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
              else
                lower_bound = 0;

              upper_bound = lower_bound + arg->locate.size.constant;
            }
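
          /* Worked example (offsets assumed for illustration, args growing
             upward): a slot at (plus outgoing-args-ptr 16) holding an
             8-byte argument gives LOWER_BOUND == 16 and UPPER_BOUND == 24,
             so the scan below inspects stack_usage_map[16..23], after
             skipping any bytes inside the fixed register-parameter area.  */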
          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              machine_mode save_mode
                = int_mode_for_size (size, 1).else_blk ();
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);
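
              /* If the slot's size matched an integer mode (a 4-byte slot
                 yields SImode, for instance), the old contents are saved
                 into a pseudo below; otherwise else_blk gave us BLKmode
                 and we save into a stack temporary.  */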
              if (save_mode == BLKmode)
                {
                  arg->save_area
                    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
                                   stack_area,
                                   GEN_INT (arg->locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object, or if for any other reason the
         mode doesn't agree, convert to the proper mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      if (TYPE_EMPTY_P (TREE_TYPE (pval)))
        size = 0;
      else
        size = GET_MODE_SIZE (arg->mode);
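
      /* TYPE_EMPTY_P types (e.g. types the ABI says occupy no argument
         space) contribute no bytes; the guard on USED before the
         emit_push_insn call below keeps us from pushing anything for
         them.  */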

      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          != PAD_NONE)
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
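
      /* For example, with PARM_BOUNDARY == 32 (4 bytes, a typical 32-bit
         ABI value) and SIZE == 1, USED becomes (1 + 4 - 1) / 4 * 4 == 4.  */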

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
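      /* With PAD_DOWNWARD the value sits at the high end of its slot, so
         the pad below it limits the usable alignment: e.g. a 2-byte pad
         caps PARM_ALIGN at 16 bits (least_bit_hwi (2) * BITS_PER_UNIT),
         since the data starts 2 bytes into the aligned slot.  */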
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          == PAD_DOWNWARD)
        {
          int pad = used - size;
          if (pad)
            {
              unsigned int pad_align = least_bit_hwi (pad) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, pad_align);
            }
        }

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      if (used
          && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
                              NULL_RTX, parm_align, partial, reg, used - size,
                              argblock, ARGS_SIZE_RTX (arg->locate.offset),
                              reg_parm_stack_space,
                              ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
        sibcall_failure = 1;

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - arg_int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
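          /* EXCESS is the number of slot bytes not occupied by the value
             itself: the slot size minus the type's size, plus the bytes
             of the argument already passed in registers (PARTIAL).  */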
          size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype),
                                  EXPAND_NORMAL);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
          == PAD_DOWNWARD)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = least_bit_hwi (excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
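          /* For a sibcall the outgoing argument block overlays the
             incoming one.  Below, I becomes ARG->VALUE's byte offset
             within the incoming argument area; comparing it against the
             outgoing offset and size detects a source/destination
             overlap, which forces a normal call instead.  */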
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == crtl->args.internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     crtl->args.internal_arg_pointer
                  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
            {
              if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* arg.locate doesn't contain the pretend_args_size offset,
                 it's part of argblock.  Ensure we don't count it in I.  */
              if (STACK_GROWS_DOWNWARD)
                i -= crtl->args.pretend_args_size;
              else
                i += crtl->args.pretend_args_size;

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && arg->locate.size.var == 0
                          && CONST_INT_P (size_rtx));

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  /* Use arg->locate.size.constant instead of size_rtx
                     because we only care about the part of the argument
                     on the stack.  */
                  if (i < (arg->locate.offset.constant
                           + arg->locate.size.constant))
                    sibcall_failure = 1;
                }
              else
                {
                  /* Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  */
                  if (arg->locate.size.constant != INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }

      if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
        emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                        parm_align, partial, reg, excess, argblock,
                        ARGS_SIZE_RTX (arg->locate.offset),
                        reg_parm_stack_space,
                        ARGS_SIZE_RTX (arg->locate.alignment_pad), false);

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
                             const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  if (TYPE_EMPTY_P (type))
    return false;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register...  */
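  /* Illustration (values assumed): a 5-byte BLKmode struct with
     PARM_BOUNDARY == 32 has 5 % 4 != 0, so it does not fill its last
     word.  If the target's padding direction matches the endianness
     test below, a register copy would leave the useful bytes in the
     wrong part of the register, so the argument must stay on the
     stack.  */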
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (targetm.calls.function_arg_padding (mode, type)
          == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;

  return false;
}

/* Tell the garbage collector about GTY markers in this source file.  */
#include "gt-calls.h"