/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple-expr.h"
#include "cfghooks.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "rtl-error.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "optabs-tree.h"
#include "output.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "tree-dfa.h"
#include "tree-ssa.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
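/* For instance, with ALIGN == 8 these macros give FLOOR_ROUND (13, 8) == 8
   and CEIL_ROUND (13, 8) == 16, and for negative values
   FLOOR_ROUND (-13, 8) == -16 and CEIL_ROUND (-13, 8) == -8, since only
   bit masking is used and no division is performed.  */
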
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;
\f

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);

\f
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
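/* Illustrative usage: a language front end that needs to compile a nested
   function in the middle of its containing function would typically bracket
   that compilation with push_function_context () and pop_function_context (),
   so the outer function's state in cfun is saved and later restored.  */
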
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
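/* For example, on a target where FRAME_GROWS_DOWNWARD is true and 32 bytes
   of locals have been allocated, frame_offset is -32 and get_frame_size
   returns 32.  */
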
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
	      /* Leave room for the fixed part of the frame.  */
	      - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
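/* As a concrete instance, with a 64-bit Pmode and 8-byte words the check
   above rejects any frame whose local objects exceed 2^63 - 512 bytes.  */
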
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}

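/* As an illustration: with ALIGNMENT == 8, a zero frame_phase and a
   downward-growing frame, a 10-byte slot placed into the area
   [START, START + LENGTH) ends up at FLOOR_ROUND (START + LENGTH - 10, 8).  */
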
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with the last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
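/* A typical caller might request, e.g.,
     rtx mem = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
   where ALIGN == 0 asks for the alignment implied by DImode, as described
   above assign_stack_local_1.  */
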
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
			    temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc<temp_slot> ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      vec_safe_push (stack_slot_list, p->slot);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
6f086dfc | 917 | } |
d16790f2 JW |
918 | |
919 | /* Allocate a temporary stack slot and record it for possible later | |
9474e8ab | 920 | reuse. First two arguments are same as in preceding function. */ |
d16790f2 JW |
921 | |
922 | rtx | |
ef4bddc2 | 923 | assign_stack_temp (machine_mode mode, HOST_WIDE_INT size) |
d16790f2 | 924 | { |
9474e8ab | 925 | return assign_stack_temp_for_type (mode, size, NULL_TREE); |
d16790f2 | 926 | } |
638141a6 | 927 | \f |
9432c136 EB |
928 | /* Assign a temporary. |
929 | If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl | |
930 | and so that should be used in error messages. In either case, we | |
931 | allocate of the given type. | |
230f21b4 | 932 | MEMORY_REQUIRED is 1 if the result must be addressable stack memory; |
b55d9ff8 RK |
933 | it is 0 if a register is OK. |
934 | DONT_PROMOTE is 1 if we should not promote values in register | |
935 | to wider modes. */ | |
230f21b4 PB |
936 | |
937 | rtx | |
9474e8ab | 938 | assign_temp (tree type_or_decl, int memory_required, |
fa8db1f7 | 939 | int dont_promote ATTRIBUTE_UNUSED) |
230f21b4 | 940 | { |
9432c136 | 941 | tree type, decl; |
ef4bddc2 | 942 | machine_mode mode; |
9e1622ed | 943 | #ifdef PROMOTE_MODE |
9432c136 EB |
944 | int unsignedp; |
945 | #endif | |
946 | ||
947 | if (DECL_P (type_or_decl)) | |
948 | decl = type_or_decl, type = TREE_TYPE (decl); | |
949 | else | |
950 | decl = NULL, type = type_or_decl; | |
951 | ||
952 | mode = TYPE_MODE (type); | |
9e1622ed | 953 | #ifdef PROMOTE_MODE |
8df83eae | 954 | unsignedp = TYPE_UNSIGNED (type); |
0ce8a59c | 955 | #endif |
638141a6 | 956 | |
45177337 JM |
957 | /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front |
958 | end. See also create_tmp_var for the gimplification-time check. */ | |
959 | gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); | |
960 | ||
230f21b4 PB |
961 | if (mode == BLKmode || memory_required) |
962 | { | |
e5e809f4 | 963 | HOST_WIDE_INT size = int_size_in_bytes (type); |
230f21b4 PB |
964 | rtx tmp; |
965 | ||
44affdae JH |
966 | /* Zero sized arrays are GNU C extension. Set size to 1 to avoid |
967 | problems with allocating the stack space. */ | |
968 | if (size == 0) | |
969 | size = 1; | |
970 | ||
230f21b4 | 971 | /* Unfortunately, we don't yet know how to allocate variable-sized |
a441447f OH |
972 | temporaries. However, sometimes we can find a fixed upper limit on |
973 | the size, so try that instead. */ | |
974 | else if (size == -1) | |
975 | size = max_int_size_in_bytes (type); | |
e30bb772 | 976 | |
9432c136 EB |
977 | /* The size of the temporary may be too large to fit into an integer. */ |
978 | /* ??? Not sure this should happen except for user silliness, so limit | |
797a6ac1 | 979 | this to things that aren't compiler-generated temporaries. The |
535a42b1 | 980 | rest of the time we'll die in assign_stack_temp_for_type. */ |
9432c136 EB |
981 | if (decl && size == -1 |
982 | && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST) | |
983 | { | |
dee15844 | 984 | error ("size of variable %q+D is too large", decl); |
9432c136 EB |
985 | size = 1; |
986 | } | |
987 | ||
9474e8ab | 988 | tmp = assign_stack_temp_for_type (mode, size, type); |
230f21b4 PB |
989 | return tmp; |
990 | } | |
638141a6 | 991 | |
9e1622ed | 992 | #ifdef PROMOTE_MODE |
b55d9ff8 | 993 | if (! dont_promote) |
cde0f3fd | 994 | mode = promote_mode (type, mode, &unsignedp); |
230f21b4 | 995 | #endif |
638141a6 | 996 | |
230f21b4 PB |
997 | return gen_reg_rtx (mode); |
998 | } | |
638141a6 | 999 | \f |
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are in use.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
\f
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

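/* A typical expansion-time usage pattern for these routines might be:
   push_temp_slots (), expand a statement that calls assign_temp or
   assign_stack_temp, preserve_temp_slots (result) if the value must
   outlive the statement, then free_temp_slots () or pop_temp_slots ().
   This sketch is illustrative only; the callers in the expander are the
   authoritative reference.  */
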
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
\f
1219 | \f | |
6399c0ab SB |
1220 | /* Functions and data structures to keep track of the values hard regs |
1221 | had at the start of the function. */ | |
1222 | ||
1223 | /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val, | |
1224 | and has_hard_reg_initial_val.. */ | |
50686850 | 1225 | struct GTY(()) initial_value_pair { |
6399c0ab SB |
1226 | rtx hard_reg; |
1227 | rtx pseudo; | |
50686850 | 1228 | }; |
6399c0ab SB |
1229 | /* ??? This could be a VEC but there is currently no way to define an |
1230 | opaque VEC type. This could be worked around by defining struct | |
1231 | initial_value_pair in function.h. */ | |
50686850 | 1232 | struct GTY(()) initial_value_struct { |
6399c0ab SB |
1233 | int num_entries; |
1234 | int max_entries; | |
1235 | initial_value_pair * GTY ((length ("%h.num_entries"))) entries; | |
50686850 | 1236 | }; |
6399c0ab SB |
1237 | |
1238 | /* If a pseudo represents an initial hard reg (or expression), return | |
1239 | it, else return NULL_RTX. */ | |
1240 | ||
1241 | rtx | |
1242 | get_hard_reg_initial_reg (rtx reg) | |
1243 | { | |
1244 | struct initial_value_struct *ivs = crtl->hard_reg_initial_vals; | |
1245 | int i; | |
1246 | ||
1247 | if (ivs == 0) | |
1248 | return NULL_RTX; | |
1249 | ||
1250 | for (i = 0; i < ivs->num_entries; i++) | |
1251 | if (rtx_equal_p (ivs->entries[i].pseudo, reg)) | |
1252 | return ivs->entries[i].hard_reg; | |
1253 | ||
1254 | return NULL_RTX; | |
1255 | } | |
1256 | ||
1257 | /* Make sure that there's a pseudo register of mode MODE that stores the | |
1258 | initial value of hard register REGNO. Return an rtx for such a pseudo. */ | |
1259 | ||
1260 | rtx | |
ef4bddc2 | 1261 | get_hard_reg_initial_val (machine_mode mode, unsigned int regno) |
6399c0ab SB |
1262 | { |
1263 | struct initial_value_struct *ivs; | |
1264 | rtx rv; | |
1265 | ||
1266 | rv = has_hard_reg_initial_val (mode, regno); | |
1267 | if (rv) | |
1268 | return rv; | |
1269 | ||
1270 | ivs = crtl->hard_reg_initial_vals; | |
1271 | if (ivs == 0) | |
1272 | { | |
766090c2 | 1273 | ivs = ggc_alloc<initial_value_struct> (); |
6399c0ab SB |
1274 | ivs->num_entries = 0; |
1275 | ivs->max_entries = 5; | |
766090c2 | 1276 | ivs->entries = ggc_vec_alloc<initial_value_pair> (5); |
6399c0ab SB |
1277 | crtl->hard_reg_initial_vals = ivs; |
1278 | } | |
1279 | ||
1280 | if (ivs->num_entries >= ivs->max_entries) | |
1281 | { | |
1282 | ivs->max_entries += 5; | |
1283 | ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries, | |
1284 | ivs->max_entries); | |
1285 | } | |
1286 | ||
1287 | ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno); | |
1288 | ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode); | |
1289 | ||
1290 | return ivs->entries[ivs->num_entries++].pseudo; | |
1291 | } | |
1292 | ||
1293 | /* See if get_hard_reg_initial_val has been used to create a pseudo | |
1294 | for the initial value of hard register REGNO in mode MODE. Return | |
1295 | the associated pseudo if so, otherwise return NULL. */ | |
1296 | ||
1297 | rtx | |
ef4bddc2 | 1298 | has_hard_reg_initial_val (machine_mode mode, unsigned int regno) |
6399c0ab SB |
1299 | { |
1300 | struct initial_value_struct *ivs; | |
1301 | int i; | |
1302 | ||
1303 | ivs = crtl->hard_reg_initial_vals; | |
1304 | if (ivs != 0) | |
1305 | for (i = 0; i < ivs->num_entries; i++) | |
1306 | if (GET_MODE (ivs->entries[i].hard_reg) == mode | |
1307 | && REGNO (ivs->entries[i].hard_reg) == regno) | |
1308 | return ivs->entries[i].pseudo; | |
1309 | ||
1310 | return NULL_RTX; | |
1311 | } | |
1312 | ||
1313 | unsigned int | |
1314 | emit_initial_value_sets (void) | |
1315 | { | |
1316 | struct initial_value_struct *ivs = crtl->hard_reg_initial_vals; | |
1317 | int i; | |
691fe203 | 1318 | rtx_insn *seq; |
6399c0ab SB |
1319 | |
1320 | if (ivs == 0) | |
1321 | return 0; | |
1322 | ||
1323 | start_sequence (); | |
1324 | for (i = 0; i < ivs->num_entries; i++) | |
1325 | emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg); | |
1326 | seq = get_insns (); | |
1327 | end_sequence (); | |
1328 | ||
1329 | emit_insn_at_entry (seq); | |
1330 | return 0; | |
1331 | } | |
1332 | ||
1333 | /* Return the hardreg-pseudoreg initial values pair entry I and | |
1334 | TRUE if I is a valid entry, or FALSE if I is not a valid entry. */ | |
1335 | bool | |
1336 | initial_value_entry (int i, rtx *hreg, rtx *preg) | |
1337 | { | |
1338 | struct initial_value_struct *ivs = crtl->hard_reg_initial_vals; | |
1339 | if (!ivs || i >= ivs->num_entries) | |
1340 | return false; | |
1341 | ||
1342 | *hreg = ivs->entries[i].hard_reg; | |
1343 | *preg = ivs->entries[i].pseudo; | |
1344 | return true; | |
1345 | } | |
1346 | \f | |
8fff4fc1 RH |
1347 | /* These routines are responsible for converting virtual register references |
1348 | to the actual hard register references once RTL generation is complete. | |
718fe406 | 1349 | |
8fff4fc1 RH |
1350 | The following four variables are used for communication between the |
1351 | routines. They contain the offsets of the virtual registers from their | |
1352 | respective hard registers. */ | |
fe9b4957 | 1353 | |
8fff4fc1 RH |
1354 | static int in_arg_offset; |
1355 | static int var_offset; | |
1356 | static int dynamic_offset; | |
1357 | static int out_arg_offset; | |
1358 | static int cfa_offset; | |
8a5275eb | 1359 | |
8fff4fc1 RH |
1360 | /* In most machines, the stack pointer register is equivalent to the bottom |
1361 | of the stack. */ | |
718fe406 | 1362 | |
8fff4fc1 RH |
1363 | #ifndef STACK_POINTER_OFFSET |
1364 | #define STACK_POINTER_OFFSET 0 | |
1365 | #endif | |
8c36698e | 1366 | |
ddbb449f AM |
1367 | #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE) |
1368 | #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE | |
1369 | #endif | |
1370 | ||
8fff4fc1 RH |
1371 | /* If not defined, pick an appropriate default for the offset of dynamically |
1372 | allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS, | |
ddbb449f | 1373 | INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */ |
fe9b4957 | 1374 | |
8fff4fc1 | 1375 | #ifndef STACK_DYNAMIC_OFFSET |
8a5275eb | 1376 | |
8fff4fc1 RH |
1377 | /* The bottom of the stack points to the actual arguments. If |
1378 | REG_PARM_STACK_SPACE is defined, this includes the space for the register | |
1379 | parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined, | |
1380 | stack space for register parameters is not pushed by the caller, but is | |
1381 | rather part of the fixed stack areas and hence not included in | |
38173d38 | 1382 | `crtl->outgoing_args_size'. Nevertheless, we must allow |
8fff4fc1 | 1383 | for it when allocating stack dynamic objects. */ |
8a5275eb | 1384 | |
ddbb449f | 1385 | #ifdef INCOMING_REG_PARM_STACK_SPACE |
8fff4fc1 RH |
1386 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ |
1387 | ((ACCUMULATE_OUTGOING_ARGS \ | |
38173d38 | 1388 | ? (crtl->outgoing_args_size \ |
81464b2c | 1389 | + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \ |
ddbb449f | 1390 | : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \ |
ac294f0b | 1391 | : 0) + (STACK_POINTER_OFFSET)) |
8fff4fc1 RH |
1392 | #else |
1393 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
38173d38 | 1394 | ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \ |
8fff4fc1 RH |
1395 | + (STACK_POINTER_OFFSET)) |
1396 | #endif | |
1397 | #endif | |
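/* A standalone sketch (not GCC code) of the simpler branch of
   STACK_DYNAMIC_OFFSET above: the dynamic area starts past the
   outgoing-argument block (only when outgoing arguments are accumulated)
   plus the fixed stack-pointer offset.  All parameters are illustrative
   stand-ins for the target macros and crtl fields involved.  */

static long
stack_dynamic_offset_sketch (int accumulate_outgoing_args,
                             long outgoing_args_size,
                             long stack_pointer_offset)
{
  /* Mirrors: (ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)
              + (STACK_POINTER_OFFSET)  */
  return (accumulate_outgoing_args ? outgoing_args_size : 0)
         + stack_pointer_offset;
}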
4fa48eae | 1398 | |
659e47fb | 1399 | \f |
bbf9b913 RH |
1400 | /* Given an RTX and a pointer to a HOST_WIDE_INT, if the RTX | |
1401 | is a virtual register, return the equivalent hard register and store the | |
1402 | offset indirectly through the pointer. Otherwise, return NULL_RTX. */ | |
6f086dfc | 1403 | |
bbf9b913 RH |
1404 | static rtx |
1405 | instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset) | |
6f086dfc | 1406 | { |
82d6e6fc | 1407 | rtx new_rtx; |
bbf9b913 | 1408 | HOST_WIDE_INT offset; |
6f086dfc | 1409 | |
bbf9b913 | 1410 | if (x == virtual_incoming_args_rtx) |
2e3f842f | 1411 | { |
d015f7cc | 1412 | if (stack_realign_drap) |
2e3f842f | 1413 | { |
d015f7cc L |
1414 | /* Replace virtual_incoming_args_rtx with internal arg |
1415 | pointer if DRAP is used to realign stack. */ | |
82d6e6fc | 1416 | new_rtx = crtl->args.internal_arg_pointer; |
2e3f842f L |
1417 | offset = 0; |
1418 | } | |
1419 | else | |
82d6e6fc | 1420 | new_rtx = arg_pointer_rtx, offset = in_arg_offset; |
2e3f842f | 1421 | } |
bbf9b913 | 1422 | else if (x == virtual_stack_vars_rtx) |
82d6e6fc | 1423 | new_rtx = frame_pointer_rtx, offset = var_offset; |
bbf9b913 | 1424 | else if (x == virtual_stack_dynamic_rtx) |
82d6e6fc | 1425 | new_rtx = stack_pointer_rtx, offset = dynamic_offset; |
bbf9b913 | 1426 | else if (x == virtual_outgoing_args_rtx) |
82d6e6fc | 1427 | new_rtx = stack_pointer_rtx, offset = out_arg_offset; |
bbf9b913 | 1428 | else if (x == virtual_cfa_rtx) |
f6672e8e RH |
1429 | { |
1430 | #ifdef FRAME_POINTER_CFA_OFFSET | |
82d6e6fc | 1431 | new_rtx = frame_pointer_rtx; |
f6672e8e | 1432 | #else |
82d6e6fc | 1433 | new_rtx = arg_pointer_rtx; |
f6672e8e RH |
1434 | #endif |
1435 | offset = cfa_offset; | |
1436 | } | |
32990d5b JJ |
1437 | else if (x == virtual_preferred_stack_boundary_rtx) |
1438 | { | |
1439 | new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT); | |
1440 | offset = 0; | |
1441 | } | |
bbf9b913 RH |
1442 | else |
1443 | return NULL_RTX; | |
6f086dfc | 1444 | |
bbf9b913 | 1445 | *poffset = offset; |
82d6e6fc | 1446 | return new_rtx; |
6f086dfc RS |
1447 | } |
1448 | ||
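/* A standalone sketch (not GCC code) of the mapping performed by
   instantiate_new_reg above: each virtual register stands for a fixed hard
   base register plus a per-function constant offset.  The enum values are
   invented; the real code compares rtx pointers and uses the five offset
   variables declared earlier.  */

enum sketch_virtual_reg { V_INCOMING_ARGS, V_STACK_VARS,
                          V_STACK_DYNAMIC, V_OUTGOING_ARGS };
enum sketch_hard_reg { H_ARG_POINTER, H_FRAME_POINTER, H_STACK_POINTER };

struct sketch_repl { enum sketch_hard_reg base; long offset; };

static struct sketch_repl
map_virtual_reg_sketch (enum sketch_virtual_reg v, long in_arg_off,
                        long var_off, long dyn_off, long out_arg_off)
{
  struct sketch_repl r;
  switch (v)
    {
    case V_INCOMING_ARGS: r.base = H_ARG_POINTER;   r.offset = in_arg_off;  break;
    case V_STACK_VARS:    r.base = H_FRAME_POINTER; r.offset = var_off;     break;
    case V_STACK_DYNAMIC: r.base = H_STACK_POINTER; r.offset = dyn_off;     break;
    default:              r.base = H_STACK_POINTER; r.offset = out_arg_off; break;
    }
  return r;
}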
b8704801 RS |
1449 | /* A subroutine of instantiate_virtual_regs. Instantiate any virtual |
1450 | registers present inside of *LOC. The expression is simplified, | |
1451 | as much as possible, but is not to be considered "valid" in any sense | |
1452 | implied by the target. Return true if any change is made. */ | |
6f086dfc | 1453 | |
b8704801 RS |
1454 | static bool |
1455 | instantiate_virtual_regs_in_rtx (rtx *loc) | |
6f086dfc | 1456 | { |
b8704801 RS |
1457 | if (!*loc) |
1458 | return false; | |
1459 | bool changed = false; | |
1460 | subrtx_ptr_iterator::array_type array; | |
1461 | FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST) | |
6f086dfc | 1462 | { |
b8704801 RS |
1463 | rtx *loc = *iter; |
1464 | if (rtx x = *loc) | |
bbf9b913 | 1465 | { |
b8704801 RS |
1466 | rtx new_rtx; |
1467 | HOST_WIDE_INT offset; | |
1468 | switch (GET_CODE (x)) | |
1469 | { | |
1470 | case REG: | |
1471 | new_rtx = instantiate_new_reg (x, &offset); | |
1472 | if (new_rtx) | |
1473 | { | |
1474 | *loc = plus_constant (GET_MODE (x), new_rtx, offset); | |
1475 | changed = true; | |
1476 | } | |
1477 | iter.skip_subrtxes (); | |
1478 | break; | |
bbf9b913 | 1479 | |
b8704801 RS |
1480 | case PLUS: |
1481 | new_rtx = instantiate_new_reg (XEXP (x, 0), &offset); | |
1482 | if (new_rtx) | |
1483 | { | |
1484 | XEXP (x, 0) = new_rtx; | |
1485 | *loc = plus_constant (GET_MODE (x), x, offset, true); | |
1486 | changed = true; | |
1487 | iter.skip_subrtxes (); | |
1488 | break; | |
1489 | } | |
e5e809f4 | 1490 | |
b8704801 RS |
1491 | /* FIXME -- from old code */ |
1492 | /* If we have (plus (subreg (virtual-reg)) (const_int)), we know | |
1493 | we can commute the PLUS and SUBREG because pointers into the | |
1494 | frame are well-behaved. */ | |
1495 | break; | |
ce717ce4 | 1496 | |
b8704801 RS |
1497 | default: |
1498 | break; | |
1499 | } | |
1500 | } | |
6f086dfc | 1501 | } |
b8704801 | 1502 | return changed; |
6f086dfc RS |
1503 | } |
1504 | ||
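/* A standalone sketch (not GCC code) of the rewrite done by
   instantiate_virtual_regs_in_rtx above: walk an expression tree and
   replace every "virtual register" leaf with base + offset.  The tiny expr
   type and all names are invented; the real code walks rtx with
   FOR_EACH_SUBRTX_PTR and also folds the offset into an enclosing PLUS.  */

#include <stdlib.h>

enum expr_kind { E_REG, E_VIRT, E_CONST, E_PLUS };

struct expr
{
  enum expr_kind kind;
  long value;                   /* register number or constant value */
  struct expr *op0, *op1;       /* operands of E_PLUS */
};

static struct expr *
make_expr (enum expr_kind k, long value, struct expr *a, struct expr *b)
{
  struct expr *e = (struct expr *) calloc (1, sizeof (struct expr));
  e->kind = k;
  e->value = value;
  e->op0 = a;
  e->op1 = b;
  return e;
}

/* Rewrite *LOC in place, replacing every E_VIRT leaf with
   (E_PLUS (E_REG base_reg) (E_CONST offset)).  Return nonzero if any
   replacement was made, as instantiate_virtual_regs_in_rtx does.  */
static int
instantiate_sketch (struct expr **loc, long base_reg, long offset)
{
  struct expr *e = *loc;
  if (!e)
    return 0;
  if (e->kind == E_VIRT)
    {
      *loc = make_expr (E_PLUS, 0,
                        make_expr (E_REG, base_reg, 0, 0),
                        make_expr (E_CONST, offset, 0, 0));
      return 1;
    }
  if (e->kind == E_PLUS)
    {
      int a = instantiate_sketch (&e->op0, base_reg, offset);
      int b = instantiate_sketch (&e->op1, base_reg, offset);
      return a || b;
    }
  return 0;
}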
bbf9b913 RH |
1505 | /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X |
1506 | matches the predicate for insn CODE operand OPERAND. */ | |
6f086dfc | 1507 | |
bbf9b913 RH |
1508 | static int |
1509 | safe_insn_predicate (int code, int operand, rtx x) | |
6f086dfc | 1510 | { |
2ef6ce06 | 1511 | return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x); |
bbf9b913 | 1512 | } |
5a73491b | 1513 | |
bbf9b913 RH |
1514 | /* A subroutine of instantiate_virtual_regs. Instantiate any virtual |
1515 | registers present inside of INSN. The result will be a valid insn. */ | |
5a73491b RK |
1516 | |
1517 | static void | |
691fe203 | 1518 | instantiate_virtual_regs_in_insn (rtx_insn *insn) |
5a73491b | 1519 | { |
bbf9b913 RH |
1520 | HOST_WIDE_INT offset; |
1521 | int insn_code, i; | |
9325973e | 1522 | bool any_change = false; |
691fe203 DM |
1523 | rtx set, new_rtx, x; |
1524 | rtx_insn *seq; | |
32e66afd | 1525 | |
bbf9b913 RH |
1526 | /* There are some special cases to be handled first. */ |
1527 | set = single_set (insn); | |
1528 | if (set) | |
32e66afd | 1529 | { |
bbf9b913 RH |
1530 | /* We're allowed to assign to a virtual register. This is interpreted |
1531 | to mean that the underlying register gets assigned the inverse | |
1532 | transformation. This is used, for example, in the handling of | |
1533 | non-local gotos. */ | |
82d6e6fc KG |
1534 | new_rtx = instantiate_new_reg (SET_DEST (set), &offset); |
1535 | if (new_rtx) | |
bbf9b913 RH |
1536 | { |
1537 | start_sequence (); | |
32e66afd | 1538 | |
b8704801 | 1539 | instantiate_virtual_regs_in_rtx (&SET_SRC (set)); |
82d6e6fc | 1540 | x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set), |
69a59f0f | 1541 | gen_int_mode (-offset, GET_MODE (new_rtx))); |
82d6e6fc KG |
1542 | x = force_operand (x, new_rtx); |
1543 | if (x != new_rtx) | |
1544 | emit_move_insn (new_rtx, x); | |
5a73491b | 1545 | |
bbf9b913 RH |
1546 | seq = get_insns (); |
1547 | end_sequence (); | |
5a73491b | 1548 | |
bbf9b913 RH |
1549 | emit_insn_before (seq, insn); |
1550 | delete_insn (insn); | |
1551 | return; | |
1552 | } | |
5a73491b | 1553 | |
bbf9b913 RH |
1554 | /* Handle a straight copy from a virtual register by generating a |
1555 | new add insn. The difference between this and falling through | |
1556 | to the generic case is avoiding a new pseudo and eliminating a | |
1557 | move insn in the initial rtl stream. */ | |
82d6e6fc KG |
1558 | new_rtx = instantiate_new_reg (SET_SRC (set), &offset); |
1559 | if (new_rtx && offset != 0 | |
bbf9b913 RH |
1560 | && REG_P (SET_DEST (set)) |
1561 | && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) | |
1562 | { | |
1563 | start_sequence (); | |
5a73491b | 1564 | |
2f1cd2eb RS |
1565 | x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx, |
1566 | gen_int_mode (offset, | |
1567 | GET_MODE (SET_DEST (set))), | |
1568 | SET_DEST (set), 1, OPTAB_LIB_WIDEN); | |
bbf9b913 RH |
1569 | if (x != SET_DEST (set)) |
1570 | emit_move_insn (SET_DEST (set), x); | |
770ae6cc | 1571 | |
bbf9b913 RH |
1572 | seq = get_insns (); |
1573 | end_sequence (); | |
87ce34d6 | 1574 | |
bbf9b913 RH |
1575 | emit_insn_before (seq, insn); |
1576 | delete_insn (insn); | |
87ce34d6 | 1577 | return; |
bbf9b913 | 1578 | } |
5a73491b | 1579 | |
bbf9b913 | 1580 | extract_insn (insn); |
9325973e | 1581 | insn_code = INSN_CODE (insn); |
5a73491b | 1582 | |
bbf9b913 RH |
1583 | /* Handle a plus involving a virtual register by determining if the |
1584 | operands remain valid if they're modified in place. */ | |
1585 | if (GET_CODE (SET_SRC (set)) == PLUS | |
1586 | && recog_data.n_operands >= 3 | |
1587 | && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0) | |
1588 | && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1) | |
481683e1 | 1589 | && CONST_INT_P (recog_data.operand[2]) |
82d6e6fc | 1590 | && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset))) |
bbf9b913 RH |
1591 | { |
1592 | offset += INTVAL (recog_data.operand[2]); | |
5a73491b | 1593 | |
bbf9b913 | 1594 | /* If the sum is zero, then replace with a plain move. */ |
9325973e RH |
1595 | if (offset == 0 |
1596 | && REG_P (SET_DEST (set)) | |
1597 | && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) | |
bbf9b913 RH |
1598 | { |
1599 | start_sequence (); | |
82d6e6fc | 1600 | emit_move_insn (SET_DEST (set), new_rtx); |
bbf9b913 RH |
1601 | seq = get_insns (); |
1602 | end_sequence (); | |
d1405722 | 1603 | |
bbf9b913 RH |
1604 | emit_insn_before (seq, insn); |
1605 | delete_insn (insn); | |
1606 | return; | |
1607 | } | |
d1405722 | 1608 | |
bbf9b913 | 1609 | x = gen_int_mode (offset, recog_data.operand_mode[2]); |
bbf9b913 RH |
1610 | |
1611 | /* Using validate_change and apply_change_group here leaves | |
1612 | recog_data in an invalid state. Since we know exactly what | |
1613 | we want to check, do those two by hand. */ | |
82d6e6fc | 1614 | if (safe_insn_predicate (insn_code, 1, new_rtx) |
bbf9b913 RH |
1615 | && safe_insn_predicate (insn_code, 2, x)) |
1616 | { | |
82d6e6fc | 1617 | *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx; |
bbf9b913 RH |
1618 | *recog_data.operand_loc[2] = recog_data.operand[2] = x; |
1619 | any_change = true; | |
9325973e RH |
1620 | |
1621 | /* Fall through into the regular operand fixup loop in | |
1622 | order to take care of operands other than 1 and 2. */ | |
bbf9b913 RH |
1623 | } |
1624 | } | |
1625 | } | |
d1405722 | 1626 | else |
9325973e RH |
1627 | { |
1628 | extract_insn (insn); | |
1629 | insn_code = INSN_CODE (insn); | |
1630 | } | |
5dc96d60 | 1631 | |
bbf9b913 RH |
1632 | /* In the general case, we expect virtual registers to appear only in |
1633 | operands, and then only as either bare registers or inside memories. */ | |
1634 | for (i = 0; i < recog_data.n_operands; ++i) | |
1635 | { | |
1636 | x = recog_data.operand[i]; | |
1637 | switch (GET_CODE (x)) | |
1638 | { | |
1639 | case MEM: | |
1640 | { | |
1641 | rtx addr = XEXP (x, 0); | |
bbf9b913 | 1642 | |
b8704801 | 1643 | if (!instantiate_virtual_regs_in_rtx (&addr)) |
bbf9b913 RH |
1644 | continue; |
1645 | ||
1646 | start_sequence (); | |
23b33725 | 1647 | x = replace_equiv_address (x, addr, true); |
a5bfb13a MM |
1648 | /* It may happen that the address with the virtual reg |
1649 | was valid (e.g. based on the virtual stack reg, which might | |
1650 | be acceptable to the predicates with all offsets), whereas | |
1651 | the address now isn't anymore, for instance when the address | |
1652 | is still offsetted, but the base reg isn't virtual-stack-reg | |
1653 | anymore. Below we would do a force_reg on the whole operand, | |
1654 | but this insn might actually only accept memory. Hence, | |
1655 | before doing that last resort, try to reload the address into | |
1656 | a register, so this operand stays a MEM. */ | |
1657 | if (!safe_insn_predicate (insn_code, i, x)) | |
1658 | { | |
1659 | addr = force_reg (GET_MODE (addr), addr); | |
23b33725 | 1660 | x = replace_equiv_address (x, addr, true); |
a5bfb13a | 1661 | } |
bbf9b913 RH |
1662 | seq = get_insns (); |
1663 | end_sequence (); | |
1664 | if (seq) | |
1665 | emit_insn_before (seq, insn); | |
1666 | } | |
1667 | break; | |
1668 | ||
1669 | case REG: | |
82d6e6fc KG |
1670 | new_rtx = instantiate_new_reg (x, &offset); |
1671 | if (new_rtx == NULL) | |
bbf9b913 RH |
1672 | continue; |
1673 | if (offset == 0) | |
82d6e6fc | 1674 | x = new_rtx; |
bbf9b913 RH |
1675 | else |
1676 | { | |
1677 | start_sequence (); | |
6f086dfc | 1678 | |
bbf9b913 RH |
1679 | /* Careful, special mode predicates may have stuff in |
1680 | insn_data[insn_code].operand[i].mode that isn't useful | |
1681 | to us for computing a new value. */ | |
1682 | /* ??? Recognize address_operand and/or "p" constraints | |
1683 | to see if (plus new offset) is valid before we put | |
1684 | this through expand_simple_binop. */ | |
82d6e6fc | 1685 | x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx, |
2f1cd2eb RS |
1686 | gen_int_mode (offset, GET_MODE (x)), |
1687 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
bbf9b913 RH |
1688 | seq = get_insns (); |
1689 | end_sequence (); | |
1690 | emit_insn_before (seq, insn); | |
1691 | } | |
1692 | break; | |
6f086dfc | 1693 | |
bbf9b913 | 1694 | case SUBREG: |
82d6e6fc KG |
1695 | new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset); |
1696 | if (new_rtx == NULL) | |
bbf9b913 RH |
1697 | continue; |
1698 | if (offset != 0) | |
1699 | { | |
1700 | start_sequence (); | |
2f1cd2eb RS |
1701 | new_rtx = expand_simple_binop |
1702 | (GET_MODE (new_rtx), PLUS, new_rtx, | |
1703 | gen_int_mode (offset, GET_MODE (new_rtx)), | |
1704 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
bbf9b913 RH |
1705 | seq = get_insns (); |
1706 | end_sequence (); | |
1707 | emit_insn_before (seq, insn); | |
1708 | } | |
82d6e6fc KG |
1709 | x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx, |
1710 | GET_MODE (new_rtx), SUBREG_BYTE (x)); | |
7314c7dd | 1711 | gcc_assert (x); |
bbf9b913 | 1712 | break; |
6f086dfc | 1713 | |
bbf9b913 RH |
1714 | default: |
1715 | continue; | |
1716 | } | |
6f086dfc | 1717 | |
bbf9b913 RH |
1718 | /* At this point, X contains the new value for the operand. |
1719 | Validate the new value vs the insn predicate. Note that | |
1720 | asm insns will have insn_code -1 here. */ | |
1721 | if (!safe_insn_predicate (insn_code, i, x)) | |
6ba1bd36 JM |
1722 | { |
1723 | start_sequence (); | |
f7ce0951 SE |
1724 | if (REG_P (x)) |
1725 | { | |
1726 | gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER); | |
1727 | x = copy_to_reg (x); | |
1728 | } | |
1729 | else | |
1730 | x = force_reg (insn_data[insn_code].operand[i].mode, x); | |
6ba1bd36 JM |
1731 | seq = get_insns (); |
1732 | end_sequence (); | |
1733 | if (seq) | |
1734 | emit_insn_before (seq, insn); | |
1735 | } | |
6f086dfc | 1736 | |
bbf9b913 RH |
1737 | *recog_data.operand_loc[i] = recog_data.operand[i] = x; |
1738 | any_change = true; | |
1739 | } | |
6f086dfc | 1740 | |
bbf9b913 RH |
1741 | if (any_change) |
1742 | { | |
1743 | /* Propagate operand changes into the duplicates. */ | |
1744 | for (i = 0; i < recog_data.n_dups; ++i) | |
1745 | *recog_data.dup_loc[i] | |
3e916873 | 1746 | = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]); |
5dc96d60 | 1747 | |
bbf9b913 RH |
1748 | /* Force re-recognition of the instruction for validation. */ |
1749 | INSN_CODE (insn) = -1; | |
1750 | } | |
6f086dfc | 1751 | |
bbf9b913 | 1752 | if (asm_noperands (PATTERN (insn)) >= 0) |
6f086dfc | 1753 | { |
bbf9b913 | 1754 | if (!check_asm_operands (PATTERN (insn))) |
6f086dfc | 1755 | { |
bbf9b913 | 1756 | error_for_asm (insn, "impossible constraint in %<asm%>"); |
5a860835 JJ |
1757 | /* For asm goto, instead of fixing up all the edges |
1758 | just clear the template and clear input operands | |
1759 | (asm goto doesn't have any output operands). */ | |
1760 | if (JUMP_P (insn)) | |
1761 | { | |
1762 | rtx asm_op = extract_asm_operands (PATTERN (insn)); | |
1763 | ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup (""); | |
1764 | ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0); | |
1765 | ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0); | |
1766 | } | |
1767 | else | |
1768 | delete_insn (insn); | |
bbf9b913 RH |
1769 | } |
1770 | } | |
1771 | else | |
1772 | { | |
1773 | if (recog_memoized (insn) < 0) | |
1774 | fatal_insn_not_found (insn); | |
1775 | } | |
1776 | } | |
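/* A standalone sketch (not GCC code) of one special case handled above for
   (set (reg) (plus (virtual-reg) (const_int N))): the instantiation offset
   is folded into the existing constant operand, and when the sum is zero
   the insn can degenerate into a plain register copy.  The names below are
   invented.  */

enum plus_fixup_result { FIXUP_PLAIN_MOVE, FIXUP_KEEP_PLUS };

static enum plus_fixup_result
fold_offset_into_plus_sketch (long *const_operand, long instantiation_offset)
{
  *const_operand += instantiation_offset;
  return *const_operand == 0 ? FIXUP_PLAIN_MOVE : FIXUP_KEEP_PLUS;
}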
14a774a9 | 1777 | |
bbf9b913 RH |
1778 | /* Subroutine of instantiate_decls. Given RTL representing a decl, |
1779 | do any instantiation required. */ | |
14a774a9 | 1780 | |
e41b2a33 PB |
1781 | void |
1782 | instantiate_decl_rtl (rtx x) | |
bbf9b913 RH |
1783 | { |
1784 | rtx addr; | |
6f086dfc | 1785 | |
bbf9b913 RH |
1786 | if (x == 0) |
1787 | return; | |
6f086dfc | 1788 | |
bbf9b913 RH |
1789 | /* If this is a CONCAT, recurse for the pieces. */ |
1790 | if (GET_CODE (x) == CONCAT) | |
1791 | { | |
e41b2a33 PB |
1792 | instantiate_decl_rtl (XEXP (x, 0)); |
1793 | instantiate_decl_rtl (XEXP (x, 1)); | |
bbf9b913 RH |
1794 | return; |
1795 | } | |
6f086dfc | 1796 | |
bbf9b913 RH |
1797 | /* If this is not a MEM, no need to do anything. Similarly if the |
1798 | address is a constant or a register that is not a virtual register. */ | |
1799 | if (!MEM_P (x)) | |
1800 | return; | |
6f086dfc | 1801 | |
bbf9b913 RH |
1802 | addr = XEXP (x, 0); |
1803 | if (CONSTANT_P (addr) | |
1804 | || (REG_P (addr) | |
1805 | && (REGNO (addr) < FIRST_VIRTUAL_REGISTER | |
1806 | || REGNO (addr) > LAST_VIRTUAL_REGISTER))) | |
1807 | return; | |
6f086dfc | 1808 | |
b8704801 | 1809 | instantiate_virtual_regs_in_rtx (&XEXP (x, 0)); |
bbf9b913 | 1810 | } |
6f086dfc | 1811 | |
434eba35 JJ |
1812 | /* Helper for instantiate_decls called via walk_tree: Process all decls |
1813 | in the given DECL_VALUE_EXPR. */ | |
1814 | ||
1815 | static tree | |
1816 | instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
1817 | { | |
1818 | tree t = *tp; | |
726a989a | 1819 | if (! EXPR_P (t)) |
434eba35 JJ |
1820 | { |
1821 | *walk_subtrees = 0; | |
37d6a488 AO |
1822 | if (DECL_P (t)) |
1823 | { | |
1824 | if (DECL_RTL_SET_P (t)) | |
1825 | instantiate_decl_rtl (DECL_RTL (t)); | |
1826 | if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t) | |
1827 | && DECL_INCOMING_RTL (t)) | |
1828 | instantiate_decl_rtl (DECL_INCOMING_RTL (t)); | |
1829 | if ((TREE_CODE (t) == VAR_DECL | |
1830 | || TREE_CODE (t) == RESULT_DECL) | |
1831 | && DECL_HAS_VALUE_EXPR_P (t)) | |
1832 | { | |
1833 | tree v = DECL_VALUE_EXPR (t); | |
1834 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1835 | } | |
1836 | } | |
434eba35 JJ |
1837 | } |
1838 | return NULL; | |
1839 | } | |
1840 | ||
bbf9b913 RH |
1841 | /* Subroutine of instantiate_decls: Process all decls in the given |
1842 | BLOCK node and all its subblocks. */ | |
6f086dfc | 1843 | |
bbf9b913 RH |
1844 | static void |
1845 | instantiate_decls_1 (tree let) | |
1846 | { | |
1847 | tree t; | |
6f086dfc | 1848 | |
910ad8de | 1849 | for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t)) |
434eba35 JJ |
1850 | { |
1851 | if (DECL_RTL_SET_P (t)) | |
e41b2a33 | 1852 | instantiate_decl_rtl (DECL_RTL (t)); |
434eba35 JJ |
1853 | if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t)) |
1854 | { | |
1855 | tree v = DECL_VALUE_EXPR (t); | |
1856 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1857 | } | |
1858 | } | |
6f086dfc | 1859 | |
bbf9b913 | 1860 | /* Process all subblocks. */ |
87caf699 | 1861 | for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t)) |
bbf9b913 RH |
1862 | instantiate_decls_1 (t); |
1863 | } | |
6f086dfc | 1864 | |
bbf9b913 RH |
1865 | /* Scan all decls in FNDECL (both variables and parameters) and instantiate |
1866 | all virtual registers in their DECL_RTL's. */ | |
6f086dfc | 1867 | |
bbf9b913 RH |
1868 | static void |
1869 | instantiate_decls (tree fndecl) | |
1870 | { | |
c021f10b NF |
1871 | tree decl; |
1872 | unsigned ix; | |
6f086dfc | 1873 | |
bbf9b913 | 1874 | /* Process all parameters of the function. */ |
910ad8de | 1875 | for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl)) |
bbf9b913 | 1876 | { |
e41b2a33 PB |
1877 | instantiate_decl_rtl (DECL_RTL (decl)); |
1878 | instantiate_decl_rtl (DECL_INCOMING_RTL (decl)); | |
434eba35 JJ |
1879 | if (DECL_HAS_VALUE_EXPR_P (decl)) |
1880 | { | |
1881 | tree v = DECL_VALUE_EXPR (decl); | |
1882 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1883 | } | |
bbf9b913 | 1884 | } |
4fd796bb | 1885 | |
37d6a488 AO |
1886 | if ((decl = DECL_RESULT (fndecl)) |
1887 | && TREE_CODE (decl) == RESULT_DECL) | |
1888 | { | |
1889 | if (DECL_RTL_SET_P (decl)) | |
1890 | instantiate_decl_rtl (DECL_RTL (decl)); | |
1891 | if (DECL_HAS_VALUE_EXPR_P (decl)) | |
1892 | { | |
1893 | tree v = DECL_VALUE_EXPR (decl); | |
1894 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1895 | } | |
1896 | } | |
1897 | ||
3fd48b12 EB |
1898 | /* Process the saved static chain if it exists. */ |
1899 | decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl; | |
1900 | if (decl && DECL_HAS_VALUE_EXPR_P (decl)) | |
1901 | instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl))); | |
1902 | ||
bbf9b913 RH |
1903 | /* Now process all variables defined in the function or its subblocks. */ |
1904 | instantiate_decls_1 (DECL_INITIAL (fndecl)); | |
802e9f8e | 1905 | |
c021f10b NF |
1906 | FOR_EACH_LOCAL_DECL (cfun, ix, decl) |
1907 | if (DECL_RTL_SET_P (decl)) | |
1908 | instantiate_decl_rtl (DECL_RTL (decl)); | |
9771b263 | 1909 | vec_free (cfun->local_decls); |
bbf9b913 | 1910 | } |
6f086dfc | 1911 | |
bbf9b913 RH |
1912 | /* Pass through the insns of the current function and convert virtual register | |
1913 | references to hard register references. */ | |
6f086dfc | 1914 | |
c2924966 | 1915 | static unsigned int |
bbf9b913 RH |
1916 | instantiate_virtual_regs (void) |
1917 | { | |
691fe203 | 1918 | rtx_insn *insn; |
6f086dfc | 1919 | |
bbf9b913 RH |
1920 | /* Compute the offsets to use for this function. */ |
1921 | in_arg_offset = FIRST_PARM_OFFSET (current_function_decl); | |
1922 | var_offset = STARTING_FRAME_OFFSET; | |
1923 | dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl); | |
1924 | out_arg_offset = STACK_POINTER_OFFSET; | |
f6672e8e RH |
1925 | #ifdef FRAME_POINTER_CFA_OFFSET |
1926 | cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl); | |
1927 | #else | |
bbf9b913 | 1928 | cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl); |
f6672e8e | 1929 | #endif |
e9a25f70 | 1930 | |
bbf9b913 RH |
1931 | /* Initialize recognition, indicating that volatile is OK. */ |
1932 | init_recog (); | |
6f086dfc | 1933 | |
bbf9b913 RH |
1934 | /* Scan through all the insns, instantiating every virtual register still |
1935 | present. */ | |
45dbce1b NF |
1936 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
1937 | if (INSN_P (insn)) | |
1938 | { | |
1939 | /* These patterns in the instruction stream can never be recognized. | |
1940 | Fortunately, they shouldn't contain virtual registers either. */ | |
39718607 | 1941 | if (GET_CODE (PATTERN (insn)) == USE |
45dbce1b | 1942 | || GET_CODE (PATTERN (insn)) == CLOBBER |
45dbce1b NF |
1943 | || GET_CODE (PATTERN (insn)) == ASM_INPUT) |
1944 | continue; | |
1945 | else if (DEBUG_INSN_P (insn)) | |
b8704801 | 1946 | instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn)); |
45dbce1b NF |
1947 | else |
1948 | instantiate_virtual_regs_in_insn (insn); | |
ba4807a0 | 1949 | |
4654c0cf | 1950 | if (insn->deleted ()) |
45dbce1b | 1951 | continue; |
7114321e | 1952 | |
b8704801 | 1953 | instantiate_virtual_regs_in_rtx (®_NOTES (insn)); |
ba4807a0 | 1954 | |
45dbce1b NF |
1955 | /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */ |
1956 | if (CALL_P (insn)) | |
b8704801 | 1957 | instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn)); |
45dbce1b | 1958 | } |
6f086dfc | 1959 | |
bbf9b913 RH |
1960 | /* Instantiate the virtual registers in the DECLs for debugging purposes. */ |
1961 | instantiate_decls (current_function_decl); | |
1962 | ||
e41b2a33 PB |
1963 | targetm.instantiate_decls (); |
1964 | ||
bbf9b913 RH |
1965 | /* Indicate that, from now on, assign_stack_local should use |
1966 | frame_pointer_rtx. */ | |
1967 | virtuals_instantiated = 1; | |
d3c12306 | 1968 | |
c2924966 | 1969 | return 0; |
6f086dfc | 1970 | } |
ef330312 | 1971 | |
27a4cd48 DM |
1972 | namespace { |
1973 | ||
1974 | const pass_data pass_data_instantiate_virtual_regs = | |
1975 | { | |
1976 | RTL_PASS, /* type */ | |
1977 | "vregs", /* name */ | |
1978 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
1979 | TV_NONE, /* tv_id */ |
1980 | 0, /* properties_required */ | |
1981 | 0, /* properties_provided */ | |
1982 | 0, /* properties_destroyed */ | |
1983 | 0, /* todo_flags_start */ | |
1984 | 0, /* todo_flags_finish */ | |
ef330312 PB |
1985 | }; |
1986 | ||
27a4cd48 DM |
1987 | class pass_instantiate_virtual_regs : public rtl_opt_pass |
1988 | { | |
1989 | public: | |
c3284718 RS |
1990 | pass_instantiate_virtual_regs (gcc::context *ctxt) |
1991 | : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt) | |
27a4cd48 DM |
1992 | {} |
1993 | ||
1994 | /* opt_pass methods: */ | |
be55bfe6 TS |
1995 | virtual unsigned int execute (function *) |
1996 | { | |
1997 | return instantiate_virtual_regs (); | |
1998 | } | |
27a4cd48 DM |
1999 | |
2000 | }; // class pass_instantiate_virtual_regs | |
2001 | ||
2002 | } // anon namespace | |
2003 | ||
2004 | rtl_opt_pass * | |
2005 | make_pass_instantiate_virtual_regs (gcc::context *ctxt) | |
2006 | { | |
2007 | return new pass_instantiate_virtual_regs (ctxt); | |
2008 | } | |
2009 | ||
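/* A standalone C++ sketch (not the real GCC pass framework) of the pattern
   above: static pass metadata, a pass class whose virtual execute method
   forwards to the worker function, and a factory that allocates the pass.
   Every class, struct, and function name here is invented.  */

struct sketch_pass_data
{
  const char *name;
};

class sketch_pass
{
public:
  explicit sketch_pass (const sketch_pass_data &data) : m_data (data) {}
  virtual ~sketch_pass () {}
  virtual unsigned int execute () = 0;
  const char *name () const { return m_data.name; }
private:
  sketch_pass_data m_data;
};

/* Stand-in for the worker (instantiate_virtual_regs above).  */
static unsigned int
sketch_vregs_worker ()
{
  return 0;
}

namespace {

const sketch_pass_data sketch_vregs_data = { "vregs" };

class sketch_pass_vregs : public sketch_pass
{
public:
  sketch_pass_vregs () : sketch_pass (sketch_vregs_data) {}
  virtual unsigned int execute () { return sketch_vregs_worker (); }
};

} // anon namespace

sketch_pass *
make_sketch_pass_vregs ()
{
  return new sketch_pass_vregs ();
}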
6f086dfc | 2010 | \f |
d181c154 RS |
2011 | /* Return 1 if EXP is an aggregate type (or a value with aggregate type). |
2012 | This means a type for which function calls must pass an address to the | |
2013 | function or get an address back from the function. | |
2014 | EXP may be a type node or an expression (whose type is tested). */ | |
6f086dfc RS |
2015 | |
2016 | int | |
586de218 | 2017 | aggregate_value_p (const_tree exp, const_tree fntype) |
6f086dfc | 2018 | { |
d47d0a8d | 2019 | const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); |
9d790a4f RS |
2020 | int i, regno, nregs; |
2021 | rtx reg; | |
2f939d94 | 2022 | |
61f71b34 DD |
2023 | if (fntype) |
2024 | switch (TREE_CODE (fntype)) | |
2025 | { | |
2026 | case CALL_EXPR: | |
d47d0a8d EB |
2027 | { |
2028 | tree fndecl = get_callee_fndecl (fntype); | |
1304953e JJ |
2029 | if (fndecl) |
2030 | fntype = TREE_TYPE (fndecl); | |
2031 | else if (CALL_EXPR_FN (fntype)) | |
2032 | fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))); | |
2033 | else | |
2034 | /* For internal functions, assume nothing needs to be | |
2035 | returned in memory. */ | |
2036 | return 0; | |
d47d0a8d | 2037 | } |
61f71b34 DD |
2038 | break; |
2039 | case FUNCTION_DECL: | |
d47d0a8d | 2040 | fntype = TREE_TYPE (fntype); |
61f71b34 DD |
2041 | break; |
2042 | case FUNCTION_TYPE: | |
2043 | case METHOD_TYPE: | |
2044 | break; | |
2045 | case IDENTIFIER_NODE: | |
d47d0a8d | 2046 | fntype = NULL_TREE; |
61f71b34 DD |
2047 | break; |
2048 | default: | |
d47d0a8d | 2049 | /* We don't expect other tree types here. */ |
0bccc606 | 2050 | gcc_unreachable (); |
61f71b34 DD |
2051 | } |
2052 | ||
d47d0a8d | 2053 | if (VOID_TYPE_P (type)) |
d7bf8ada | 2054 | return 0; |
500c353d | 2055 | |
ebf0bf7f JJ |
2056 | /* If a record should be passed the same as its first (and only) member, | |
2057 | don't pass it as an aggregate. */ | |
2058 | if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type)) | |
2059 | return aggregate_value_p (first_field (type), fntype); | |
2060 | ||
cc77ae10 JM |
2061 | /* If the front end has decided that this needs to be passed by |
2062 | reference, do so. */ | |
2063 | if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL) | |
2064 | && DECL_BY_REFERENCE (exp)) | |
2065 | return 1; | |
500c353d | 2066 | |
d47d0a8d EB |
2067 | /* Function types that are TREE_ADDRESSABLE force return in memory. */ |
2068 | if (fntype && TREE_ADDRESSABLE (fntype)) | |
500c353d | 2069 | return 1; |
b8698a0f | 2070 | |
956d6950 | 2071 | /* Types that are TREE_ADDRESSABLE must be constructed in memory, |
49a2e5b2 DE |
2072 | and thus can't be returned in registers. */ |
2073 | if (TREE_ADDRESSABLE (type)) | |
2074 | return 1; | |
d47d0a8d | 2075 | |
05e3bdb9 | 2076 | if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type)) |
6f086dfc | 2077 | return 1; |
d47d0a8d EB |
2078 | |
2079 | if (targetm.calls.return_in_memory (type, fntype)) | |
2080 | return 1; | |
2081 | ||
9d790a4f RS |
2082 | /* Make sure we have suitable call-clobbered regs to return |
2083 | the value in; if not, we must return it in memory. */ | |
1d636cc6 | 2084 | reg = hard_function_value (type, 0, fntype, 0); |
e71f7aa5 JW |
2085 | |
2086 | /* If we have something other than a REG (e.g. a PARALLEL), then assume | |
2087 | it is OK. */ | |
f8cfc6aa | 2088 | if (!REG_P (reg)) |
e71f7aa5 JW |
2089 | return 0; |
2090 | ||
9d790a4f | 2091 | regno = REGNO (reg); |
66fd46b6 | 2092 | nregs = hard_regno_nregs[regno][TYPE_MODE (type)]; |
9d790a4f RS |
2093 | for (i = 0; i < nregs; i++) |
2094 | if (! call_used_regs[regno + i]) | |
2095 | return 1; | |
d47d0a8d | 2096 | |
6f086dfc RS |
2097 | return 0; |
2098 | } | |
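/* A standalone sketch (not GCC code) of the decision cascade in
   aggregate_value_p above: a value is returned in memory if the front end
   forces it by reference, if the type is addressable, if -fpcc-struct-return
   applies to an aggregate, if the target's return_in_memory hook says so, or
   if no suitable call-clobbered return registers exist.  Each flag is an
   invented stand-in for the corresponding test above.  */

static int
returns_in_memory_sketch (int forced_by_reference,
                          int type_addressable,
                          int pcc_struct_return_aggregate,
                          int target_wants_memory,
                          int have_call_clobbered_return_regs)
{
  if (forced_by_reference)
    return 1;
  if (type_addressable)
    return 1;
  if (pcc_struct_return_aggregate)
    return 1;
  if (target_wants_memory)
    return 1;
  if (!have_call_clobbered_return_regs)
    return 1;
  return 0;
}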
2099 | \f | |
8fff4fc1 RH |
2100 | /* Return true if we should assign DECL a pseudo register; false if it |
2101 | should live on the local stack. */ | |
2102 | ||
2103 | bool | |
fa233e34 | 2104 | use_register_for_decl (const_tree decl) |
8fff4fc1 | 2105 | { |
1f9ceff1 AO |
2106 | if (TREE_CODE (decl) == SSA_NAME) |
2107 | { | |
2108 | /* We often try to use the SSA_NAME, instead of its underlying | |
2109 | decl, to get type information and guide decisions, to avoid | |
2110 | differences of behavior between anonymous and named | |
2111 | variables, but in this one case we have to go for the actual | |
2112 | variable if there is one. The main reason is that, at least | |
2113 | at -O0, we want to place user variables on the stack, but we | |
2114 | don't mind using pseudos for anonymous or ignored temps. | |
2115 | Should we take the SSA_NAME, we'd conclude all SSA_NAMEs | |
2116 | should go in pseudos, whereas their corresponding variables | |
2117 | might have to go on the stack. So, disregarding the decl | |
2118 | here would negatively impact debug info at -O0, enable | |
2119 | coalescing between SSA_NAMEs that ought to get different | |
2120 | stack/pseudo assignments, and get the incoming argument | |
2121 | processing thoroughly confused by PARM_DECLs expected to live | |
2122 | in stack slots but assigned to pseudos. */ | |
2123 | if (!SSA_NAME_VAR (decl)) | |
2124 | return TYPE_MODE (TREE_TYPE (decl)) != BLKmode | |
2125 | && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl))); | |
2126 | ||
2127 | decl = SSA_NAME_VAR (decl); | |
2128 | } | |
2129 | ||
8fff4fc1 RH |
2130 | /* Honor volatile. */ |
2131 | if (TREE_SIDE_EFFECTS (decl)) | |
2132 | return false; | |
2133 | ||
2134 | /* Honor addressability. */ | |
2135 | if (TREE_ADDRESSABLE (decl)) | |
2136 | return false; | |
2137 | ||
f11a7b6d AO |
2138 | /* RESULT_DECLs are a bit special in that they're assigned without |
2139 | regard to use_register_for_decl, but we generally only store in | |
2140 | them. If we coalesce their SSA NAMEs, we'd better return a | |
2141 | result that matches the assignment in expand_function_start. */ | |
2142 | if (TREE_CODE (decl) == RESULT_DECL) | |
2143 | { | |
2144 | /* If it's not an aggregate, we're going to use a REG or a | |
2145 | PARALLEL containing a REG. */ | |
2146 | if (!aggregate_value_p (decl, current_function_decl)) | |
2147 | return true; | |
2148 | ||
2149 | /* If expand_function_start determines the return value, we'll | |
2150 | use MEM if it's not by reference. */ | |
2151 | if (cfun->returns_pcc_struct | |
2152 | || (targetm.calls.struct_value_rtx | |
2153 | (TREE_TYPE (current_function_decl), 1))) | |
2154 | return DECL_BY_REFERENCE (decl); | |
2155 | ||
2156 | /* Otherwise, we're taking an extra all.function_result_decl | |
2157 | argument. It's set up in assign_parms_augmented_arg_list, | |
2158 | under the (negated) conditions above, and then it's used to | |
2159 | set up the RESULT_DECL rtl in assign_parms, after looping | |
2160 | over all parameters. Now, if the RESULT_DECL is not by | |
2161 | reference, we'll use a MEM either way. */ | |
2162 | if (!DECL_BY_REFERENCE (decl)) | |
2163 | return false; | |
2164 | ||
2165 | /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take | |
2166 | the function_result_decl's assignment. Since it's a pointer, | |
2167 | we can short-circuit a number of the tests below, and we must | |
2168 | duplicate them because we don't have the | |
2169 | function_result_decl to test. */ | |
2170 | if (!targetm.calls.allocate_stack_slots_for_args ()) | |
2171 | return true; | |
2172 | /* We don't set DECL_IGNORED_P for the function_result_decl. */ | |
2173 | if (optimize) | |
2174 | return true; | |
2175 | /* We don't set DECL_REGISTER for the function_result_decl. */ | |
2176 | return false; | |
2177 | } | |
2178 | ||
d5e254e1 IE |
2179 | /* Decl is implicitly addressable by bound stores and loads | |
2180 | if it is an aggregate holding bounds. */ | |
2181 | if (chkp_function_instrumented_p (current_function_decl) | |
2182 | && TREE_TYPE (decl) | |
2183 | && !BOUNDED_P (decl) | |
2184 | && chkp_type_has_pointer (TREE_TYPE (decl))) | |
2185 | return false; | |
2186 | ||
8fff4fc1 RH |
2187 | /* Only register-like things go in registers. */ |
2188 | if (DECL_MODE (decl) == BLKmode) | |
2189 | return false; | |
2190 | ||
2191 | /* If -ffloat-store specified, don't put explicit float variables | |
2192 | into registers. */ | |
2193 | /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa | |
2194 | propagates values across these stores, and it probably shouldn't. */ | |
2195 | if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl))) | |
2196 | return false; | |
2197 | ||
de0fb905 AB |
2198 | if (!targetm.calls.allocate_stack_slots_for_args ()) |
2199 | return true; | |
2200 | ||
78e0d62b RH |
2201 | /* If we're not interested in tracking debugging information for |
2202 | this decl, then we can certainly put it in a register. */ | |
2203 | if (DECL_IGNORED_P (decl)) | |
8fff4fc1 RH |
2204 | return true; |
2205 | ||
d130d647 JJ |
2206 | if (optimize) |
2207 | return true; | |
2208 | ||
2209 | if (!DECL_REGISTER (decl)) | |
2210 | return false; | |
2211 | ||
2212 | switch (TREE_CODE (TREE_TYPE (decl))) | |
2213 | { | |
2214 | case RECORD_TYPE: | |
2215 | case UNION_TYPE: | |
2216 | case QUAL_UNION_TYPE: | |
2217 | /* When not optimizing, disregard register keyword for variables with | |
2218 | types containing methods, otherwise the methods won't be callable | |
2219 | from the debugger. */ | |
5ce039df | 2220 | if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl)))) |
d130d647 JJ |
2221 | return false; |
2222 | break; | |
2223 | default: | |
2224 | break; | |
2225 | } | |
2226 | ||
2227 | return true; | |
8fff4fc1 RH |
2228 | } |
2229 | ||
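/* A standalone sketch (not GCC code) of the tail of use_register_for_decl
   above, for ordinary variables: volatile, addressable, BLKmode, or
   -ffloat-store floats must live on the stack; ignored decls and optimized
   code may use pseudos; otherwise the user's "register" keyword decides.
   All flags are invented stand-ins for the tests above.  */

static int
use_register_sketch (int side_effects, int addressable, int blk_mode,
                     int float_store_float, int ignored_p, int optimizing,
                     int register_keyword)
{
  if (side_effects || addressable || blk_mode || float_store_float)
    return 0;                   /* must live on the stack */
  if (ignored_p || optimizing)
    return 1;                   /* a pseudo register is fine */
  return register_keyword;      /* honor "register" at -O0 */
}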
6071dc7f RH |
2230 | /* Structures to communicate between the subroutines of assign_parms. |
2231 | The first holds data persistent across all parameters, the second | |
2232 | is cleared out for each parameter. */ | |
6f086dfc | 2233 | |
6071dc7f | 2234 | struct assign_parm_data_all |
6f086dfc | 2235 | { |
d5cc9181 JR |
2236 | /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS |
2237 | should become a job of the target or otherwise encapsulated. */ | |
2238 | CUMULATIVE_ARGS args_so_far_v; | |
2239 | cumulative_args_t args_so_far; | |
6f086dfc | 2240 | struct args_size stack_args_size; |
6071dc7f RH |
2241 | tree function_result_decl; |
2242 | tree orig_fnargs; | |
7a688d52 DM |
2243 | rtx_insn *first_conversion_insn; |
2244 | rtx_insn *last_conversion_insn; | |
6071dc7f RH |
2245 | HOST_WIDE_INT pretend_args_size; |
2246 | HOST_WIDE_INT extra_pretend_bytes; | |
2247 | int reg_parm_stack_space; | |
2248 | }; | |
6f086dfc | 2249 | |
6071dc7f RH |
2250 | struct assign_parm_data_one |
2251 | { | |
2252 | tree nominal_type; | |
2253 | tree passed_type; | |
2254 | rtx entry_parm; | |
2255 | rtx stack_parm; | |
ef4bddc2 RS |
2256 | machine_mode nominal_mode; |
2257 | machine_mode passed_mode; | |
2258 | machine_mode promoted_mode; | |
6071dc7f RH |
2259 | struct locate_and_pad_arg_data locate; |
2260 | int partial; | |
2261 | BOOL_BITFIELD named_arg : 1; | |
6071dc7f RH |
2262 | BOOL_BITFIELD passed_pointer : 1; |
2263 | BOOL_BITFIELD on_stack : 1; | |
2264 | BOOL_BITFIELD loaded_in_reg : 1; | |
2265 | }; | |
ebb904cb | 2266 | |
d5e254e1 IE |
2267 | struct bounds_parm_data |
2268 | { | |
2269 | assign_parm_data_one parm_data; | |
2270 | tree bounds_parm; | |
2271 | tree ptr_parm; | |
2272 | rtx ptr_entry; | |
2273 | int bound_no; | |
2274 | }; | |
2275 | ||
6071dc7f | 2276 | /* A subroutine of assign_parms. Initialize ALL. */ |
6f086dfc | 2277 | |
6071dc7f RH |
2278 | static void |
2279 | assign_parms_initialize_all (struct assign_parm_data_all *all) | |
2280 | { | |
fc2f1f53 | 2281 | tree fntype ATTRIBUTE_UNUSED; |
6f086dfc | 2282 | |
6071dc7f RH |
2283 | memset (all, 0, sizeof (*all)); |
2284 | ||
2285 | fntype = TREE_TYPE (current_function_decl); | |
2286 | ||
2287 | #ifdef INIT_CUMULATIVE_INCOMING_ARGS | |
d5cc9181 | 2288 | INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX); |
6071dc7f | 2289 | #else |
d5cc9181 | 2290 | INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX, |
6071dc7f RH |
2291 | current_function_decl, -1); |
2292 | #endif | |
d5cc9181 | 2293 | all->args_so_far = pack_cumulative_args (&all->args_so_far_v); |
6071dc7f | 2294 | |
ddbb449f AM |
2295 | #ifdef INCOMING_REG_PARM_STACK_SPACE |
2296 | all->reg_parm_stack_space | |
2297 | = INCOMING_REG_PARM_STACK_SPACE (current_function_decl); | |
6071dc7f RH |
2298 | #endif |
2299 | } | |
6f086dfc | 2300 | |
6071dc7f RH |
2301 | /* If ARGS contains entries with complex types, split each such entry into | |
2302 | two entries of the component type. ARGS is modified in place; nothing | |
2303 | is returned. */ | |
2304 | ||
3b3f318a | 2305 | static void |
f11a7b6d | 2306 | split_complex_args (vec<tree> *args) |
6071dc7f | 2307 | { |
3b3f318a | 2308 | unsigned i; |
6071dc7f RH |
2309 | tree p; |
2310 | ||
9771b263 | 2311 | FOR_EACH_VEC_ELT (*args, i, p) |
6071dc7f RH |
2312 | { |
2313 | tree type = TREE_TYPE (p); | |
2314 | if (TREE_CODE (type) == COMPLEX_TYPE | |
2315 | && targetm.calls.split_complex_arg (type)) | |
2316 | { | |
2317 | tree decl; | |
2318 | tree subtype = TREE_TYPE (type); | |
6ccd356e | 2319 | bool addressable = TREE_ADDRESSABLE (p); |
6071dc7f RH |
2320 | |
2321 | /* Rewrite the PARM_DECL's type with its component. */ | |
3b3f318a | 2322 | p = copy_node (p); |
6071dc7f RH |
2323 | TREE_TYPE (p) = subtype; |
2324 | DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p)); | |
2325 | DECL_MODE (p) = VOIDmode; | |
2326 | DECL_SIZE (p) = NULL; | |
2327 | DECL_SIZE_UNIT (p) = NULL; | |
6ccd356e AM |
2328 | /* If this arg must go in memory, put it in a pseudo here. |
2329 | We can't allow it to go in memory as per normal parms, | |
2330 | because the usual place might not have the imag part | |
2331 | adjacent to the real part. */ | |
2332 | DECL_ARTIFICIAL (p) = addressable; | |
2333 | DECL_IGNORED_P (p) = addressable; | |
2334 | TREE_ADDRESSABLE (p) = 0; | |
6071dc7f | 2335 | layout_decl (p, 0); |
9771b263 | 2336 | (*args)[i] = p; |
6071dc7f RH |
2337 | |
2338 | /* Build a second synthetic decl. */ | |
c2255bc4 AH |
2339 | decl = build_decl (EXPR_LOCATION (p), |
2340 | PARM_DECL, NULL_TREE, subtype); | |
6071dc7f | 2341 | DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p); |
6ccd356e AM |
2342 | DECL_ARTIFICIAL (decl) = addressable; |
2343 | DECL_IGNORED_P (decl) = addressable; | |
6071dc7f | 2344 | layout_decl (decl, 0); |
9771b263 | 2345 | args->safe_insert (++i, decl); |
6071dc7f RH |
2346 | } |
2347 | } | |
6071dc7f RH |
2348 | } |
2349 | ||
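/* A standalone C++ sketch (not GCC code) of the list surgery above: each
   complex entry is rewritten as its component type and a second synthetic
   entry is inserted right after it, with the loop index skipping over the
   insertion just as safe_insert (++i, decl) does.  The sketch_parm type is
   an invented stand-in for PARM_DECL.  */

#include <vector>

struct sketch_parm
{
  bool is_complex;      /* the target wants this COMPLEX_TYPE arg split */
  int component_kind;   /* stands in for the component type */
};

static void
split_complex_sketch (std::vector<sketch_parm> *args)
{
  for (unsigned i = 0; i < args->size (); i++)
    if ((*args)[i].is_complex)
      {
        /* Rewrite the original entry as the (scalar) real part.  */
        (*args)[i].is_complex = false;
        /* Insert a second synthetic entry for the imaginary part and
           advance past it.  */
        sketch_parm imag = (*args)[i];
        args->insert (args->begin () + ++i, imag);
      }
}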
2350 | /* A subroutine of assign_parms. Adjust the parameter list to incorporate | |
2351 | the hidden struct return argument, and (abi willing) complex args. | |
2352 | Return the new parameter list. */ | |
2353 | ||
9771b263 | 2354 | static vec<tree> |
6071dc7f RH |
2355 | assign_parms_augmented_arg_list (struct assign_parm_data_all *all) |
2356 | { | |
2357 | tree fndecl = current_function_decl; | |
2358 | tree fntype = TREE_TYPE (fndecl); | |
6e1aa848 | 2359 | vec<tree> fnargs = vNULL; |
3b3f318a RG |
2360 | tree arg; |
2361 | ||
910ad8de | 2362 | for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg)) |
9771b263 | 2363 | fnargs.safe_push (arg); |
3b3f318a RG |
2364 | |
2365 | all->orig_fnargs = DECL_ARGUMENTS (fndecl); | |
6f086dfc RS |
2366 | |
2367 | /* If struct value address is treated as the first argument, make it so. */ | |
61f71b34 | 2368 | if (aggregate_value_p (DECL_RESULT (fndecl), fndecl) |
e3b5732b | 2369 | && ! cfun->returns_pcc_struct |
61f71b34 | 2370 | && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0) |
6f086dfc | 2371 | { |
f9f29478 | 2372 | tree type = build_pointer_type (TREE_TYPE (fntype)); |
6071dc7f | 2373 | tree decl; |
6f086dfc | 2374 | |
c2255bc4 | 2375 | decl = build_decl (DECL_SOURCE_LOCATION (fndecl), |
8dcfef8f | 2376 | PARM_DECL, get_identifier (".result_ptr"), type); |
6071dc7f RH |
2377 | DECL_ARG_TYPE (decl) = type; |
2378 | DECL_ARTIFICIAL (decl) = 1; | |
8dcfef8f AO |
2379 | DECL_NAMELESS (decl) = 1; |
2380 | TREE_CONSTANT (decl) = 1; | |
f11a7b6d AO |
2381 | /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this |
2382 | changes, the end of the RESULT_DECL handling block in | |
2383 | use_register_for_decl must be adjusted to match. */ | |
6f086dfc | 2384 | |
910ad8de | 2385 | DECL_CHAIN (decl) = all->orig_fnargs; |
3b3f318a | 2386 | all->orig_fnargs = decl; |
9771b263 | 2387 | fnargs.safe_insert (0, decl); |
3b3f318a | 2388 | |
6071dc7f | 2389 | all->function_result_decl = decl; |
d5e254e1 IE |
2390 | |
2391 | /* If function is instrumented then bounds of the | |
2392 | passed structure address is the second argument. */ | |
2393 | if (chkp_function_instrumented_p (fndecl)) | |
2394 | { | |
2395 | decl = build_decl (DECL_SOURCE_LOCATION (fndecl), | |
2396 | PARM_DECL, get_identifier (".result_bnd"), | |
2397 | pointer_bounds_type_node); | |
2398 | DECL_ARG_TYPE (decl) = pointer_bounds_type_node; | |
2399 | DECL_ARTIFICIAL (decl) = 1; | |
2400 | DECL_NAMELESS (decl) = 1; | |
2401 | TREE_CONSTANT (decl) = 1; | |
2402 | ||
2403 | DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs); | |
2404 | DECL_CHAIN (all->orig_fnargs) = decl; | |
2405 | fnargs.safe_insert (1, decl); | |
2406 | } | |
6f086dfc | 2407 | } |
718fe406 | 2408 | |
42ba5130 RH |
2409 | /* If the target wants to split complex arguments into scalars, do so. */ |
2410 | if (targetm.calls.split_complex_arg) | |
f11a7b6d | 2411 | split_complex_args (&fnargs); |
ded9bf77 | 2412 | |
6071dc7f RH |
2413 | return fnargs; |
2414 | } | |
e7949876 | 2415 | |
6071dc7f RH |
2416 | /* A subroutine of assign_parms. Examine PARM and pull out type and mode |
2417 | data for the parameter. Incorporate ABI specifics such as pass-by- | |
2418 | reference and type promotion. */ | |
6f086dfc | 2419 | |
6071dc7f RH |
2420 | static void |
2421 | assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, | |
2422 | struct assign_parm_data_one *data) | |
2423 | { | |
2424 | tree nominal_type, passed_type; | |
ef4bddc2 | 2425 | machine_mode nominal_mode, passed_mode, promoted_mode; |
cde0f3fd | 2426 | int unsignedp; |
6f086dfc | 2427 | |
6071dc7f RH |
2428 | memset (data, 0, sizeof (*data)); |
2429 | ||
fa10beec | 2430 | /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */ |
e3b5732b | 2431 | if (!cfun->stdarg) |
fa10beec | 2432 | data->named_arg = 1; /* No variadic parms. */ |
910ad8de | 2433 | else if (DECL_CHAIN (parm)) |
fa10beec | 2434 | data->named_arg = 1; /* Not the last non-variadic parm. */ |
d5cc9181 | 2435 | else if (targetm.calls.strict_argument_naming (all->args_so_far)) |
fa10beec | 2436 | data->named_arg = 1; /* Only variadic ones are unnamed. */ |
6071dc7f | 2437 | else |
fa10beec | 2438 | data->named_arg = 0; /* Treat as variadic. */ |
6071dc7f RH |
2439 | |
2440 | nominal_type = TREE_TYPE (parm); | |
2441 | passed_type = DECL_ARG_TYPE (parm); | |
2442 | ||
2443 | /* Look out for errors propagating this far. Also, if the parameter's | |
2444 | type is void then its value doesn't matter. */ | |
2445 | if (TREE_TYPE (parm) == error_mark_node | |
2446 | /* This can happen after weird syntax errors | |
2447 | or if an enum type is defined among the parms. */ | |
2448 | || TREE_CODE (parm) != PARM_DECL | |
2449 | || passed_type == NULL | |
2450 | || VOID_TYPE_P (nominal_type)) | |
2451 | { | |
2452 | nominal_type = passed_type = void_type_node; | |
2453 | nominal_mode = passed_mode = promoted_mode = VOIDmode; | |
2454 | goto egress; | |
2455 | } | |
108b7d3d | 2456 | |
6071dc7f RH |
2457 | /* Find mode of arg as it is passed, and mode of arg as it should be |
2458 | during execution of this function. */ | |
2459 | passed_mode = TYPE_MODE (passed_type); | |
2460 | nominal_mode = TYPE_MODE (nominal_type); | |
2461 | ||
ebf0bf7f JJ |
2462 | /* If the parm is to be passed as a transparent union or record, use the |
2463 | type of the first field for the tests below. We have already verified | |
2464 | that the modes are the same. */ | |
2465 | if ((TREE_CODE (passed_type) == UNION_TYPE | |
2466 | || TREE_CODE (passed_type) == RECORD_TYPE) | |
2467 | && TYPE_TRANSPARENT_AGGR (passed_type)) | |
2468 | passed_type = TREE_TYPE (first_field (passed_type)); | |
6071dc7f | 2469 | |
0976078c | 2470 | /* See if this arg was passed by invisible reference. */ |
d5cc9181 | 2471 | if (pass_by_reference (&all->args_so_far_v, passed_mode, |
0976078c | 2472 | passed_type, data->named_arg)) |
6071dc7f RH |
2473 | { |
2474 | passed_type = nominal_type = build_pointer_type (passed_type); | |
2475 | data->passed_pointer = true; | |
fd91cfe3 | 2476 | passed_mode = nominal_mode = TYPE_MODE (nominal_type); |
6071dc7f | 2477 | } |
6f086dfc | 2478 | |
6071dc7f | 2479 | /* Find mode as it is passed by the ABI. */ |
cde0f3fd PB |
2480 | unsignedp = TYPE_UNSIGNED (passed_type); |
2481 | promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp, | |
2482 | TREE_TYPE (current_function_decl), 0); | |
6f086dfc | 2483 | |
6071dc7f RH |
2484 | egress: |
2485 | data->nominal_type = nominal_type; | |
2486 | data->passed_type = passed_type; | |
2487 | data->nominal_mode = nominal_mode; | |
2488 | data->passed_mode = passed_mode; | |
2489 | data->promoted_mode = promoted_mode; | |
2490 | } | |
16bae307 | 2491 | |
6071dc7f | 2492 | /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */ |
6f086dfc | 2493 | |
6071dc7f RH |
2494 | static void |
2495 | assign_parms_setup_varargs (struct assign_parm_data_all *all, | |
2496 | struct assign_parm_data_one *data, bool no_rtl) | |
2497 | { | |
2498 | int varargs_pretend_bytes = 0; | |
2499 | ||
d5cc9181 | 2500 | targetm.calls.setup_incoming_varargs (all->args_so_far, |
6071dc7f RH |
2501 | data->promoted_mode, |
2502 | data->passed_type, | |
2503 | &varargs_pretend_bytes, no_rtl); | |
2504 | ||
2505 | /* If the back-end has requested extra stack space, record how much is | |
2506 | needed. Do not change pretend_args_size otherwise since it may be | |
2507 | nonzero from an earlier partial argument. */ | |
2508 | if (varargs_pretend_bytes > 0) | |
2509 | all->pretend_args_size = varargs_pretend_bytes; | |
2510 | } | |
a53e14c0 | 2511 | |
6071dc7f RH |
2512 | /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to |
2513 | the incoming location of the current parameter. */ | |
2514 | ||
2515 | static void | |
2516 | assign_parm_find_entry_rtl (struct assign_parm_data_all *all, | |
2517 | struct assign_parm_data_one *data) | |
2518 | { | |
2519 | HOST_WIDE_INT pretend_bytes = 0; | |
2520 | rtx entry_parm; | |
2521 | bool in_regs; | |
2522 | ||
2523 | if (data->promoted_mode == VOIDmode) | |
2524 | { | |
2525 | data->entry_parm = data->stack_parm = const0_rtx; | |
2526 | return; | |
2527 | } | |
a53e14c0 | 2528 | |
d5cc9181 | 2529 | entry_parm = targetm.calls.function_incoming_arg (all->args_so_far, |
3c07301f NF |
2530 | data->promoted_mode, |
2531 | data->passed_type, | |
2532 | data->named_arg); | |
6f086dfc | 2533 | |
6071dc7f RH |
2534 | if (entry_parm == 0) |
2535 | data->promoted_mode = data->passed_mode; | |
6f086dfc | 2536 | |
6071dc7f RH |
2537 | /* Determine parm's home in the stack, in case it arrives in the stack |
2538 | or we should pretend it did. Compute the stack position and rtx where | |
2539 | the argument arrives and its size. | |
6f086dfc | 2540 | |
6071dc7f RH |
2541 | There is one complexity here: If this was a parameter that would |
2542 | have been passed in registers, but wasn't only because it is | |
2543 | __builtin_va_alist, we want locate_and_pad_parm to treat it as if | |
2544 | it came in a register so that REG_PARM_STACK_SPACE isn't skipped. | |
2545 | In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0 | |
2546 | as it was the previous time. */ | |
d5e254e1 | 2547 | in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type); |
6f086dfc | 2548 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
6071dc7f | 2549 | in_regs = true; |
e7949876 | 2550 | #endif |
6071dc7f RH |
2551 | if (!in_regs && !data->named_arg) |
2552 | { | |
d5cc9181 | 2553 | if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far)) |
e7949876 | 2554 | { |
6071dc7f | 2555 | rtx tem; |
d5cc9181 | 2556 | tem = targetm.calls.function_incoming_arg (all->args_so_far, |
3c07301f NF |
2557 | data->promoted_mode, |
2558 | data->passed_type, true); | |
6071dc7f | 2559 | in_regs = tem != NULL; |
e7949876 | 2560 | } |
6071dc7f | 2561 | } |
e7949876 | 2562 | |
6071dc7f RH |
2563 | /* If this parameter was passed both in registers and in the stack, use |
2564 | the copy on the stack. */ | |
fe984136 RH |
2565 | if (targetm.calls.must_pass_in_stack (data->promoted_mode, |
2566 | data->passed_type)) | |
6071dc7f | 2567 | entry_parm = 0; |
e7949876 | 2568 | |
6071dc7f RH |
2569 | if (entry_parm) |
2570 | { | |
2571 | int partial; | |
2572 | ||
d5cc9181 | 2573 | partial = targetm.calls.arg_partial_bytes (all->args_so_far, |
78a52f11 RH |
2574 | data->promoted_mode, |
2575 | data->passed_type, | |
2576 | data->named_arg); | |
6071dc7f RH |
2577 | data->partial = partial; |
2578 | ||
2579 | /* The caller might already have allocated stack space for the | |
2580 | register parameters. */ | |
2581 | if (partial != 0 && all->reg_parm_stack_space == 0) | |
975f3818 | 2582 | { |
6071dc7f RH |
2583 | /* Part of this argument is passed in registers and part |
2584 | is passed on the stack. Ask the prologue code to extend | |
2585 | the stack part so that we can recreate the full value. | |
2586 | ||
2587 | PRETEND_BYTES is the size of the registers we need to store. | |
2588 | CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra | |
2589 | stack space that the prologue should allocate. | |
2590 | ||
2591 | Internally, gcc assumes that the argument pointer is aligned | |
2592 | to STACK_BOUNDARY bits. This is used both for alignment | |
2593 | optimizations (see init_emit) and to locate arguments that are | |
2594 | aligned to more than PARM_BOUNDARY bits. We must preserve this | |
2595 | invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to | |
2596 | a stack boundary. */ | |
2597 | ||
2598 | /* We assume at most one partial arg, and it must be the first | |
2599 | argument on the stack. */ | |
0bccc606 | 2600 | gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size); |
6071dc7f | 2601 | |
78a52f11 | 2602 | pretend_bytes = partial; |
6071dc7f RH |
2603 | all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES); |
2604 | ||
2605 | /* We want to align relative to the actual stack pointer, so | |
2606 | don't include this in the stack size until later. */ | |
2607 | all->extra_pretend_bytes = all->pretend_args_size; | |
975f3818 | 2608 | } |
6071dc7f | 2609 | } |
e7949876 | 2610 | |
6071dc7f | 2611 | locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs, |
2e4ceca5 | 2612 | all->reg_parm_stack_space, |
6071dc7f RH |
2613 | entry_parm ? data->partial : 0, current_function_decl, |
2614 | &all->stack_args_size, &data->locate); | |
6f086dfc | 2615 | |
e94a448f L |
2616 | /* Update parm_stack_boundary if this parameter is passed in the |
2617 | stack. */ | |
2618 | if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary) | |
2619 | crtl->parm_stack_boundary = data->locate.boundary; | |
2620 | ||
6071dc7f RH |
2621 | /* Adjust offsets to include the pretend args. */ |
2622 | pretend_bytes = all->extra_pretend_bytes - pretend_bytes; | |
2623 | data->locate.slot_offset.constant += pretend_bytes; | |
2624 | data->locate.offset.constant += pretend_bytes; | |
ebca59c3 | 2625 | |
6071dc7f RH |
2626 | data->entry_parm = entry_parm; |
2627 | } | |
6f086dfc | 2628 | |
6071dc7f RH |
2629 | /* A subroutine of assign_parms. If there is actually space on the stack |
2630 | for this parm, count it in stack_args_size and return true. */ | |
6f086dfc | 2631 | |
6071dc7f RH |
2632 | static bool |
2633 | assign_parm_is_stack_parm (struct assign_parm_data_all *all, | |
2634 | struct assign_parm_data_one *data) | |
2635 | { | |
d5e254e1 IE |
2636 | /* Bounds are never passed on the stack to keep compatibility |
2637 | with non-instrumented code. */ | |
2638 | if (POINTER_BOUNDS_TYPE_P (data->passed_type)) | |
2639 | return false; | |
2e6ae27f | 2640 | /* Trivially true if we've no incoming register. */ |
d5e254e1 | 2641 | else if (data->entry_parm == NULL) |
6071dc7f RH |
2642 | ; |
2643 | /* Also true if we're partially in registers and partially not, | |
2644 | since we've arranged to drop the entire argument on the stack. */ | |
2645 | else if (data->partial != 0) | |
2646 | ; | |
2647 | /* Also true if the target says that it's passed in both registers | |
2648 | and on the stack. */ | |
2649 | else if (GET_CODE (data->entry_parm) == PARALLEL | |
2650 | && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX) | |
2651 | ; | |
2652 | /* Also true if the target says that there's stack allocated for | |
2653 | all register parameters. */ | |
2654 | else if (all->reg_parm_stack_space > 0) | |
2655 | ; | |
2656 | /* Otherwise, no, this parameter has no ABI defined stack slot. */ | |
2657 | else | |
2658 | return false; | |
6f086dfc | 2659 | |
6071dc7f RH |
2660 | all->stack_args_size.constant += data->locate.size.constant; |
2661 | if (data->locate.size.var) | |
2662 | ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var); | |
718fe406 | 2663 | |
6071dc7f RH |
2664 | return true; |
2665 | } | |
0d1416c6 | 2666 | |
6071dc7f RH |
2667 | /* A subroutine of assign_parms. Given that this parameter is allocated |
2668 | stack space by the ABI, find it. */ | |
6f086dfc | 2669 | |
6071dc7f RH |
2670 | static void |
2671 | assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data) | |
2672 | { | |
2673 | rtx offset_rtx, stack_parm; | |
2674 | unsigned int align, boundary; | |
6f086dfc | 2675 | |
6071dc7f RH |
2676 | /* If we're passing this arg using a reg, make its stack home the |
2677 | aligned stack slot. */ | |
2678 | if (data->entry_parm) | |
2679 | offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset); | |
2680 | else | |
2681 | offset_rtx = ARGS_SIZE_RTX (data->locate.offset); | |
2682 | ||
38173d38 | 2683 | stack_parm = crtl->args.internal_arg_pointer; |
6071dc7f RH |
2684 | if (offset_rtx != const0_rtx) |
2685 | stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx); | |
2686 | stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm); | |
2687 | ||
08ab0acf | 2688 | if (!data->passed_pointer) |
997f78fb | 2689 | { |
08ab0acf JJ |
2690 | set_mem_attributes (stack_parm, parm, 1); |
2691 | /* set_mem_attributes could set MEM_SIZE to the passed mode's size, | |
2692 | while promoted mode's size is needed. */ | |
2693 | if (data->promoted_mode != BLKmode | |
2694 | && data->promoted_mode != DECL_MODE (parm)) | |
997f78fb | 2695 | { |
f5541398 | 2696 | set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode)); |
527210c4 | 2697 | if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm)) |
08ab0acf JJ |
2698 | { |
2699 | int offset = subreg_lowpart_offset (DECL_MODE (parm), | |
2700 | data->promoted_mode); | |
2701 | if (offset) | |
527210c4 | 2702 | set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset); |
08ab0acf | 2703 | } |
997f78fb JJ |
2704 | } |
2705 | } | |
6071dc7f | 2706 | |
bfc45551 AM |
2707 | boundary = data->locate.boundary; |
2708 | align = BITS_PER_UNIT; | |
6071dc7f RH |
2709 | |
2710 | /* If we're padding upward, we know that the alignment of the slot | |
c2ed6cf8 | 2711 | is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're |
6071dc7f RH |
2712 | intentionally forcing upward padding. Otherwise we have to come |
2713 | up with a guess at the alignment based on OFFSET_RTX. */ | |
bfc45551 | 2714 | if (data->locate.where_pad != downward || data->entry_parm) |
6071dc7f | 2715 | align = boundary; |
481683e1 | 2716 | else if (CONST_INT_P (offset_rtx)) |
6071dc7f RH |
2717 | { |
2718 | align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary; | |
146ec50f | 2719 | align = least_bit_hwi (align); |
6071dc7f | 2720 | } |
bfc45551 | 2721 | set_mem_align (stack_parm, align); |
6071dc7f RH |
2722 | |
2723 | if (data->entry_parm) | |
2724 | set_reg_attrs_for_parm (data->entry_parm, stack_parm); | |
2725 | ||
2726 | data->stack_parm = stack_parm; | |
2727 | } | |
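/* Illustrative note, not part of the GCC sources: the stack home built
   above always has the shape

       (mem:PROMOTED_MODE (plus:P internal_arg_pointer offset))

   (or just internal_arg_pointer when the offset is zero), where OFFSET
   comes from LOCATE->SLOT_OFFSET for register-passed arguments and from
   LOCATE->OFFSET otherwise.  */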
2728 | ||
2729 | /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's | |
2730 | always valid and contiguous. */ | |
2731 | ||
2732 | static void | |
2733 | assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data) | |
2734 | { | |
2735 | rtx entry_parm = data->entry_parm; | |
2736 | rtx stack_parm = data->stack_parm; | |
2737 | ||
2738 | /* If this parm was passed part in regs and part in memory, pretend it | |
2739 | arrived entirely in memory by pushing the register-part onto the stack. | |
2740 | In the special case of a DImode or DFmode that is split, we could put | |
2741 | it together in a pseudoreg directly, but for now that's not worth | |
2742 | bothering with. */ | |
2743 | if (data->partial != 0) | |
2744 | { | |
2745 | /* Handle calls that pass values in multiple non-contiguous | |
2746 | locations. The Irix 6 ABI has examples of this. */ | |
2747 | if (GET_CODE (entry_parm) == PARALLEL) | |
1a8cb155 | 2748 | emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm, |
b8698a0f | 2749 | data->passed_type, |
6071dc7f | 2750 | int_size_in_bytes (data->passed_type)); |
6f086dfc | 2751 | else |
78a52f11 RH |
2752 | { |
2753 | gcc_assert (data->partial % UNITS_PER_WORD == 0); | |
1a8cb155 RS |
2754 | move_block_from_reg (REGNO (entry_parm), |
2755 | validize_mem (copy_rtx (stack_parm)), | |
78a52f11 RH |
2756 | data->partial / UNITS_PER_WORD); |
2757 | } | |
6f086dfc | 2758 | |
6071dc7f RH |
2759 | entry_parm = stack_parm; |
2760 | } | |
6f086dfc | 2761 | |
6071dc7f RH |
2762 | /* If we didn't decide this parm came in a register, by default it came |
2763 | on the stack. */ | |
2764 | else if (entry_parm == NULL) | |
2765 | entry_parm = stack_parm; | |
2766 | ||
2767 | /* When an argument is passed in multiple locations, we can't make use | |
2768 | of this information, but we can save some copying if the whole argument | |
2769 | is passed in a single register. */ | |
2770 | else if (GET_CODE (entry_parm) == PARALLEL | |
2771 | && data->nominal_mode != BLKmode | |
2772 | && data->passed_mode != BLKmode) | |
2773 | { | |
2774 | size_t i, len = XVECLEN (entry_parm, 0); | |
2775 | ||
2776 | for (i = 0; i < len; i++) | |
2777 | if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX | |
2778 | && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0)) | |
2779 | && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) | |
2780 | == data->passed_mode) | |
2781 | && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0) | |
2782 | { | |
2783 | entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0); | |
2784 | break; | |
2785 | } | |
2786 | } | |
e68a6ce1 | 2787 | |
6071dc7f RH |
2788 | data->entry_parm = entry_parm; |
2789 | } | |
6f086dfc | 2790 | |
4d2a9850 DJ |
2791 | /* A subroutine of assign_parms. Reconstitute any values which were |
2792 | passed in multiple registers and would fit in a single register. */ | |
2793 | ||
2794 | static void | |
2795 | assign_parm_remove_parallels (struct assign_parm_data_one *data) | |
2796 | { | |
2797 | rtx entry_parm = data->entry_parm; | |
2798 | ||
2799 | /* Convert the PARALLEL to a REG of the same mode as the parallel. | |
2800 | This can be done with register operations rather than on the | |
2801 | stack, even if we will store the reconstituted parameter on the | |
2802 | stack later. */ | |
85776d60 | 2803 | if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode) |
4d2a9850 DJ |
2804 | { |
2805 | rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
bbd46fd5 | 2806 | emit_group_store (parmreg, entry_parm, data->passed_type, |
4d2a9850 DJ |
2807 | GET_MODE_SIZE (GET_MODE (entry_parm))); |
2808 | entry_parm = parmreg; | |
2809 | } | |
2810 | ||
2811 | data->entry_parm = entry_parm; | |
2812 | } | |
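/* Illustrative note, not part of the GCC sources: an incoming PARALLEL
   such as, roughly,

       (parallel:TI [(expr_list (reg:DI 3) (const_int 0))
                     (expr_list (reg:DI 4) (const_int 8))])

   is reconstituted above into a single (reg:TI ...) pseudo with
   emit_group_store; the register numbers shown are made up.  */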
2813 | ||
6071dc7f RH |
2814 | /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's |
2815 | always valid and properly aligned. */ | |
6f086dfc | 2816 | |
6071dc7f | 2817 | static void |
f11a7b6d | 2818 | assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data) |
6071dc7f RH |
2819 | { |
2820 | rtx stack_parm = data->stack_parm; | |
2821 | ||
2822 | /* If we can't trust the parm stack slot to be aligned enough for its | |
2823 | ultimate type, don't use that slot after entry. We'll make another | |
2824 | stack slot, if we need one. */ | |
f11a7b6d AO |
2825 | if (stack_parm |
2826 | && ((STRICT_ALIGNMENT | |
2827 | && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)) | |
2828 | || (data->nominal_type | |
2829 | && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm) | |
2830 | && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY))) | |
6071dc7f RH |
2831 | stack_parm = NULL; |
2832 | ||
2833 | /* If parm was passed in memory, and we need to convert it on entry, | |
2834 | don't store it back in that same slot. */ | |
2835 | else if (data->entry_parm == stack_parm | |
2836 | && data->nominal_mode != BLKmode | |
2837 | && data->nominal_mode != data->passed_mode) | |
2838 | stack_parm = NULL; | |
2839 | ||
7d69de61 RH |
2840 | /* If stack protection is in effect for this function, don't leave any |
2841 | pointers in their passed stack slots. */ | |
cb91fab0 | 2842 | else if (crtl->stack_protect_guard |
7d69de61 RH |
2843 | && (flag_stack_protect == 2 |
2844 | || data->passed_pointer | |
2845 | || POINTER_TYPE_P (data->nominal_type))) | |
2846 | stack_parm = NULL; | |
2847 | ||
6071dc7f RH |
2848 | data->stack_parm = stack_parm; |
2849 | } | |
a0506b54 | 2850 | |
6071dc7f RH |
2851 | /* A subroutine of assign_parms. Return true if the current parameter |
2852 | should be stored as a BLKmode in the current frame. */ | |
2853 | ||
2854 | static bool | |
2855 | assign_parm_setup_block_p (struct assign_parm_data_one *data) | |
2856 | { | |
2857 | if (data->nominal_mode == BLKmode) | |
2858 | return true; | |
85776d60 DJ |
2859 | if (GET_MODE (data->entry_parm) == BLKmode) |
2860 | return true; | |
531547e9 | 2861 | |
6e985040 | 2862 | #ifdef BLOCK_REG_PADDING |
ae8c9754 RS |
2863 | /* Only assign_parm_setup_block knows how to deal with register arguments |
2864 | that are padded at the least significant end. */ | |
2865 | if (REG_P (data->entry_parm) | |
2866 | && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD | |
2867 | && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1) | |
2868 | == (BYTES_BIG_ENDIAN ? upward : downward))) | |
6071dc7f | 2869 | return true; |
6e985040 | 2870 | #endif |
6071dc7f RH |
2871 | |
2872 | return false; | |
2873 | } | |
2874 | ||
b8698a0f | 2875 | /* A subroutine of assign_parms. Arrange for the parameter to be |
6071dc7f RH |
2876 | present and valid in DATA->STACK_RTL. */ |
2877 | ||
2878 | static void | |
27e29549 RH |
2879 | assign_parm_setup_block (struct assign_parm_data_all *all, |
2880 | tree parm, struct assign_parm_data_one *data) | |
6071dc7f RH |
2881 | { |
2882 | rtx entry_parm = data->entry_parm; | |
2883 | rtx stack_parm = data->stack_parm; | |
f11a7b6d | 2884 | rtx target_reg = NULL_RTX; |
a029addd | 2885 | bool in_conversion_seq = false; |
bfc45551 AM |
2886 | HOST_WIDE_INT size; |
2887 | HOST_WIDE_INT size_stored; | |
6071dc7f | 2888 | |
27e29549 RH |
2889 | if (GET_CODE (entry_parm) == PARALLEL) |
2890 | entry_parm = emit_group_move_into_temps (entry_parm); | |
2891 | ||
f11a7b6d AO |
2892 | /* If we want the parameter in a pseudo, don't use a stack slot. */ |
2893 | if (is_gimple_reg (parm) && use_register_for_decl (parm)) | |
2894 | { | |
2895 | tree def = ssa_default_def (cfun, parm); | |
2896 | gcc_assert (def); | |
2897 | machine_mode mode = promote_ssa_mode (def, NULL); | |
2898 | rtx reg = gen_reg_rtx (mode); | |
2899 | if (GET_CODE (reg) != CONCAT) | |
2900 | stack_parm = reg; | |
2901 | else | |
a029addd AO |
2902 | { |
2903 | target_reg = reg; | |
2904 | /* Avoid allocating a stack slot, if there isn't one | |
2905 | preallocated by the ABI. It might seem like we should | |
2906 | always prefer a pseudo, but converting between | |
2907 | floating-point and integer modes goes through the stack | |
2908 | on various machines, so it's better to use the reserved | |
2909 | stack slot than to risk wasting it and allocating more | |
2910 | for the conversion. */ | |
2911 | if (stack_parm == NULL_RTX) | |
2912 | { | |
2913 | int save = generating_concat_p; | |
2914 | generating_concat_p = 0; | |
2915 | stack_parm = gen_reg_rtx (mode); | |
2916 | generating_concat_p = save; | |
2917 | } | |
2918 | } | |
f11a7b6d AO |
2919 | data->stack_parm = NULL; |
2920 | } | |
2921 | ||
bfc45551 AM |
2922 | size = int_size_in_bytes (data->passed_type); |
2923 | size_stored = CEIL_ROUND (size, UNITS_PER_WORD); | |
2924 | if (stack_parm == 0) | |
2925 | { | |
fe37c7af | 2926 | SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD)); |
f11a7b6d AO |
2927 | stack_parm = assign_stack_local (BLKmode, size_stored, |
2928 | DECL_ALIGN (parm)); | |
2929 | if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size) | |
2930 | PUT_MODE (stack_parm, GET_MODE (entry_parm)); | |
2931 | set_mem_attributes (stack_parm, parm, 1); | |
bfc45551 AM |
2932 | } |
2933 | ||
6071dc7f RH |
2934 | /* If a BLKmode arrives in registers, copy it to a stack slot. Handle |
2935 | calls that pass values in multiple non-contiguous locations. */ | |
2936 | if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL) | |
2937 | { | |
6071dc7f RH |
2938 | rtx mem; |
2939 | ||
2940 | /* Note that we will be storing an integral number of words. | |
2941 | So we have to be careful to ensure that we allocate an | |
bfc45551 | 2942 | integral number of words. We do this above when we call |
6071dc7f RH |
2943 | assign_stack_local if space was not allocated in the argument |
2944 | list. If it was, this will not work if PARM_BOUNDARY is not | |
2945 | a multiple of BITS_PER_WORD. It isn't clear how to fix this | |
2946 | if it becomes a problem. The exception is when BLKmode arrives |
2947 | with arguments not conforming to word_mode. */ | |
2948 | ||
bfc45551 AM |
2949 | if (data->stack_parm == 0) |
2950 | ; | |
6071dc7f RH |
2951 | else if (GET_CODE (entry_parm) == PARALLEL) |
2952 | ; | |
0bccc606 NS |
2953 | else |
2954 | gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD)); | |
6f086dfc | 2955 | |
1a8cb155 | 2956 | mem = validize_mem (copy_rtx (stack_parm)); |
c6b97fac | 2957 | |
6071dc7f | 2958 | /* Handle values in multiple non-contiguous locations. */ |
a029addd AO |
2959 | if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem)) |
2960 | emit_group_store (mem, entry_parm, data->passed_type, size); | |
2961 | else if (GET_CODE (entry_parm) == PARALLEL) | |
27e29549 | 2962 | { |
bb27eeda SE |
2963 | push_to_sequence2 (all->first_conversion_insn, |
2964 | all->last_conversion_insn); | |
27e29549 | 2965 | emit_group_store (mem, entry_parm, data->passed_type, size); |
bb27eeda SE |
2966 | all->first_conversion_insn = get_insns (); |
2967 | all->last_conversion_insn = get_last_insn (); | |
27e29549 | 2968 | end_sequence (); |
a029addd | 2969 | in_conversion_seq = true; |
27e29549 | 2970 | } |
c6b97fac | 2971 | |
6071dc7f RH |
2972 | else if (size == 0) |
2973 | ; | |
5c07bd7a | 2974 | |
6071dc7f RH |
2975 | /* If SIZE is that of a mode no bigger than a word, just use |
2976 | that mode's store operation. */ | |
2977 | else if (size <= UNITS_PER_WORD) | |
2978 | { | |
ef4bddc2 | 2979 | machine_mode mode |
6071dc7f | 2980 | = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0); |
c6b97fac | 2981 | |
6071dc7f | 2982 | if (mode != BLKmode |
6e985040 | 2983 | #ifdef BLOCK_REG_PADDING |
6071dc7f RH |
2984 | && (size == UNITS_PER_WORD |
2985 | || (BLOCK_REG_PADDING (mode, data->passed_type, 1) | |
2986 | != (BYTES_BIG_ENDIAN ? upward : downward))) | |
6e985040 | 2987 | #endif |
6071dc7f RH |
2988 | ) |
2989 | { | |
208996c7 RS |
2990 | rtx reg; |
2991 | ||
2992 | /* We are really truncating a word_mode value containing | |
2993 | SIZE bytes into a value of mode MODE. If such an | |
2994 | operation requires no actual instructions, we can refer | |
2995 | to the value directly in mode MODE, otherwise we must | |
2996 | start with the register in word_mode and explicitly | |
2997 | convert it. */ | |
2998 | if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD)) | |
2999 | reg = gen_rtx_REG (mode, REGNO (entry_parm)); | |
3000 | else | |
3001 | { | |
3002 | reg = gen_rtx_REG (word_mode, REGNO (entry_parm)); | |
3003 | reg = convert_to_mode (mode, copy_to_reg (reg), 1); | |
3004 | } | |
6071dc7f RH |
3005 | emit_move_insn (change_address (mem, mode, 0), reg); |
3006 | } | |
c6b97fac | 3007 | |
1e5d7fd6 AO |
3008 | #ifdef BLOCK_REG_PADDING |
3009 | /* Storing the register in memory as a full word, as | |
3010 | move_block_from_reg below would do, and then using the | |
3011 | MEM in a smaller mode, has the effect of shifting right | |
3012 | if BYTES_BIG_ENDIAN. If we're bypassing memory, the | |
3013 | shifting must be explicit. */ | |
3014 | else if (!MEM_P (mem)) | |
3015 | { | |
3016 | rtx x; | |
3017 | ||
3018 | /* If the assert below fails, we should have taken the | |
3019 | mode != BLKmode path above, unless we have downward | |
3020 | padding of smaller-than-word arguments on a machine | |
3021 | with little-endian bytes, which would likely require | |
3022 | additional changes to work correctly. */ | |
3023 | gcc_checking_assert (BYTES_BIG_ENDIAN | |
3024 | && (BLOCK_REG_PADDING (mode, | |
3025 | data->passed_type, 1) | |
3026 | == upward)); | |
3027 | ||
3028 | int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT; | |
3029 | ||
3030 | x = gen_rtx_REG (word_mode, REGNO (entry_parm)); | |
3031 | x = expand_shift (RSHIFT_EXPR, word_mode, x, by, | |
3032 | NULL_RTX, 1); | |
3033 | x = force_reg (word_mode, x); | |
3034 | x = gen_lowpart_SUBREG (GET_MODE (mem), x); | |
3035 | ||
3036 | emit_move_insn (mem, x); | |
3037 | } | |
3038 | #endif | |
3039 | ||
6071dc7f RH |
3040 | /* Blocks smaller than a word on a BYTES_BIG_ENDIAN |
3041 | machine must be aligned to the left before storing | |
3042 | to memory. Note that the previous test doesn't | |
3043 | handle all cases (e.g. SIZE == 3). */ | |
3044 | else if (size != UNITS_PER_WORD | |
6e985040 | 3045 | #ifdef BLOCK_REG_PADDING |
6071dc7f RH |
3046 | && (BLOCK_REG_PADDING (mode, data->passed_type, 1) |
3047 | == downward) | |
6e985040 | 3048 | #else |
6071dc7f | 3049 | && BYTES_BIG_ENDIAN |
6e985040 | 3050 | #endif |
6071dc7f RH |
3051 | ) |
3052 | { | |
3053 | rtx tem, x; | |
3054 | int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT; | |
65c844e2 | 3055 | rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm)); |
6071dc7f | 3056 | |
eb6c3df1 | 3057 | x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1); |
6071dc7f RH |
3058 | tem = change_address (mem, word_mode, 0); |
3059 | emit_move_insn (tem, x); | |
6f086dfc | 3060 | } |
6071dc7f | 3061 | else |
27e29549 | 3062 | move_block_from_reg (REGNO (entry_parm), mem, |
6071dc7f | 3063 | size_stored / UNITS_PER_WORD); |
6f086dfc | 3064 | } |
f11a7b6d | 3065 | else if (!MEM_P (mem)) |
1e5d7fd6 AO |
3066 | { |
3067 | gcc_checking_assert (size > UNITS_PER_WORD); | |
3068 | #ifdef BLOCK_REG_PADDING | |
3069 | gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem), | |
3070 | data->passed_type, 0) | |
3071 | == upward); | |
3072 | #endif | |
3073 | emit_move_insn (mem, entry_parm); | |
3074 | } | |
6071dc7f | 3075 | else |
27e29549 | 3076 | move_block_from_reg (REGNO (entry_parm), mem, |
6071dc7f RH |
3077 | size_stored / UNITS_PER_WORD); |
3078 | } | |
bfc45551 AM |
3079 | else if (data->stack_parm == 0) |
3080 | { | |
bb27eeda | 3081 | push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); |
bfc45551 AM |
3082 | emit_block_move (stack_parm, data->entry_parm, GEN_INT (size), |
3083 | BLOCK_OP_NORMAL); | |
bb27eeda SE |
3084 | all->first_conversion_insn = get_insns (); |
3085 | all->last_conversion_insn = get_last_insn (); | |
bfc45551 | 3086 | end_sequence (); |
a029addd | 3087 | in_conversion_seq = true; |
bfc45551 | 3088 | } |
6071dc7f | 3089 | |
f11a7b6d AO |
3090 | if (target_reg) |
3091 | { | |
a029addd AO |
3092 | if (!in_conversion_seq) |
3093 | emit_move_insn (target_reg, stack_parm); | |
3094 | else | |
3095 | { | |
3096 | push_to_sequence2 (all->first_conversion_insn, | |
3097 | all->last_conversion_insn); | |
3098 | emit_move_insn (target_reg, stack_parm); | |
3099 | all->first_conversion_insn = get_insns (); | |
3100 | all->last_conversion_insn = get_last_insn (); | |
3101 | end_sequence (); | |
3102 | } | |
f11a7b6d AO |
3103 | stack_parm = target_reg; |
3104 | } | |
3105 | ||
bfc45551 | 3106 | data->stack_parm = stack_parm; |
f11a7b6d | 3107 | set_parm_rtl (parm, stack_parm); |
6071dc7f RH |
3108 | } |
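/* Illustrative example, not part of the GCC sources: the sub-word paths
   above matter for an argument like

       struct s3 { char c[3]; };    three bytes arriving in one register

   Whether the useful bytes sit at the most or least significant end of
   the incoming word-mode register depends on BYTES_BIG_ENDIAN and
   BLOCK_REG_PADDING, so the code either stores through a narrower
   integer mode or shifts the register first so that the bytes land at
   the right addresses within the BLKmode slot.  */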
3109 | ||
3110 | /* A subroutine of assign_parms. Allocate a pseudo to hold the current | |
3111 | parameter. Get it there. Perform all ABI specified conversions. */ | |
3112 | ||
3113 | static void | |
3114 | assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, | |
3115 | struct assign_parm_data_one *data) | |
3116 | { | |
71008de4 BS |
3117 | rtx parmreg, validated_mem; |
3118 | rtx equiv_stack_parm; | |
ef4bddc2 | 3119 | machine_mode promoted_nominal_mode; |
6071dc7f RH |
3120 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm)); |
3121 | bool did_conversion = false; | |
71008de4 | 3122 | bool need_conversion, moved; |
f11a7b6d | 3123 | rtx rtl; |
6071dc7f RH |
3124 | |
3125 | /* Store the parm in a pseudoregister during the function, but we may | |
666e3ceb PB |
3126 | need to do it in a wider mode. Using 2 here makes the result |
3127 | consistent with promote_decl_mode and thus expand_expr_real_1. */ | |
6071dc7f | 3128 | promoted_nominal_mode |
cde0f3fd | 3129 | = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp, |
666e3ceb | 3130 | TREE_TYPE (current_function_decl), 2); |
6071dc7f | 3131 | |
f11a7b6d AO |
3132 | parmreg = gen_reg_rtx (promoted_nominal_mode); |
3133 | if (!DECL_ARTIFICIAL (parm)) | |
3134 | mark_user_reg (parmreg); | |
6071dc7f RH |
3135 | |
3136 | /* If this was an item that we received a pointer to, | |
f11a7b6d AO |
3137 | set rtl appropriately. */ |
3138 | if (data->passed_pointer) | |
6071dc7f | 3139 | { |
f11a7b6d AO |
3140 | rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg); |
3141 | set_mem_attributes (rtl, parm, 1); | |
6071dc7f RH |
3142 | } |
3143 | else | |
f11a7b6d | 3144 | rtl = parmreg; |
6071dc7f | 3145 | |
4d2a9850 DJ |
3146 | assign_parm_remove_parallels (data); |
3147 | ||
666e3ceb PB |
3148 | /* Copy the value into the register, thus bridging between |
3149 | assign_parm_find_data_types and expand_expr_real_1. */ | |
6071dc7f | 3150 | |
71008de4 | 3151 | equiv_stack_parm = data->stack_parm; |
1a8cb155 | 3152 | validated_mem = validize_mem (copy_rtx (data->entry_parm)); |
71008de4 BS |
3153 | |
3154 | need_conversion = (data->nominal_mode != data->passed_mode | |
3155 | || promoted_nominal_mode != data->promoted_mode); | |
3156 | moved = false; | |
3157 | ||
dbb94435 BS |
3158 | if (need_conversion |
3159 | && GET_MODE_CLASS (data->nominal_mode) == MODE_INT | |
3160 | && data->nominal_mode == data->passed_mode | |
3161 | && data->nominal_mode == GET_MODE (data->entry_parm)) | |
71008de4 | 3162 | { |
6071dc7f RH |
3163 | /* ENTRY_PARM has been converted to PROMOTED_MODE, its |
3164 | mode, by the caller. We now have to convert it to | |
3165 | NOMINAL_MODE, if different. However, PARMREG may be in | |
3166 | a different mode than NOMINAL_MODE if it is being stored | |
3167 | promoted. | |
3168 | ||
3169 | If ENTRY_PARM is a hard register, it might be in a register | |
3170 | not valid for operating in its mode (e.g., an odd-numbered | |
3171 | register for a DFmode). In that case, moves are the only | |
3172 | thing valid, so we can't do a convert from there. This | |
3173 | occurs when the calling sequence allows such misaligned |
3174 | usage. |
3175 | ||
3176 | In addition, the conversion may involve a call, which could | |
3177 | clobber parameters which haven't been copied to pseudo | |
71008de4 BS |
3178 | registers yet. |
3179 | ||
3180 | First, we try to emit an insn which performs the necessary | |
3181 | conversion. We verify that this insn does not clobber any | |
3182 | hard registers. */ | |
3183 | ||
3184 | enum insn_code icode; | |
3185 | rtx op0, op1; | |
3186 | ||
3187 | icode = can_extend_p (promoted_nominal_mode, data->passed_mode, | |
3188 | unsignedp); | |
3189 | ||
3190 | op0 = parmreg; | |
3191 | op1 = validated_mem; | |
3192 | if (icode != CODE_FOR_nothing | |
2ef6ce06 RS |
3193 | && insn_operand_matches (icode, 0, op0) |
3194 | && insn_operand_matches (icode, 1, op1)) | |
71008de4 BS |
3195 | { |
3196 | enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND; | |
b32d5189 DM |
3197 | rtx_insn *insn, *insns; |
3198 | rtx t = op1; | |
71008de4 BS |
3199 | HARD_REG_SET hardregs; |
3200 | ||
3201 | start_sequence (); | |
f9fef349 JJ |
3202 | /* If op1 is a hard register that is likely spilled, first |
3203 | force it into a pseudo, otherwise the combiner might extend |
3204 | its lifetime too much. */ | |
3205 | if (GET_CODE (t) == SUBREG) | |
3206 | t = SUBREG_REG (t); | |
3207 | if (REG_P (t) | |
3208 | && HARD_REGISTER_P (t) | |
3209 | && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t)) | |
3210 | && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t)))) | |
3211 | { | |
3212 | t = gen_reg_rtx (GET_MODE (op1)); | |
3213 | emit_move_insn (t, op1); | |
3214 | } | |
3215 | else | |
3216 | t = op1; | |
e67d1102 RS |
3217 | rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode, |
3218 | data->passed_mode, unsignedp); | |
a11899b2 | 3219 | emit_insn (pat); |
71008de4 BS |
3220 | insns = get_insns (); |
3221 | ||
3222 | moved = true; | |
3223 | CLEAR_HARD_REG_SET (hardregs); | |
3224 | for (insn = insns; insn && moved; insn = NEXT_INSN (insn)) | |
3225 | { | |
3226 | if (INSN_P (insn)) | |
3227 | note_stores (PATTERN (insn), record_hard_reg_sets, | |
3228 | &hardregs); | |
3229 | if (!hard_reg_set_empty_p (hardregs)) | |
3230 | moved = false; | |
3231 | } | |
3232 | ||
3233 | end_sequence (); | |
3234 | ||
3235 | if (moved) | |
3236 | { | |
3237 | emit_insn (insns); | |
dbb94435 BS |
3238 | if (equiv_stack_parm != NULL_RTX) |
3239 | equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg), | |
3240 | equiv_stack_parm); | |
71008de4 BS |
3241 | } |
3242 | } | |
3243 | } | |
3244 | ||
3245 | if (moved) | |
3246 | /* Nothing to do. */ | |
3247 | ; | |
3248 | else if (need_conversion) | |
3249 | { | |
3250 | /* We did not have an insn to convert directly, or the sequence | |
3251 | generated appeared unsafe. We must first copy the parm to a | |
3252 | pseudo reg, and save the conversion until after all | |
6071dc7f RH |
3253 | parameters have been moved. */ |
3254 | ||
71008de4 | 3255 | int save_tree_used; |
6071dc7f RH |
3256 | rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); |
3257 | ||
71008de4 | 3258 | emit_move_insn (tempreg, validated_mem); |
6071dc7f | 3259 | |
bb27eeda | 3260 | push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); |
6071dc7f RH |
3261 | tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp); |
3262 | ||
3263 | if (GET_CODE (tempreg) == SUBREG | |
3264 | && GET_MODE (tempreg) == data->nominal_mode | |
3265 | && REG_P (SUBREG_REG (tempreg)) | |
3266 | && data->nominal_mode == data->passed_mode | |
3267 | && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm) | |
3268 | && GET_MODE_SIZE (GET_MODE (tempreg)) | |
3269 | < GET_MODE_SIZE (GET_MODE (data->entry_parm))) | |
6f086dfc | 3270 | { |
6071dc7f RH |
3271 | /* The argument is already sign/zero extended, so note it |
3272 | into the subreg. */ | |
3273 | SUBREG_PROMOTED_VAR_P (tempreg) = 1; | |
362d42dc | 3274 | SUBREG_PROMOTED_SET (tempreg, unsignedp); |
6071dc7f | 3275 | } |
00d8a4c1 | 3276 | |
6071dc7f RH |
3277 | /* TREE_USED gets set erroneously during expand_assignment. */ |
3278 | save_tree_used = TREE_USED (parm); | |
f11a7b6d | 3279 | SET_DECL_RTL (parm, rtl); |
79f5e442 | 3280 | expand_assignment (parm, make_tree (data->nominal_type, tempreg), false); |
f11a7b6d | 3281 | SET_DECL_RTL (parm, NULL_RTX); |
6071dc7f | 3282 | TREE_USED (parm) = save_tree_used; |
bb27eeda SE |
3283 | all->first_conversion_insn = get_insns (); |
3284 | all->last_conversion_insn = get_last_insn (); | |
6071dc7f | 3285 | end_sequence (); |
00d8a4c1 | 3286 | |
6071dc7f RH |
3287 | did_conversion = true; |
3288 | } | |
f11a7b6d | 3289 | else |
71008de4 | 3290 | emit_move_insn (parmreg, validated_mem); |
6071dc7f RH |
3291 | |
3292 | /* If we were passed a pointer but the actual value can safely live | |
f7e088e7 | 3293 | in a register, retrieve it and use it directly. */ |
f11a7b6d | 3294 | if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode) |
6071dc7f RH |
3295 | { |
3296 | /* We can't use nominal_mode, because it will have been set to | |
3297 | Pmode above. We must use the actual mode of the parm. */ | |
f11a7b6d | 3298 | if (use_register_for_decl (parm)) |
f7e088e7 EB |
3299 | { |
3300 | parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm))); | |
3301 | mark_user_reg (parmreg); | |
3302 | } | |
3303 | else | |
3304 | { | |
3305 | int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm), | |
3306 | TYPE_MODE (TREE_TYPE (parm)), | |
3307 | TYPE_ALIGN (TREE_TYPE (parm))); | |
3308 | parmreg | |
3309 | = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)), | |
3310 | GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))), | |
3311 | align); | |
3312 | set_mem_attributes (parmreg, parm, 1); | |
3313 | } | |
cd5b3469 | 3314 | |
951d8c8a EB |
3315 | /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for |
3316 | the debug info in case it is not legitimate. */ | |
f11a7b6d | 3317 | if (GET_MODE (parmreg) != GET_MODE (rtl)) |
6071dc7f | 3318 | { |
f11a7b6d | 3319 | rtx tempreg = gen_reg_rtx (GET_MODE (rtl)); |
6071dc7f RH |
3320 | int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm)); |
3321 | ||
bb27eeda SE |
3322 | push_to_sequence2 (all->first_conversion_insn, |
3323 | all->last_conversion_insn); | |
f11a7b6d | 3324 | emit_move_insn (tempreg, rtl); |
6071dc7f | 3325 | tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p); |
951d8c8a EB |
3326 | emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, |
3327 | tempreg); | |
bb27eeda SE |
3328 | all->first_conversion_insn = get_insns (); |
3329 | all->last_conversion_insn = get_last_insn (); | |
6071dc7f | 3330 | end_sequence (); |
6f086dfc | 3331 | |
6071dc7f RH |
3332 | did_conversion = true; |
3333 | } | |
3334 | else | |
951d8c8a | 3335 | emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl); |
6f086dfc | 3336 | |
f11a7b6d | 3337 | rtl = parmreg; |
797a6ac1 | 3338 | |
6071dc7f RH |
3339 | /* STACK_PARM is the pointer, not the parm, and PARMREG is |
3340 | now the parm. */ | |
f11a7b6d | 3341 | data->stack_parm = NULL; |
6071dc7f | 3342 | } |
ddef6bc7 | 3343 | |
f11a7b6d AO |
3344 | set_parm_rtl (parm, rtl); |
3345 | ||
6071dc7f RH |
3346 | /* Mark the register as eliminable if we did no conversion and it was |
3347 | copied from memory at a fixed offset, and the arg pointer was not | |
3348 | copied to a pseudo-reg. If the arg pointer is a pseudo reg or the | |
3349 | offset formed an invalid address, such memory-equivalences as we | |
3350 | make here would screw up life analysis for it. */ | |
3351 | if (data->nominal_mode == data->passed_mode | |
3352 | && !did_conversion | |
f11a7b6d AO |
3353 | && data->stack_parm != 0 |
3354 | && MEM_P (data->stack_parm) | |
6071dc7f RH |
3355 | && data->locate.offset.var == 0 |
3356 | && reg_mentioned_p (virtual_incoming_args_rtx, | |
f11a7b6d | 3357 | XEXP (data->stack_parm, 0))) |
6071dc7f | 3358 | { |
691fe203 DM |
3359 | rtx_insn *linsn = get_last_insn (); |
3360 | rtx_insn *sinsn; | |
3361 | rtx set; | |
a03caf76 | 3362 | |
6071dc7f RH |
3363 | /* Mark complex types separately. */ |
3364 | if (GET_CODE (parmreg) == CONCAT) | |
3365 | { | |
ef4bddc2 | 3366 | machine_mode submode |
6071dc7f | 3367 | = GET_MODE_INNER (GET_MODE (parmreg)); |
1466e387 RH |
3368 | int regnor = REGNO (XEXP (parmreg, 0)); |
3369 | int regnoi = REGNO (XEXP (parmreg, 1)); | |
f11a7b6d AO |
3370 | rtx stackr = adjust_address_nv (data->stack_parm, submode, 0); |
3371 | rtx stacki = adjust_address_nv (data->stack_parm, submode, | |
1466e387 | 3372 | GET_MODE_SIZE (submode)); |
6071dc7f RH |
3373 | |
3374 | /* Scan backwards for the set of the real and | |
3375 | imaginary parts. */ | |
3376 | for (sinsn = linsn; sinsn != 0; | |
3377 | sinsn = prev_nonnote_insn (sinsn)) | |
3378 | { | |
3379 | set = single_set (sinsn); | |
3380 | if (set == 0) | |
3381 | continue; | |
3382 | ||
3383 | if (SET_DEST (set) == regno_reg_rtx [regnoi]) | |
a31830a7 | 3384 | set_unique_reg_note (sinsn, REG_EQUIV, stacki); |
6071dc7f | 3385 | else if (SET_DEST (set) == regno_reg_rtx [regnor]) |
a31830a7 | 3386 | set_unique_reg_note (sinsn, REG_EQUIV, stackr); |
a03caf76 | 3387 | } |
6071dc7f | 3388 | } |
f11a7b6d | 3389 | else |
7543f918 | 3390 | set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg); |
6071dc7f RH |
3391 | } |
3392 | ||
3393 | /* For pointer data type, suggest pointer register. */ | |
3394 | if (POINTER_TYPE_P (TREE_TYPE (parm))) | |
3395 | mark_reg_pointer (parmreg, | |
3396 | TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); | |
3397 | } | |
3398 | ||
3399 | /* A subroutine of assign_parms. Allocate stack space to hold the current | |
3400 | parameter. Get it there. Perform all ABI specified conversions. */ | |
3401 | ||
3402 | static void | |
3403 | assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, | |
3404 | struct assign_parm_data_one *data) | |
3405 | { | |
3406 | /* Value must be stored in the stack slot STACK_PARM during function | |
3407 | execution. */ | |
bfc45551 | 3408 | bool to_conversion = false; |
6071dc7f | 3409 | |
4d2a9850 DJ |
3410 | assign_parm_remove_parallels (data); |
3411 | ||
6071dc7f RH |
3412 | if (data->promoted_mode != data->nominal_mode) |
3413 | { | |
3414 | /* Conversion is required. */ | |
3415 | rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); | |
6f086dfc | 3416 | |
1a8cb155 | 3417 | emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm))); |
6071dc7f | 3418 | |
bb27eeda | 3419 | push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); |
bfc45551 AM |
3420 | to_conversion = true; |
3421 | ||
6071dc7f RH |
3422 | data->entry_parm = convert_to_mode (data->nominal_mode, tempreg, |
3423 | TYPE_UNSIGNED (TREE_TYPE (parm))); | |
3424 | ||
3425 | if (data->stack_parm) | |
dd67163f JJ |
3426 | { |
3427 | int offset = subreg_lowpart_offset (data->nominal_mode, | |
3428 | GET_MODE (data->stack_parm)); | |
3429 | /* ??? This may need a big-endian conversion on sparc64. */ | |
3430 | data->stack_parm | |
3431 | = adjust_address (data->stack_parm, data->nominal_mode, 0); | |
527210c4 | 3432 | if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm)) |
dd67163f | 3433 | set_mem_offset (data->stack_parm, |
527210c4 | 3434 | MEM_OFFSET (data->stack_parm) + offset); |
dd67163f | 3435 | } |
6071dc7f RH |
3436 | } |
3437 | ||
3438 | if (data->entry_parm != data->stack_parm) | |
3439 | { | |
bfc45551 | 3440 | rtx src, dest; |
1f9ceff1 | 3441 | |
6071dc7f RH |
3442 | if (data->stack_parm == 0) |
3443 | { | |
3a695389 UW |
3444 | int align = STACK_SLOT_ALIGNMENT (data->passed_type, |
3445 | GET_MODE (data->entry_parm), | |
3446 | TYPE_ALIGN (data->passed_type)); | |
6071dc7f RH |
3447 | data->stack_parm |
3448 | = assign_stack_local (GET_MODE (data->entry_parm), | |
3449 | GET_MODE_SIZE (GET_MODE (data->entry_parm)), | |
3a695389 | 3450 | align); |
f11a7b6d | 3451 | set_mem_attributes (data->stack_parm, parm, 1); |
6f086dfc | 3452 | } |
6071dc7f | 3453 | |
1a8cb155 RS |
3454 | dest = validize_mem (copy_rtx (data->stack_parm)); |
3455 | src = validize_mem (copy_rtx (data->entry_parm)); | |
bfc45551 AM |
3456 | |
3457 | if (MEM_P (src)) | |
6f086dfc | 3458 | { |
bfc45551 AM |
3459 | /* Use a block move to handle potentially misaligned entry_parm. */ |
3460 | if (!to_conversion) | |
bb27eeda SE |
3461 | push_to_sequence2 (all->first_conversion_insn, |
3462 | all->last_conversion_insn); | |
bfc45551 AM |
3463 | to_conversion = true; |
3464 | ||
3465 | emit_block_move (dest, src, | |
3466 | GEN_INT (int_size_in_bytes (data->passed_type)), | |
3467 | BLOCK_OP_NORMAL); | |
6071dc7f RH |
3468 | } |
3469 | else | |
4a235312 L |
3470 | { |
3471 | if (!REG_P (src)) | |
3472 | src = force_reg (GET_MODE (src), src); | |
3473 | emit_move_insn (dest, src); | |
3474 | } | |
bfc45551 AM |
3475 | } |
3476 | ||
3477 | if (to_conversion) | |
3478 | { | |
bb27eeda SE |
3479 | all->first_conversion_insn = get_insns (); |
3480 | all->last_conversion_insn = get_last_insn (); | |
bfc45551 | 3481 | end_sequence (); |
6071dc7f | 3482 | } |
6f086dfc | 3483 | |
f11a7b6d | 3484 | set_parm_rtl (parm, data->stack_parm); |
6071dc7f | 3485 | } |
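/* Illustrative note, not part of the GCC sources: the three setup
   routines above are alternatives, chosen per parameter by assign_parms
   further down, roughly as

       if (assign_parm_setup_block_p (&data))
         assign_parm_setup_block (&all, parm, &data);
       else if (data.passed_pointer || use_register_for_decl (parm))
         assign_parm_setup_reg (&all, parm, &data);
       else
         assign_parm_setup_stack (&all, parm, &data);

   so exactly one of them establishes the parameter's RTL.  */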
3412b298 | 3486 | |
6071dc7f RH |
3487 | /* A subroutine of assign_parms. If the ABI splits complex arguments, then |
3488 | undo the frobbing that we did in assign_parms_augmented_arg_list. */ | |
86f8eff3 | 3489 | |
6071dc7f | 3490 | static void |
3b3f318a | 3491 | assign_parms_unsplit_complex (struct assign_parm_data_all *all, |
9771b263 | 3492 | vec<tree> fnargs) |
6071dc7f RH |
3493 | { |
3494 | tree parm; | |
6ccd356e | 3495 | tree orig_fnargs = all->orig_fnargs; |
3b3f318a | 3496 | unsigned i = 0; |
f4ef873c | 3497 | |
3b3f318a | 3498 | for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i) |
6071dc7f RH |
3499 | { |
3500 | if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE | |
3501 | && targetm.calls.split_complex_arg (TREE_TYPE (parm))) | |
3502 | { | |
3503 | rtx tmp, real, imag; | |
ef4bddc2 | 3504 | machine_mode inner = GET_MODE_INNER (DECL_MODE (parm)); |
6f086dfc | 3505 | |
9771b263 DN |
3506 | real = DECL_RTL (fnargs[i]); |
3507 | imag = DECL_RTL (fnargs[i + 1]); | |
6071dc7f | 3508 | if (inner != GET_MODE (real)) |
6f086dfc | 3509 | { |
f11a7b6d AO |
3510 | real = gen_lowpart_SUBREG (inner, real); |
3511 | imag = gen_lowpart_SUBREG (inner, imag); | |
6071dc7f | 3512 | } |
6ccd356e | 3513 | |
f11a7b6d | 3514 | if (TREE_ADDRESSABLE (parm)) |
6ccd356e AM |
3515 | { |
3516 | rtx rmem, imem; | |
3517 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm)); | |
3a695389 UW |
3518 | int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm), |
3519 | DECL_MODE (parm), | |
3520 | TYPE_ALIGN (TREE_TYPE (parm))); | |
6ccd356e AM |
3521 | |
3522 | /* split_complex_arg put the real and imag parts in | |
3523 | pseudos. Move them to memory. */ | |
3a695389 | 3524 | tmp = assign_stack_local (DECL_MODE (parm), size, align); |
6ccd356e AM |
3525 | set_mem_attributes (tmp, parm, 1); |
3526 | rmem = adjust_address_nv (tmp, inner, 0); | |
3527 | imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner)); | |
bb27eeda SE |
3528 | push_to_sequence2 (all->first_conversion_insn, |
3529 | all->last_conversion_insn); | |
6ccd356e AM |
3530 | emit_move_insn (rmem, real); |
3531 | emit_move_insn (imem, imag); | |
bb27eeda SE |
3532 | all->first_conversion_insn = get_insns (); |
3533 | all->last_conversion_insn = get_last_insn (); | |
6ccd356e AM |
3534 | end_sequence (); |
3535 | } | |
3536 | else | |
3537 | tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); | |
f11a7b6d | 3538 | set_parm_rtl (parm, tmp); |
7e41ffa2 | 3539 | |
9771b263 DN |
3540 | real = DECL_INCOMING_RTL (fnargs[i]); |
3541 | imag = DECL_INCOMING_RTL (fnargs[i + 1]); | |
6071dc7f RH |
3542 | if (inner != GET_MODE (real)) |
3543 | { | |
3544 | real = gen_lowpart_SUBREG (inner, real); | |
3545 | imag = gen_lowpart_SUBREG (inner, imag); | |
6f086dfc | 3546 | } |
6071dc7f | 3547 | tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); |
5141868d | 3548 | set_decl_incoming_rtl (parm, tmp, false); |
3b3f318a | 3549 | i++; |
6f086dfc | 3550 | } |
6f086dfc | 3551 | } |
6071dc7f RH |
3552 | } |
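/* Illustrative example, not part of the GCC sources: on a target whose
   targetm.calls.split_complex_arg hook returns true for complex types,
   a parameter such as Z in

       double g (_Complex double z);

   is expanded as two separate DFmode arguments.  The routine above glues
   the halves back together, giving Z a DECL_RTL of the form
   (concat:DC real imag), or a stack temporary when Z is addressable.  */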
3553 | ||
d5e254e1 IE |
3554 | /* Load bounds of PARM from bounds table. */ |
3555 | static void | |
3556 | assign_parm_load_bounds (struct assign_parm_data_one *data, | |
3557 | tree parm, | |
3558 | rtx entry, | |
3559 | unsigned bound_no) | |
3560 | { | |
3561 | bitmap_iterator bi; | |
3562 | unsigned i, offs = 0; | |
3563 | int bnd_no = -1; | |
3564 | rtx slot = NULL, ptr = NULL; | |
3565 | ||
3566 | if (parm) | |
3567 | { | |
3568 | bitmap slots; | |
3569 | bitmap_obstack_initialize (NULL); | |
3570 | slots = BITMAP_ALLOC (NULL); | |
3571 | chkp_find_bound_slots (TREE_TYPE (parm), slots); | |
3572 | EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi) | |
3573 | { | |
3574 | if (bound_no) | |
3575 | bound_no--; | |
3576 | else | |
3577 | { | |
3578 | bnd_no = i; | |
3579 | break; | |
3580 | } | |
3581 | } | |
3582 | BITMAP_FREE (slots); | |
3583 | bitmap_obstack_release (NULL); | |
3584 | } | |
3585 | ||
3586 | /* We may have bounds not associated with any pointer. */ | |
3587 | if (bnd_no != -1) | |
3588 | offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT; | |
3589 | ||
3590 | /* Find associated pointer. */ | |
3591 | if (bnd_no == -1) | |
3592 | { | |
3593 | /* If bounds are not associated with any pointer, |
3594 | then they are passed in a register or special slot. */ |
3595 | gcc_assert (data->entry_parm); | |
3596 | ptr = const0_rtx; | |
3597 | } | |
3598 | else if (MEM_P (entry)) | |
3599 | slot = adjust_address (entry, Pmode, offs); | |
3600 | else if (REG_P (entry)) | |
3601 | ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no); | |
3602 | else if (GET_CODE (entry) == PARALLEL) | |
3603 | ptr = chkp_get_value_with_offs (entry, GEN_INT (offs)); | |
3604 | else | |
3605 | gcc_unreachable (); | |
3606 | data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr, | |
3607 | data->entry_parm); | |
3608 | } | |
3609 | ||
3610 | /* Assign RTL expressions to the function's bounds parameters BNDARGS. */ | |
3611 | ||
3612 | static void | |
3613 | assign_bounds (vec<bounds_parm_data> &bndargs, | |
55429190 IE |
3614 | struct assign_parm_data_all &all, |
3615 | bool assign_regs, bool assign_special, | |
3616 | bool assign_bt) | |
d5e254e1 | 3617 | { |
55429190 | 3618 | unsigned i, pass; |
d5e254e1 IE |
3619 | bounds_parm_data *pbdata; |
3620 | ||
3621 | if (!bndargs.exists ()) | |
3622 | return; | |
3623 | ||
3624 | /* We make a few passes to store input bounds. First handle bounds |
3625 | passed in registers. After that we load bounds passed in special |
3626 | slots. Finally we load bounds from the Bounds Table. */ |
3627 | for (pass = 0; pass < 3; pass++) | |
3628 | FOR_EACH_VEC_ELT (bndargs, i, pbdata) | |
3629 | { | |
3630 | /* Pass 0 => regs only. */ | |
3631 | if (pass == 0 | |
55429190 IE |
3632 | && (!assign_regs |
3633 | || (!pbdata->parm_data.entry_parm |
3634 | || GET_CODE (pbdata->parm_data.entry_parm) != REG))) | |
d5e254e1 IE |
3635 | continue; |
3636 | /* Pass 1 => slots only. */ | |
3637 | else if (pass == 1 | |
55429190 IE |
3638 | && (!assign_special |
3639 | || (!pbdata->parm_data.entry_parm | |
3640 | || GET_CODE (pbdata->parm_data.entry_parm) == REG))) | |
d5e254e1 IE |
3641 | continue; |
3642 | /* Pass 2 => BT only. */ | |
3643 | else if (pass == 2 | |
55429190 IE |
3644 | && (!assign_bt |
3645 | || pbdata->parm_data.entry_parm)) | |
d5e254e1 IE |
3646 | continue; |
3647 | ||
3648 | if (!pbdata->parm_data.entry_parm | |
3649 | || GET_CODE (pbdata->parm_data.entry_parm) != REG) | |
3650 | assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm, | |
3651 | pbdata->ptr_entry, pbdata->bound_no); | |
3652 | ||
3653 | set_decl_incoming_rtl (pbdata->bounds_parm, | |
3654 | pbdata->parm_data.entry_parm, false); | |
3655 | ||
3656 | if (assign_parm_setup_block_p (&pbdata->parm_data)) | |
3657 | assign_parm_setup_block (&all, pbdata->bounds_parm, | |
3658 | &pbdata->parm_data); | |
3659 | else if (pbdata->parm_data.passed_pointer | |
f11a7b6d | 3660 | || use_register_for_decl (pbdata->bounds_parm)) |
d5e254e1 IE |
3661 | assign_parm_setup_reg (&all, pbdata->bounds_parm, |
3662 | &pbdata->parm_data); | |
3663 | else | |
3664 | assign_parm_setup_stack (&all, pbdata->bounds_parm, | |
3665 | &pbdata->parm_data); | |
d5e254e1 | 3666 | } |
d5e254e1 IE |
3667 | } |
3668 | ||
6071dc7f RH |
3669 | /* Assign RTL expressions to the function's parameters. This may involve |
3670 | copying them into registers and using those registers as the DECL_RTL. */ | |
3671 | ||
6fe79279 | 3672 | static void |
6071dc7f RH |
3673 | assign_parms (tree fndecl) |
3674 | { | |
3675 | struct assign_parm_data_all all; | |
3b3f318a | 3676 | tree parm; |
9771b263 | 3677 | vec<tree> fnargs; |
d5e254e1 IE |
3678 | unsigned i, bound_no = 0; |
3679 | tree last_arg = NULL; | |
3680 | rtx last_arg_entry = NULL; | |
3681 | vec<bounds_parm_data> bndargs = vNULL; | |
3682 | bounds_parm_data bdata; | |
6f086dfc | 3683 | |
38173d38 | 3684 | crtl->args.internal_arg_pointer |
150cdc9e | 3685 | = targetm.calls.internal_arg_pointer (); |
6071dc7f RH |
3686 | |
3687 | assign_parms_initialize_all (&all); | |
3688 | fnargs = assign_parms_augmented_arg_list (&all); | |
3689 | ||
9771b263 | 3690 | FOR_EACH_VEC_ELT (fnargs, i, parm) |
ded9bf77 | 3691 | { |
6071dc7f RH |
3692 | struct assign_parm_data_one data; |
3693 | ||
3694 | /* Extract the type of PARM; adjust it according to ABI. */ | |
3695 | assign_parm_find_data_types (&all, parm, &data); | |
3696 | ||
3697 | /* Early out for errors and void parameters. */ | |
3698 | if (data.passed_mode == VOIDmode) | |
ded9bf77 | 3699 | { |
6071dc7f RH |
3700 | SET_DECL_RTL (parm, const0_rtx); |
3701 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm); | |
3702 | continue; | |
3703 | } | |
196c42cd | 3704 | |
2e3f842f L |
3705 | /* Estimate stack alignment from parameter alignment. */ |
3706 | if (SUPPORTS_STACK_ALIGNMENT) | |
3707 | { | |
c2ed6cf8 NF |
3708 | unsigned int align |
3709 | = targetm.calls.function_arg_boundary (data.promoted_mode, | |
3710 | data.passed_type); | |
ae58e548 JJ |
3711 | align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode, |
3712 | align); | |
2e3f842f | 3713 | if (TYPE_ALIGN (data.nominal_type) > align) |
ae58e548 JJ |
3714 | align = MINIMUM_ALIGNMENT (data.nominal_type, |
3715 | TYPE_MODE (data.nominal_type), | |
3716 | TYPE_ALIGN (data.nominal_type)); | |
2e3f842f L |
3717 | if (crtl->stack_alignment_estimated < align) |
3718 | { | |
3719 | gcc_assert (!crtl->stack_realign_processed); | |
3720 | crtl->stack_alignment_estimated = align; | |
3721 | } | |
3722 | } | |
b8698a0f | 3723 | |
6071dc7f RH |
3724 | /* Find out where the parameter arrives in this function. */ |
3725 | assign_parm_find_entry_rtl (&all, &data); | |
3726 | ||
3727 | /* Find out where stack space for this parameter might be. */ | |
3728 | if (assign_parm_is_stack_parm (&all, &data)) | |
3729 | { | |
3730 | assign_parm_find_stack_rtl (parm, &data); | |
3731 | assign_parm_adjust_entry_rtl (&data); | |
ded9bf77 | 3732 | } |
d5e254e1 IE |
3733 | if (!POINTER_BOUNDS_TYPE_P (data.passed_type)) |
3734 | { | |
3735 | /* Remember where the last non-bounds arg was passed in case |
3736 | we have to load associated bounds for it from the Bounds |
3737 | Table. */ |
3738 | last_arg = parm; | |
3739 | last_arg_entry = data.entry_parm; | |
3740 | bound_no = 0; | |
3741 | } | |
6071dc7f | 3742 | /* Record permanently how this parm was passed. */ |
a82ff31f JJ |
3743 | if (data.passed_pointer) |
3744 | { | |
3745 | rtx incoming_rtl | |
3746 | = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)), | |
3747 | data.entry_parm); | |
3748 | set_decl_incoming_rtl (parm, incoming_rtl, true); | |
3749 | } | |
3750 | else | |
3751 | set_decl_incoming_rtl (parm, data.entry_parm, false); | |
6071dc7f | 3752 | |
f11a7b6d | 3753 | assign_parm_adjust_stack_rtl (&data); |
1f9ceff1 AO |
3754 | |
3755 | /* Bounds should be loaded in a particular order to |
d5e254e1 IE |
3756 | have registers allocated correctly. Collect info about |
3757 | input bounds and load them later. */ | |
3758 | if (POINTER_BOUNDS_TYPE_P (data.passed_type)) | |
3759 | { | |
3760 | /* Expect bounds in instrumented functions only. */ | |
3761 | gcc_assert (chkp_function_instrumented_p (fndecl)); | |
3762 | ||
3763 | bdata.parm_data = data; | |
3764 | bdata.bounds_parm = parm; | |
3765 | bdata.ptr_parm = last_arg; | |
3766 | bdata.ptr_entry = last_arg_entry; | |
3767 | bdata.bound_no = bound_no; | |
3768 | bndargs.safe_push (bdata); | |
3769 | } | |
3770 | else | |
3771 | { | |
d5e254e1 IE |
3772 | if (assign_parm_setup_block_p (&data)) |
3773 | assign_parm_setup_block (&all, parm, &data); | |
f11a7b6d | 3774 | else if (data.passed_pointer || use_register_for_decl (parm)) |
d5e254e1 IE |
3775 | assign_parm_setup_reg (&all, parm, &data); |
3776 | else | |
3777 | assign_parm_setup_stack (&all, parm, &data); | |
3778 | } | |
3779 | ||
3780 | if (cfun->stdarg && !DECL_CHAIN (parm)) | |
3781 | { | |
3782 | int pretend_bytes = 0; | |
3783 | ||
3784 | assign_parms_setup_varargs (&all, &data, false); | |
3785 | ||
3786 | if (chkp_function_instrumented_p (fndecl)) | |
3787 | { | |
3788 | /* We expect this to be the last parm. Otherwise it is wrong |
3789 | to assign bounds right now. */ | |
3790 | gcc_assert (i == (fnargs.length () - 1)); | |
55429190 | 3791 | assign_bounds (bndargs, all, true, false, false); |
d5e254e1 IE |
3792 | targetm.calls.setup_incoming_vararg_bounds (all.args_so_far, |
3793 | data.promoted_mode, | |
3794 | data.passed_type, | |
3795 | &pretend_bytes, | |
3796 | false); | |
55429190 IE |
3797 | assign_bounds (bndargs, all, false, true, true); |
3798 | bndargs.release (); | |
d5e254e1 IE |
3799 | } |
3800 | } | |
3801 | ||
6071dc7f | 3802 | /* Update info on where next arg arrives in registers. */ |
d5cc9181 | 3803 | targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, |
3c07301f | 3804 | data.passed_type, data.named_arg); |
6071dc7f | 3805 | |
d5e254e1 IE |
3806 | if (POINTER_BOUNDS_TYPE_P (data.passed_type)) |
3807 | bound_no++; | |
ded9bf77 AH |
3808 | } |
3809 | ||
55429190 IE |
3810 | assign_bounds (bndargs, all, true, true, true); |
3811 | bndargs.release (); | |
d5e254e1 | 3812 | |
3b3f318a | 3813 | if (targetm.calls.split_complex_arg) |
6ccd356e | 3814 | assign_parms_unsplit_complex (&all, fnargs); |
6071dc7f | 3815 | |
9771b263 | 3816 | fnargs.release (); |
3b3f318a | 3817 | |
3412b298 JW |
3818 | /* Output all parameter conversion instructions (possibly including calls) |
3819 | now that all parameters have been copied out of hard registers. */ | |
bb27eeda | 3820 | emit_insn (all.first_conversion_insn); |
3412b298 | 3821 | |
2e3f842f L |
3822 | /* Estimate reload stack alignment from scalar return mode. */ |
3823 | if (SUPPORTS_STACK_ALIGNMENT) | |
3824 | { | |
3825 | if (DECL_RESULT (fndecl)) | |
3826 | { | |
3827 | tree type = TREE_TYPE (DECL_RESULT (fndecl)); | |
ef4bddc2 | 3828 | machine_mode mode = TYPE_MODE (type); |
2e3f842f L |
3829 | |
3830 | if (mode != BLKmode | |
3831 | && mode != VOIDmode | |
3832 | && !AGGREGATE_TYPE_P (type)) | |
3833 | { | |
3834 | unsigned int align = GET_MODE_ALIGNMENT (mode); | |
3835 | if (crtl->stack_alignment_estimated < align) | |
3836 | { | |
3837 | gcc_assert (!crtl->stack_realign_processed); | |
3838 | crtl->stack_alignment_estimated = align; | |
3839 | } | |
3840 | } | |
b8698a0f | 3841 | } |
2e3f842f L |
3842 | } |
3843 | ||
b36a8cc2 OH |
3844 | /* If we are receiving a struct value address as the first argument, set up |
3845 | the RTL for the function result. As this might require code to convert | |
3846 | the transmitted address to Pmode, we do this here to ensure that possible | |
3847 | preliminary conversions of the address have been emitted already. */ | |
6071dc7f | 3848 | if (all.function_result_decl) |
b36a8cc2 | 3849 | { |
6071dc7f RH |
3850 | tree result = DECL_RESULT (current_function_decl); |
3851 | rtx addr = DECL_RTL (all.function_result_decl); | |
b36a8cc2 | 3852 | rtx x; |
fa8db1f7 | 3853 | |
cc77ae10 | 3854 | if (DECL_BY_REFERENCE (result)) |
8dcfef8f AO |
3855 | { |
3856 | SET_DECL_VALUE_EXPR (result, all.function_result_decl); | |
3857 | x = addr; | |
3858 | } | |
cc77ae10 JM |
3859 | else |
3860 | { | |
8dcfef8f AO |
3861 | SET_DECL_VALUE_EXPR (result, |
3862 | build1 (INDIRECT_REF, TREE_TYPE (result), | |
3863 | all.function_result_decl)); | |
cc77ae10 JM |
3864 | addr = convert_memory_address (Pmode, addr); |
3865 | x = gen_rtx_MEM (DECL_MODE (result), addr); | |
3866 | set_mem_attributes (x, result, 1); | |
3867 | } | |
8dcfef8f AO |
3868 | |
3869 | DECL_HAS_VALUE_EXPR_P (result) = 1; | |
3870 | ||
f11a7b6d | 3871 | set_parm_rtl (result, x); |
b36a8cc2 OH |
3872 | } |
3873 | ||
53c428d0 | 3874 | /* We have aligned all the args, so add space for the pretend args. */ |
38173d38 | 3875 | crtl->args.pretend_args_size = all.pretend_args_size; |
6071dc7f | 3876 | all.stack_args_size.constant += all.extra_pretend_bytes; |
38173d38 | 3877 | crtl->args.size = all.stack_args_size.constant; |
6f086dfc RS |
3878 | |
3879 | /* Adjust function incoming argument size for alignment and | |
3880 | minimum length. */ | |
3881 | ||
2e4ceca5 | 3882 | crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space); |
38173d38 | 3883 | crtl->args.size = CEIL_ROUND (crtl->args.size, |
53366450 | 3884 | PARM_BOUNDARY / BITS_PER_UNIT); |
4433e339 | 3885 | |
6dad9361 TS |
3886 | if (ARGS_GROW_DOWNWARD) |
3887 | { | |
3888 | crtl->args.arg_offset_rtx | |
3889 | = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant) | |
3890 | : expand_expr (size_diffop (all.stack_args_size.var, | |
3891 | size_int (-all.stack_args_size.constant)), | |
3892 | NULL_RTX, VOIDmode, EXPAND_NORMAL)); | |
3893 | } | |
3894 | else | |
3895 | crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size); | |
6f086dfc RS |
3896 | |
3897 | /* See how many bytes, if any, of its args a function should try to pop | |
3898 | on return. */ | |
3899 | ||
079e7538 NF |
3900 | crtl->args.pops_args = targetm.calls.return_pops_args (fndecl, |
3901 | TREE_TYPE (fndecl), | |
3902 | crtl->args.size); | |
6f086dfc | 3903 | |
3b69d50e RK |
3904 | /* For a stdarg.h function, save info about |
3905 | regs and stack space used by the named args. */ | |
6f086dfc | 3906 | |
d5cc9181 | 3907 | crtl->args.info = all.args_so_far_v; |
6f086dfc RS |
3908 | |
3909 | /* Set the rtx used for the function return value. Put this in its | |
3910 | own variable so any optimizers that need this information don't have | |
3911 | to include tree.h. Do this here so it gets done when an inlined | |
3912 | function gets output. */ | |
3913 | ||
38173d38 | 3914 | crtl->return_rtx |
19e7881c MM |
3915 | = (DECL_RTL_SET_P (DECL_RESULT (fndecl)) |
3916 | ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX); | |
ce5e43d0 JJ |
3917 | |
3918 | /* If scalar return value was computed in a pseudo-reg, or was a named | |
3919 | return value that got dumped to the stack, copy that to the hard | |
3920 | return register. */ | |
3921 | if (DECL_RTL_SET_P (DECL_RESULT (fndecl))) | |
3922 | { | |
3923 | tree decl_result = DECL_RESULT (fndecl); | |
3924 | rtx decl_rtl = DECL_RTL (decl_result); | |
3925 | ||
3926 | if (REG_P (decl_rtl) | |
3927 | ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER | |
3928 | : DECL_REGISTER (decl_result)) | |
3929 | { | |
3930 | rtx real_decl_rtl; | |
3931 | ||
1d636cc6 RG |
3932 | real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), |
3933 | fndecl, true); | |
d5e254e1 IE |
3934 | if (chkp_function_instrumented_p (fndecl)) |
3935 | crtl->return_bnd | |
3936 | = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result), | |
3937 | fndecl, true); | |
ce5e43d0 | 3938 | REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; |
38173d38 | 3939 | /* The delay slot scheduler assumes that crtl->return_rtx |
ce5e43d0 JJ |
3940 | holds the hard register containing the return value, not a |
3941 | temporary pseudo. */ | |
38173d38 | 3942 | crtl->return_rtx = real_decl_rtl; |
ce5e43d0 JJ |
3943 | } |
3944 | } | |
6f086dfc | 3945 | } |
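/* Illustrative note, not part of the GCC sources: assign_parms is invoked
   once per function from expand_function_start (later in this file);
   afterwards every PARM_DECL carries both DECL_INCOMING_RTL (where the
   value arrives per the ABI) and the rtl installed via set_parm_rtl
   (where the function body will read it).  */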
4744afba RH |
3946 | |
3947 | /* A subroutine of gimplify_parameters, invoked via walk_tree. | |
3948 | For all seen types, gimplify their sizes. */ | |
3949 | ||
3950 | static tree | |
3951 | gimplify_parm_type (tree *tp, int *walk_subtrees, void *data) | |
3952 | { | |
3953 | tree t = *tp; | |
3954 | ||
3955 | *walk_subtrees = 0; | |
3956 | if (TYPE_P (t)) | |
3957 | { | |
3958 | if (POINTER_TYPE_P (t)) | |
3959 | *walk_subtrees = 1; | |
ad50bc8d RH |
3960 | else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t)) |
3961 | && !TYPE_SIZES_GIMPLIFIED (t)) | |
4744afba | 3962 | { |
726a989a | 3963 | gimplify_type_sizes (t, (gimple_seq *) data); |
4744afba RH |
3964 | *walk_subtrees = 1; |
3965 | } | |
3966 | } | |
3967 | ||
3968 | return NULL; | |
3969 | } | |
3970 | ||
3971 | /* Gimplify the parameter list for current_function_decl. This involves | |
3972 | evaluating SAVE_EXPRs of variable sized parameters and generating code | |
726a989a RB |
3973 | to implement callee-copied reference parameters. Returns a sequence of |
3974 | statements to add to the beginning of the function. */ | |
4744afba | 3975 | |
726a989a | 3976 | gimple_seq |
4744afba RH |
3977 | gimplify_parameters (void) |
3978 | { | |
3979 | struct assign_parm_data_all all; | |
3b3f318a | 3980 | tree parm; |
726a989a | 3981 | gimple_seq stmts = NULL; |
9771b263 | 3982 | vec<tree> fnargs; |
3b3f318a | 3983 | unsigned i; |
4744afba RH |
3984 | |
3985 | assign_parms_initialize_all (&all); | |
3986 | fnargs = assign_parms_augmented_arg_list (&all); | |
3987 | ||
9771b263 | 3988 | FOR_EACH_VEC_ELT (fnargs, i, parm) |
4744afba RH |
3989 | { |
3990 | struct assign_parm_data_one data; | |
3991 | ||
3992 | /* Extract the type of PARM; adjust it according to ABI. */ | |
3993 | assign_parm_find_data_types (&all, parm, &data); | |
3994 | ||
3995 | /* Early out for errors and void parameters. */ | |
3996 | if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL) | |
3997 | continue; | |
3998 | ||
3999 | /* Update info on where next arg arrives in registers. */ | |
d5cc9181 | 4000 | targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, |
3c07301f | 4001 | data.passed_type, data.named_arg); |
4744afba RH |
4002 | |
4003 | /* ??? Once upon a time variable_size stuffed parameter list | |
4004 | SAVE_EXPRs (amongst others) onto a pending sizes list. This | |
4005 | turned out to be less than manageable in the gimple world. | |
4006 | Now we have to hunt them down ourselves. */ | |
4007 | walk_tree_without_duplicates (&data.passed_type, | |
4008 | gimplify_parm_type, &stmts); | |
4009 | ||
b38f3813 | 4010 | if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST) |
4744afba RH |
4011 | { |
4012 | gimplify_one_sizepos (&DECL_SIZE (parm), &stmts); | |
4013 | gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts); | |
4014 | } | |
4015 | ||
4016 | if (data.passed_pointer) | |
4017 | { | |
4018 | tree type = TREE_TYPE (data.passed_type); | |
d5cc9181 | 4019 | if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type), |
4744afba RH |
4020 | type, data.named_arg)) |
4021 | { | |
4022 | tree local, t; | |
4023 | ||
b38f3813 | 4024 | /* For constant-sized objects, this is trivial; for |
4744afba | 4025 | variable-sized objects, we have to play games. */ |
b38f3813 EB |
4026 | if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST |
4027 | && !(flag_stack_check == GENERIC_STACK_CHECK | |
4028 | && compare_tree_int (DECL_SIZE_UNIT (parm), | |
4029 | STACK_CHECK_MAX_VAR_SIZE) > 0)) | |
4744afba | 4030 | { |
5dac1dae | 4031 | local = create_tmp_var (type, get_name (parm)); |
4744afba | 4032 | DECL_IGNORED_P (local) = 0; |
04487a2f JJ |
4033 | /* If PARM was addressable, move that flag over |
4034 | to the local copy, as its address will be taken, | |
37609bf0 RG |
4035 | not the PARM's. Keep the PARM's address-taken flag set, as
4036 | we'll query that flag during gimplification. */ |
04487a2f | 4037 | if (TREE_ADDRESSABLE (parm)) |
37609bf0 | 4038 | TREE_ADDRESSABLE (local) = 1; |
5dac1dae JJ |
4039 | else if (TREE_CODE (type) == COMPLEX_TYPE |
4040 | || TREE_CODE (type) == VECTOR_TYPE) | |
4041 | DECL_GIMPLE_REG_P (local) = 1; | |
4744afba RH |
4042 | } |
4043 | else | |
4044 | { | |
5039610b | 4045 | tree ptr_type, addr; |
4744afba RH |
4046 | |
4047 | ptr_type = build_pointer_type (type); | |
c98b08ff | 4048 | addr = create_tmp_reg (ptr_type, get_name (parm)); |
4744afba RH |
4049 | DECL_IGNORED_P (addr) = 0; |
4050 | local = build_fold_indirect_ref (addr); | |
4051 | ||
e79983f4 | 4052 | t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN); |
c28f4b5c | 4053 | t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm), |
13e49da9 TV |
4054 | size_int (DECL_ALIGN (parm))); |
4055 | ||
d3c12306 | 4056 | /* The call has been built for a variable-sized object. */ |
63d2a353 | 4057 | CALL_ALLOCA_FOR_VAR_P (t) = 1; |
4744afba | 4058 | t = fold_convert (ptr_type, t); |
726a989a | 4059 | t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); |
4744afba RH |
4060 | gimplify_and_add (t, &stmts); |
4061 | } | |
4062 | ||
726a989a | 4063 | gimplify_assign (local, parm, &stmts); |
4744afba | 4064 | |
833b3afe DB |
4065 | SET_DECL_VALUE_EXPR (parm, local); |
4066 | DECL_HAS_VALUE_EXPR_P (parm) = 1; | |
4744afba RH |
4067 | } |
4068 | } | |
4069 | } | |
4070 | ||
9771b263 | 4071 | fnargs.release (); |
3b3f318a | 4072 | |
4744afba RH |
4073 | return stmts; |
4074 | } | |
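/* A standalone, user-level sketch (not GCC internals; names invented) of the
   callee-copy code built above for a variable-sized reference parameter:
   the local copy lives in alloca'd storage and all later uses of the
   parameter are redirected to it, which is what the SET_DECL_VALUE_EXPR
   redirection accomplishes in the real code.  */

#include <alloca.h>
#include <string.h>

void
callee_copy_sketch (const void *parm, unsigned long size)
{
  void *addr = alloca (size);   /* analogue of BUILT_IN_ALLOCA_WITH_ALIGN */
  memcpy (addr, parm, size);    /* analogue of gimplify_assign (local, parm, ...) */
  /* From here on the body would use ADDR's contents instead of PARM,
     just as DECL_VALUE_EXPR makes the gimplifier do above.  */
}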
75dc3319 | 4075 | \f |
6f086dfc RS |
4076 | /* Compute the size and offset from the start of the stacked arguments for a |
4077 | parm passed in mode PASSED_MODE and with type TYPE. | |
4078 | ||
4079 | INITIAL_OFFSET_PTR points to the current offset into the stacked | |
4080 | arguments. | |
4081 | ||
e7949876 AM |
4082 | The starting offset and size for this parm are returned in |
4083 | LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is | |
4084 | nonzero, the offset is that of stack slot, which is returned in | |
4085 | LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of | |
4086 | padding required from the initial offset ptr to the stack slot. | |
6f086dfc | 4087 | |
cc2902df | 4088 | IN_REGS is nonzero if the argument will be passed in registers. It will |
6f086dfc RS |
4089 | never be set if REG_PARM_STACK_SPACE is not defined. |
4090 | ||
2e4ceca5 UW |
4091 | REG_PARM_STACK_SPACE is the number of bytes of stack space reserved |
4092 | for arguments which are passed in registers. | |
4093 | ||
6f086dfc RS |
4094 | FNDECL is the function in which the argument was defined. |
4095 | ||
4096 | There are two types of rounding that are done. The first, controlled by | |
c2ed6cf8 NF |
4097 | TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the |
4098 | argument list to be aligned to the specific boundary (in bits). This | |
4099 | rounding affects the initial and starting offsets, but not the argument | |
4100 | size. | |
6f086dfc RS |
4101 | |
4102 | The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY, | |
4103 | optionally rounds the size of the parm to PARM_BOUNDARY. The | |
4104 | initial offset is not affected by this rounding, while the size always | |
4105 | is and the starting offset may be. */ | |
4106 | ||
e7949876 AM |
4107 | /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case; |
4108 | INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's | |
6f086dfc | 4109 | callers pass in the total size of args so far as |
e7949876 | 4110 | INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */ |
6f086dfc | 4111 | |
6f086dfc | 4112 | void |
ef4bddc2 | 4113 | locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs, |
2e4ceca5 UW |
4114 | int reg_parm_stack_space, int partial, |
4115 | tree fndecl ATTRIBUTE_UNUSED, | |
fa8db1f7 AJ |
4116 | struct args_size *initial_offset_ptr, |
4117 | struct locate_and_pad_arg_data *locate) | |
6f086dfc | 4118 | { |
e7949876 AM |
4119 | tree sizetree; |
4120 | enum direction where_pad; | |
123148b5 | 4121 | unsigned int boundary, round_boundary; |
e7949876 | 4122 | int part_size_in_regs; |
6f086dfc | 4123 | |
6f086dfc RS |
4124 | /* If we have found a stack parm before we reach the end of the |
4125 | area reserved for registers, skip that area. */ | |
4126 | if (! in_regs) | |
4127 | { | |
6f086dfc RS |
4128 | if (reg_parm_stack_space > 0) |
4129 | { | |
4130 | if (initial_offset_ptr->var) | |
4131 | { | |
4132 | initial_offset_ptr->var | |
4133 | = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), | |
fed3cef0 | 4134 | ssize_int (reg_parm_stack_space)); |
6f086dfc RS |
4135 | initial_offset_ptr->constant = 0; |
4136 | } | |
4137 | else if (initial_offset_ptr->constant < reg_parm_stack_space) | |
4138 | initial_offset_ptr->constant = reg_parm_stack_space; | |
4139 | } | |
4140 | } | |
6f086dfc | 4141 | |
78a52f11 | 4142 | part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0); |
e7949876 AM |
4143 | |
4144 | sizetree | |
4145 | = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode)); | |
4146 | where_pad = FUNCTION_ARG_PADDING (passed_mode, type); | |
c2ed6cf8 | 4147 | boundary = targetm.calls.function_arg_boundary (passed_mode, type); |
123148b5 BS |
4148 | round_boundary = targetm.calls.function_arg_round_boundary (passed_mode, |
4149 | type); | |
6e985040 | 4150 | locate->where_pad = where_pad; |
2e3f842f L |
4151 | |
4152 | /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */ | |
4153 | if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT) | |
4154 | boundary = MAX_SUPPORTED_STACK_ALIGNMENT; | |
4155 | ||
bfc45551 | 4156 | locate->boundary = boundary; |
6f086dfc | 4157 | |
2e3f842f L |
4158 | if (SUPPORTS_STACK_ALIGNMENT) |
4159 | { | |
4160 | /* stack_alignment_estimated can't change after stack has been | |
4161 | realigned. */ | |
4162 | if (crtl->stack_alignment_estimated < boundary) | |
4163 | { | |
4164 | if (!crtl->stack_realign_processed) | |
4165 | crtl->stack_alignment_estimated = boundary; | |
4166 | else | |
4167 | { | |
4168 | /* If stack is realigned and stack alignment value | |
4169 | hasn't been finalized, it is OK not to increase | |
4170 | stack_alignment_estimated. The bigger alignment | |
4171 | requirement is recorded in stack_alignment_needed | |
4172 | below. */ | |
4173 | gcc_assert (!crtl->stack_realign_finalized | |
4174 | && crtl->stack_realign_needed); | |
4175 | } | |
4176 | } | |
4177 | } | |
4178 | ||
c7e777b5 RH |
4179 | /* Remember if the outgoing parameter requires extra alignment on the |
4180 | calling function side. */ | |
cb91fab0 JH |
4181 | if (crtl->stack_alignment_needed < boundary) |
4182 | crtl->stack_alignment_needed = boundary; | |
2e3f842f L |
4183 | if (crtl->preferred_stack_boundary < boundary) |
4184 | crtl->preferred_stack_boundary = boundary; | |
c7e777b5 | 4185 | |
6dad9361 TS |
4186 | if (ARGS_GROW_DOWNWARD) |
4187 | { | |
4188 | locate->slot_offset.constant = -initial_offset_ptr->constant; | |
4189 | if (initial_offset_ptr->var) | |
4190 | locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0), | |
4191 | initial_offset_ptr->var); | |
4192 | ||
a589e68f DM |
4193 | { |
4194 | tree s2 = sizetree; | |
4195 | if (where_pad != none | |
4196 | && (!tree_fits_uhwi_p (sizetree) | |
4197 | || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) | |
4198 | s2 = round_up (s2, round_boundary / BITS_PER_UNIT); | |
4199 | SUB_PARM_SIZE (locate->slot_offset, s2); | |
4200 | } | |
6dad9361 TS |
4201 | |
4202 | locate->slot_offset.constant += part_size_in_regs; | |
4203 | ||
4204 | if (!in_regs || reg_parm_stack_space > 0) | |
4205 | pad_to_arg_alignment (&locate->slot_offset, boundary, | |
4206 | &locate->alignment_pad); | |
4207 | ||
4208 | locate->size.constant = (-initial_offset_ptr->constant | |
4209 | - locate->slot_offset.constant); | |
4210 | if (initial_offset_ptr->var) | |
4211 | locate->size.var = size_binop (MINUS_EXPR, | |
4212 | size_binop (MINUS_EXPR, | |
4213 | ssize_int (0), | |
4214 | initial_offset_ptr->var), | |
4215 | locate->slot_offset.var); | |
4216 | ||
4217 | /* Pad_below needs the pre-rounded size to know how much to pad | |
4218 | below. */ | |
4219 | locate->offset = locate->slot_offset; | |
4220 | if (where_pad == downward) | |
4221 | pad_below (&locate->offset, passed_mode, sizetree); | |
4222 | ||
4223 | } | |
4224 | else | |
4225 | { | |
4226 | if (!in_regs || reg_parm_stack_space > 0) | |
4227 | pad_to_arg_alignment (initial_offset_ptr, boundary, | |
4228 | &locate->alignment_pad); | |
4229 | locate->slot_offset = *initial_offset_ptr; | |
6f086dfc RS |
4230 | |
4231 | #ifdef PUSH_ROUNDING | |
6dad9361 TS |
4232 | if (passed_mode != BLKmode) |
4233 | sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); | |
6f086dfc RS |
4234 | #endif |
4235 | ||
6dad9361 TS |
4236 | /* Pad_below needs the pre-rounded size to know how much to pad below |
4237 | so this must be done before rounding up. */ | |
4238 | locate->offset = locate->slot_offset; | |
4239 | if (where_pad == downward) | |
4240 | pad_below (&locate->offset, passed_mode, sizetree); | |
d4b0a7a0 | 4241 | |
6dad9361 TS |
4242 | if (where_pad != none |
4243 | && (!tree_fits_uhwi_p (sizetree) | |
4244 | || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) | |
4245 | sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT); | |
6f086dfc | 4246 | |
6dad9361 | 4247 | ADD_PARM_SIZE (locate->size, sizetree); |
e7949876 | 4248 | |
6dad9361 TS |
4249 | locate->size.constant -= part_size_in_regs; |
4250 | } | |
099590dc MM |
4251 | |
4252 | #ifdef FUNCTION_ARG_OFFSET | |
4253 | locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type); | |
4254 | #endif | |
6f086dfc RS |
4255 | } |
4256 | ||
e16c591a RS |
4257 | /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY. |
4258 | BOUNDARY is measured in bits, but must be a multiple of a storage unit. */ | |
4259 | ||
6f086dfc | 4260 | static void |
fa8db1f7 AJ |
4261 | pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, |
4262 | struct args_size *alignment_pad) | |
6f086dfc | 4263 | { |
a544cfd2 KG |
4264 | tree save_var = NULL_TREE; |
4265 | HOST_WIDE_INT save_constant = 0; | |
a751cd5b | 4266 | int boundary_in_bytes = boundary / BITS_PER_UNIT; |
a594a19c GK |
4267 | HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET; |
4268 | ||
4269 | #ifdef SPARC_STACK_BOUNDARY_HACK | |
2358ff91 EB |
4270 | /* ??? The SPARC port may claim a STACK_BOUNDARY higher than |
4271 | the real alignment of %sp. However, when it does this, the | |
4272 | alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ | |
a594a19c GK |
4273 | if (SPARC_STACK_BOUNDARY_HACK) |
4274 | sp_offset = 0; | |
4275 | #endif | |
4fc026cd | 4276 | |
6f6b8f81 | 4277 | if (boundary > PARM_BOUNDARY) |
4fc026cd CM |
4278 | { |
4279 | save_var = offset_ptr->var; | |
4280 | save_constant = offset_ptr->constant; | |
4281 | } | |
4282 | ||
4283 | alignment_pad->var = NULL_TREE; | |
4284 | alignment_pad->constant = 0; | |
4fc026cd | 4285 | |
6f086dfc RS |
4286 | if (boundary > BITS_PER_UNIT) |
4287 | { | |
4288 | if (offset_ptr->var) | |
4289 | { | |
a594a19c GK |
4290 | tree sp_offset_tree = ssize_int (sp_offset); |
4291 | tree offset = size_binop (PLUS_EXPR, | |
4292 | ARGS_SIZE_TREE (*offset_ptr), | |
4293 | sp_offset_tree); | |
6dad9361 TS |
4294 | tree rounded; |
4295 | if (ARGS_GROW_DOWNWARD) | |
4296 | rounded = round_down (offset, boundary / BITS_PER_UNIT); | |
4297 | else | |
4298 | rounded = round_up (offset, boundary / BITS_PER_UNIT); | |
a594a19c GK |
4299 | |
4300 | offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree); | |
e7949876 AM |
4301 | /* ARGS_SIZE_TREE includes constant term. */ |
4302 | offset_ptr->constant = 0; | |
6f6b8f81 | 4303 | if (boundary > PARM_BOUNDARY) |
dd3f0101 | 4304 | alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, |
fed3cef0 | 4305 | save_var); |
6f086dfc RS |
4306 | } |
4307 | else | |
718fe406 | 4308 | { |
a594a19c | 4309 | offset_ptr->constant = -sp_offset + |
6b241bd1 MT |
4310 | (ARGS_GROW_DOWNWARD |
4311 | ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes) | |
4312 | : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)); | |
6dad9361 | 4313 | |
6f6b8f81 | 4314 | if (boundary > PARM_BOUNDARY) |
718fe406 KH |
4315 | alignment_pad->constant = offset_ptr->constant - save_constant; |
4316 | } | |
6f086dfc RS |
4317 | } |
4318 | } | |
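/* A minimal standalone sketch of the rounding done above, for nonnegative
   byte offsets (helper names are invented; the real code uses the
   CEIL_ROUND/FLOOR_ROUND macros and round_up/round_down for tree-valued
   offsets).  Upward-growing argument areas round the offset up to the
   boundary; ARGS_GROW_DOWNWARD targets round it down.  */

static long
round_up_bytes (long offset, long boundary_in_bytes)
{
  return (offset + boundary_in_bytes - 1)
	 / boundary_in_bytes * boundary_in_bytes;
}

static long
round_down_bytes (long offset, long boundary_in_bytes)
{
  return offset / boundary_in_bytes * boundary_in_bytes;
}

/* For example, with a 16-byte boundary an offset of 20 becomes 32 when
   rounded up and 16 when rounded down; the difference from the saved,
   unrounded offset is what ends up in ALIGNMENT_PAD.  */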
4319 | ||
4320 | static void | |
ef4bddc2 | 4321 | pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree) |
6f086dfc RS |
4322 | { |
4323 | if (passed_mode != BLKmode) | |
4324 | { | |
4325 | if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY) | |
4326 | offset_ptr->constant | |
4327 | += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1) | |
4328 | / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT) | |
4329 | - GET_MODE_SIZE (passed_mode)); | |
4330 | } | |
4331 | else | |
4332 | { | |
4333 | if (TREE_CODE (sizetree) != INTEGER_CST | |
4334 | || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY) | |
4335 | { | |
4336 | /* Round the size up to multiple of PARM_BOUNDARY bits. */ | |
4337 | tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT); | |
4338 | /* Add it in. */ | |
4339 | ADD_PARM_SIZE (*offset_ptr, s2); | |
4340 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
4341 | } | |
4342 | } | |
4343 | } | |
6f086dfc | 4344 | \f |
6f086dfc | 4345 | |
6fb5fa3c DB |
4346 | /* True if register REGNO was alive at a place where `setjmp' was |
4347 | called and was set more than once or is an argument. Such regs may | |
4348 | be clobbered by `longjmp'. */ | |
4349 | ||
4350 | static bool | |
4351 | regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno) | |
4352 | { | |
4353 | /* There appear to be cases where some local vars never reach the | |
4354 | backend but have bogus regnos. */ | |
4355 | if (regno >= max_reg_num ()) | |
4356 | return false; | |
4357 | ||
4358 | return ((REG_N_SETS (regno) > 1 | |
fefa31b5 DM |
4359 | || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)), |
4360 | regno)) | |
6fb5fa3c DB |
4361 | && REGNO_REG_SET_P (setjmp_crosses, regno)); |
4362 | } | |
4363 | ||
4364 | /* Walk the tree of blocks describing the binding levels within a | |
4365 | function and warn about variables that might be killed by setjmp or |
4366 | vfork. This is done after flow analysis and before register |
4367 | allocation, since register allocation replaces the pseudo-regs with |
4368 | hard regs. */ |
4369 | ||
4370 | static void | |
4371 | setjmp_vars_warning (bitmap setjmp_crosses, tree block) | |
6f086dfc | 4372 | { |
b3694847 | 4373 | tree decl, sub; |
6de9cd9a | 4374 | |
910ad8de | 4375 | for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl)) |
6f086dfc | 4376 | { |
6de9cd9a | 4377 | if (TREE_CODE (decl) == VAR_DECL |
bc41842b | 4378 | && DECL_RTL_SET_P (decl) |
f8cfc6aa | 4379 | && REG_P (DECL_RTL (decl)) |
6fb5fa3c | 4380 | && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) |
b8698a0f | 4381 | warning (OPT_Wclobbered, "variable %q+D might be clobbered by" |
2b001724 | 4382 | " %<longjmp%> or %<vfork%>", decl); |
6f086dfc | 4383 | } |
6de9cd9a | 4384 | |
87caf699 | 4385 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub)) |
6fb5fa3c | 4386 | setjmp_vars_warning (setjmp_crosses, sub); |
6f086dfc RS |
4387 | } |
4388 | ||
6de9cd9a | 4389 | /* Do the appropriate part of setjmp_vars_warning |
6f086dfc RS |
4390 | but for arguments instead of local variables. */ |
4391 | ||
6fb5fa3c DB |
4392 | static void |
4393 | setjmp_args_warning (bitmap setjmp_crosses) | |
6f086dfc | 4394 | { |
b3694847 | 4395 | tree decl; |
6f086dfc | 4396 | for (decl = DECL_ARGUMENTS (current_function_decl); |
910ad8de | 4397 | decl; decl = DECL_CHAIN (decl)) |
6f086dfc | 4398 | if (DECL_RTL (decl) != 0 |
f8cfc6aa | 4399 | && REG_P (DECL_RTL (decl)) |
6fb5fa3c | 4400 | && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) |
b8698a0f | 4401 | warning (OPT_Wclobbered, |
2b001724 | 4402 | "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>", |
dee15844 | 4403 | decl); |
6f086dfc RS |
4404 | } |
4405 | ||
6fb5fa3c DB |
4406 | /* Generate warning messages for variables live across setjmp. */ |
4407 | ||
b8698a0f | 4408 | void |
6fb5fa3c DB |
4409 | generate_setjmp_warnings (void) |
4410 | { | |
4411 | bitmap setjmp_crosses = regstat_get_setjmp_crosses (); | |
4412 | ||
0cae8d31 | 4413 | if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS |
6fb5fa3c DB |
4414 | || bitmap_empty_p (setjmp_crosses)) |
4415 | return; | |
4416 | ||
4417 | setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl)); | |
4418 | setjmp_args_warning (setjmp_crosses); | |
4419 | } | |
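/* A small self-contained, user-level example (assumed; helper name invented)
   of the situation these warnings catch: I may live in a register across
   setjmp and is modified between setjmp and a possible longjmp, so
   -Wclobbered reports that it "might be clobbered by 'longjmp' or 'vfork'".  */

#include <setjmp.h>

static jmp_buf env;
extern void may_longjmp (jmp_buf);   /* may call longjmp (env, 1) */

int
clobbered_example (void)
{
  int i = 0;                 /* warning: variable 'i' might be clobbered ... */
  if (setjmp (env) == 0)
    {
      i = 1;                 /* set after setjmp, before the possible longjmp */
      may_longjmp (env);
    }
  return i;                  /* unreliable on the longjmp return path */
}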
4420 | ||
6f086dfc | 4421 | \f |
3373692b | 4422 | /* Reverse the order of elements in the fragment chain T of blocks, |
1e3c1d95 JJ |
4423 | and return the new head of the chain (old last element). |
4424 | In addition, clear BLOCK_SAME_RANGE flags when needed |
4425 | and adjust BLOCK_SUPERCONTEXT from the super fragment to | |
4426 | its super fragment origin. */ | |
3373692b JJ |
4427 | |
4428 | static tree | |
4429 | block_fragments_nreverse (tree t) | |
4430 | { | |
1e3c1d95 JJ |
4431 | tree prev = 0, block, next, prev_super = 0; |
4432 | tree super = BLOCK_SUPERCONTEXT (t); | |
4433 | if (BLOCK_FRAGMENT_ORIGIN (super)) | |
4434 | super = BLOCK_FRAGMENT_ORIGIN (super); | |
3373692b JJ |
4435 | for (block = t; block; block = next) |
4436 | { | |
4437 | next = BLOCK_FRAGMENT_CHAIN (block); | |
4438 | BLOCK_FRAGMENT_CHAIN (block) = prev; | |
1e3c1d95 JJ |
4439 | if ((prev && !BLOCK_SAME_RANGE (prev)) |
4440 | || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block)) | |
4441 | != prev_super)) | |
4442 | BLOCK_SAME_RANGE (block) = 0; | |
4443 | prev_super = BLOCK_SUPERCONTEXT (block); | |
4444 | BLOCK_SUPERCONTEXT (block) = super; | |
3373692b JJ |
4445 | prev = block; |
4446 | } | |
1e3c1d95 JJ |
4447 | t = BLOCK_FRAGMENT_ORIGIN (t); |
4448 | if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t)) | |
4449 | != prev_super) | |
4450 | BLOCK_SAME_RANGE (t) = 0; | |
4451 | BLOCK_SUPERCONTEXT (t) = super; | |
3373692b JJ |
4452 | return prev; |
4453 | } | |
4454 | ||
4455 | /* Reverse the order of elements in the chain T of blocks, | |
4456 | and return the new head of the chain (old last element). | |
4457 | Also do the same on subblocks and reverse the order of elements | |
4458 | in BLOCK_FRAGMENT_CHAIN as well. */ | |
4459 | ||
4460 | static tree | |
4461 | blocks_nreverse_all (tree t) | |
4462 | { | |
4463 | tree prev = 0, block, next; | |
4464 | for (block = t; block; block = next) | |
4465 | { | |
4466 | next = BLOCK_CHAIN (block); | |
4467 | BLOCK_CHAIN (block) = prev; | |
3373692b JJ |
4468 | if (BLOCK_FRAGMENT_CHAIN (block) |
4469 | && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE) | |
1e3c1d95 JJ |
4470 | { |
4471 | BLOCK_FRAGMENT_CHAIN (block) | |
4472 | = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block)); | |
4473 | if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block))) | |
4474 | BLOCK_SAME_RANGE (block) = 0; | |
4475 | } | |
4476 | BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block)); | |
3373692b JJ |
4477 | prev = block; |
4478 | } | |
4479 | return prev; | |
4480 | } | |
4481 | ||
4482 | ||
a20612aa RH |
4483 | /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END}, |
4484 | and create duplicate blocks. */ | |
4485 | /* ??? Need an option to either create block fragments or to create | |
4486 | abstract origin duplicates of a source block. It really depends | |
4487 | on what optimization has been performed. */ | |
467456d0 | 4488 | |
116eebd6 | 4489 | void |
fa8db1f7 | 4490 | reorder_blocks (void) |
467456d0 | 4491 | { |
116eebd6 | 4492 | tree block = DECL_INITIAL (current_function_decl); |
467456d0 | 4493 | |
1a4450c7 | 4494 | if (block == NULL_TREE) |
116eebd6 | 4495 | return; |
fc289cd1 | 4496 | |
00f96dc9 | 4497 | auto_vec<tree, 10> block_stack; |
18c038b9 | 4498 | |
a20612aa | 4499 | /* Reset the TREE_ASM_WRITTEN bit for all blocks. */ |
6de9cd9a | 4500 | clear_block_marks (block); |
a20612aa | 4501 | |
116eebd6 MM |
4502 | /* Prune the old trees away, so that they don't get in the way. */ |
4503 | BLOCK_SUBBLOCKS (block) = NULL_TREE; | |
4504 | BLOCK_CHAIN (block) = NULL_TREE; | |
fc289cd1 | 4505 | |
a20612aa | 4506 | /* Recreate the block tree from the note nesting. */ |
116eebd6 | 4507 | reorder_blocks_1 (get_insns (), block, &block_stack); |
3373692b | 4508 | BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block)); |
467456d0 RS |
4509 | } |
4510 | ||
a20612aa | 4511 | /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */ |
0a1c58a2 | 4512 | |
6de9cd9a DN |
4513 | void |
4514 | clear_block_marks (tree block) | |
cc1fe44f | 4515 | { |
a20612aa | 4516 | while (block) |
cc1fe44f | 4517 | { |
a20612aa | 4518 | TREE_ASM_WRITTEN (block) = 0; |
6de9cd9a | 4519 | clear_block_marks (BLOCK_SUBBLOCKS (block)); |
a20612aa | 4520 | block = BLOCK_CHAIN (block); |
cc1fe44f DD |
4521 | } |
4522 | } | |
4523 | ||
0a1c58a2 | 4524 | static void |
691fe203 DM |
4525 | reorder_blocks_1 (rtx_insn *insns, tree current_block, |
4526 | vec<tree> *p_block_stack) | |
0a1c58a2 | 4527 | { |
691fe203 | 4528 | rtx_insn *insn; |
1e3c1d95 | 4529 | tree prev_beg = NULL_TREE, prev_end = NULL_TREE; |
0a1c58a2 JL |
4530 | |
4531 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
4532 | { | |
4b4bf941 | 4533 | if (NOTE_P (insn)) |
0a1c58a2 | 4534 | { |
a38e7aa5 | 4535 | if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG) |
0a1c58a2 JL |
4536 | { |
4537 | tree block = NOTE_BLOCK (insn); | |
51b7d006 DJ |
4538 | tree origin; |
4539 | ||
3373692b JJ |
4540 | gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE); |
4541 | origin = block; | |
a20612aa | 4542 | |
1e3c1d95 JJ |
4543 | if (prev_end) |
4544 | BLOCK_SAME_RANGE (prev_end) = 0; | |
4545 | prev_end = NULL_TREE; | |
4546 | ||
a20612aa RH |
4547 | /* If we have seen this block before, that means it now |
4548 | spans multiple address regions. Create a new fragment. */ | |
0a1c58a2 JL |
4549 | if (TREE_ASM_WRITTEN (block)) |
4550 | { | |
a20612aa | 4551 | tree new_block = copy_node (block); |
a20612aa | 4552 | |
1e3c1d95 | 4553 | BLOCK_SAME_RANGE (new_block) = 0; |
a20612aa RH |
4554 | BLOCK_FRAGMENT_ORIGIN (new_block) = origin; |
4555 | BLOCK_FRAGMENT_CHAIN (new_block) | |
4556 | = BLOCK_FRAGMENT_CHAIN (origin); | |
4557 | BLOCK_FRAGMENT_CHAIN (origin) = new_block; | |
4558 | ||
4559 | NOTE_BLOCK (insn) = new_block; | |
4560 | block = new_block; | |
0a1c58a2 | 4561 | } |
a20612aa | 4562 | |
1e3c1d95 JJ |
4563 | if (prev_beg == current_block && prev_beg) |
4564 | BLOCK_SAME_RANGE (block) = 1; | |
4565 | ||
4566 | prev_beg = origin; | |
4567 | ||
0a1c58a2 JL |
4568 | BLOCK_SUBBLOCKS (block) = 0; |
4569 | TREE_ASM_WRITTEN (block) = 1; | |
339a28b9 ZW |
4570 | /* When there's only one block for the entire function, |
4571 | current_block == block and we mustn't do this; it |
4572 | would cause infinite recursion. */ |
4573 | if (block != current_block) | |
4574 | { | |
1e3c1d95 | 4575 | tree super; |
51b7d006 | 4576 | if (block != origin) |
1e3c1d95 JJ |
4577 | gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block |
4578 | || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT | |
4579 | (origin)) | |
4580 | == current_block); | |
9771b263 | 4581 | if (p_block_stack->is_empty ()) |
1e3c1d95 JJ |
4582 | super = current_block; |
4583 | else | |
4584 | { | |
9771b263 | 4585 | super = p_block_stack->last (); |
1e3c1d95 JJ |
4586 | gcc_assert (super == current_block |
4587 | || BLOCK_FRAGMENT_ORIGIN (super) | |
4588 | == current_block); | |
4589 | } | |
4590 | BLOCK_SUPERCONTEXT (block) = super; | |
339a28b9 ZW |
4591 | BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); |
4592 | BLOCK_SUBBLOCKS (current_block) = block; | |
51b7d006 | 4593 | current_block = origin; |
339a28b9 | 4594 | } |
9771b263 | 4595 | p_block_stack->safe_push (block); |
0a1c58a2 | 4596 | } |
a38e7aa5 | 4597 | else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END) |
0a1c58a2 | 4598 | { |
9771b263 | 4599 | NOTE_BLOCK (insn) = p_block_stack->pop (); |
0a1c58a2 | 4600 | current_block = BLOCK_SUPERCONTEXT (current_block); |
1e3c1d95 JJ |
4601 | if (BLOCK_FRAGMENT_ORIGIN (current_block)) |
4602 | current_block = BLOCK_FRAGMENT_ORIGIN (current_block); | |
4603 | prev_beg = NULL_TREE; | |
4604 | prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn)) | |
4605 | ? NOTE_BLOCK (insn) : NULL_TREE; | |
0a1c58a2 JL |
4606 | } |
4607 | } | |
1e3c1d95 JJ |
4608 | else |
4609 | { | |
4610 | prev_beg = NULL_TREE; | |
4611 | if (prev_end) | |
4612 | BLOCK_SAME_RANGE (prev_end) = 0; | |
4613 | prev_end = NULL_TREE; | |
4614 | } | |
0a1c58a2 JL |
4615 | } |
4616 | } | |
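/* A standalone sketch of the stack discipline used above (types and names
   invented): a BLOCK_BEG note pushes, a BLOCK_END note pops, and each pop
   pairs an END with the innermost open BEG, which is how the block nesting
   is recovered from the flat insn stream.  */

#include <stdio.h>

enum note_kind { BLOCK_BEG, BLOCK_END, OTHER_NOTE };

static void
match_block_notes (const enum note_kind *stream, int n)
{
  int stack[64], sp = 0;               /* plays the role of p_block_stack */
  for (int i = 0; i < n; i++)
    if (stream[i] == BLOCK_BEG)
      stack[sp++] = i;                 /* remember where this block opened */
    else if (stream[i] == BLOCK_END)
      printf ("block spans notes %d..%d\n", stack[--sp], i);
}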
4617 | ||
467456d0 RS |
4618 | /* Reverse the order of elements in the chain T of blocks, |
4619 | and return the new head of the chain (old last element). */ | |
4620 | ||
6de9cd9a | 4621 | tree |
fa8db1f7 | 4622 | blocks_nreverse (tree t) |
467456d0 | 4623 | { |
3373692b JJ |
4624 | tree prev = 0, block, next; |
4625 | for (block = t; block; block = next) | |
467456d0 | 4626 | { |
3373692b JJ |
4627 | next = BLOCK_CHAIN (block); |
4628 | BLOCK_CHAIN (block) = prev; | |
4629 | prev = block; | |
467456d0 RS |
4630 | } |
4631 | return prev; | |
4632 | } | |
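/* The same in-place reversal technique on a plain singly linked list, as a
   standalone sketch (struct and function names are invented):  */

struct node { struct node *next; };

static struct node *
nreverse_sketch (struct node *t)
{
  struct node *prev = 0, *n, *next;
  for (n = t; n; n = next)
    {
      next = n->next;        /* remember the rest of the chain */
      n->next = prev;        /* point this node at the already-reversed part */
      prev = n;
    }
  return prev;               /* the old last element is the new head */
}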
4633 | ||
61e46a7d NF |
4634 | /* Concatenate two chains of blocks (chained through BLOCK_CHAIN) |
4635 | by modifying the last node in chain 1 to point to chain 2. */ | |
4636 | ||
4637 | tree | |
4638 | block_chainon (tree op1, tree op2) | |
4639 | { | |
4640 | tree t1; | |
4641 | ||
4642 | if (!op1) | |
4643 | return op2; | |
4644 | if (!op2) | |
4645 | return op1; | |
4646 | ||
4647 | for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1)) | |
4648 | continue; | |
4649 | BLOCK_CHAIN (t1) = op2; | |
4650 | ||
4651 | #ifdef ENABLE_TREE_CHECKING | |
4652 | { | |
4653 | tree t2; | |
4654 | for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2)) | |
4655 | gcc_assert (t2 != t1); | |
4656 | } | |
4657 | #endif | |
4658 | ||
4659 | return op1; | |
4660 | } | |
4661 | ||
18c038b9 MM |
4662 | /* Count the subblocks of the list starting with BLOCK. If VECTOR is |
4663 | non-NULL, list them all into VECTOR, in a depth-first preorder | |
4664 | traversal of the block tree. Also clear TREE_ASM_WRITTEN in all | |
b2a59b15 | 4665 | blocks. */ |
467456d0 RS |
4666 | |
4667 | static int | |
fa8db1f7 | 4668 | all_blocks (tree block, tree *vector) |
467456d0 | 4669 | { |
b2a59b15 MS |
4670 | int n_blocks = 0; |
4671 | ||
a84efb51 JO |
4672 | while (block) |
4673 | { | |
4674 | TREE_ASM_WRITTEN (block) = 0; | |
b2a59b15 | 4675 | |
a84efb51 JO |
4676 | /* Record this block. */ |
4677 | if (vector) | |
4678 | vector[n_blocks] = block; | |
b2a59b15 | 4679 | |
a84efb51 | 4680 | ++n_blocks; |
718fe406 | 4681 | |
a84efb51 JO |
4682 | /* Record the subblocks, and their subblocks... */ |
4683 | n_blocks += all_blocks (BLOCK_SUBBLOCKS (block), | |
4684 | vector ? vector + n_blocks : 0); | |
4685 | block = BLOCK_CHAIN (block); | |
4686 | } | |
467456d0 RS |
4687 | |
4688 | return n_blocks; | |
4689 | } | |
18c038b9 MM |
4690 | |
4691 | /* Return a vector containing all the blocks rooted at BLOCK. The | |
4692 | number of elements in the vector is stored in N_BLOCKS_P. The | |
4693 | vector is dynamically allocated; it is the caller's responsibility | |
4694 | to call `free' on the pointer returned. */ | |
718fe406 | 4695 | |
18c038b9 | 4696 | static tree * |
fa8db1f7 | 4697 | get_block_vector (tree block, int *n_blocks_p) |
18c038b9 MM |
4698 | { |
4699 | tree *block_vector; | |
4700 | ||
4701 | *n_blocks_p = all_blocks (block, NULL); | |
5ed6ace5 | 4702 | block_vector = XNEWVEC (tree, *n_blocks_p); |
18c038b9 MM |
4703 | all_blocks (block, block_vector); |
4704 | ||
4705 | return block_vector; | |
4706 | } | |
4707 | ||
f83b236e | 4708 | static GTY(()) int next_block_index = 2; |
18c038b9 MM |
4709 | |
4710 | /* Set BLOCK_NUMBER for all the blocks in FN. */ | |
4711 | ||
4712 | void | |
fa8db1f7 | 4713 | number_blocks (tree fn) |
18c038b9 MM |
4714 | { |
4715 | int i; | |
4716 | int n_blocks; | |
4717 | tree *block_vector; | |
4718 | ||
4719 | /* For SDB and XCOFF debugging output, we start numbering the blocks | |
4720 | from 1 within each function, rather than keeping a running | |
4721 | count. */ | |
53943148 | 4722 | #if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO) |
b0e3a658 RK |
4723 | if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG) |
4724 | next_block_index = 1; | |
18c038b9 MM |
4725 | #endif |
4726 | ||
4727 | block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); | |
4728 | ||
4729 | /* The top-level BLOCK isn't numbered at all. */ | |
4730 | for (i = 1; i < n_blocks; ++i) | |
4731 | /* We number the blocks from two. */ | |
4732 | BLOCK_NUMBER (block_vector[i]) = next_block_index++; | |
4733 | ||
4734 | free (block_vector); | |
4735 | ||
4736 | return; | |
4737 | } | |
df8992f8 RH |
4738 | |
4739 | /* If VAR is present in a subblock of BLOCK, return the subblock. */ | |
4740 | ||
24e47c76 | 4741 | DEBUG_FUNCTION tree |
fa8db1f7 | 4742 | debug_find_var_in_block_tree (tree var, tree block) |
df8992f8 RH |
4743 | { |
4744 | tree t; | |
4745 | ||
4746 | for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t)) | |
4747 | if (t == var) | |
4748 | return block; | |
4749 | ||
4750 | for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t)) | |
4751 | { | |
4752 | tree ret = debug_find_var_in_block_tree (var, t); | |
4753 | if (ret) | |
4754 | return ret; | |
4755 | } | |
4756 | ||
4757 | return NULL_TREE; | |
4758 | } | |
467456d0 | 4759 | \f |
db2960f4 SL |
4760 | /* Keep track of whether we're in a dummy function context. If we are, |
4761 | we don't want to invoke the set_current_function hook, because we'll | |
4762 | get into trouble if the hook calls target_reinit () recursively or | |
4763 | when the initial initialization is not yet complete. */ | |
4764 | ||
4765 | static bool in_dummy_function; | |
4766 | ||
ab442df7 MM |
4767 | /* Invoke the target hook when setting cfun. Update the optimization options |
4768 | if the function uses different options than the default. */ | |
db2960f4 SL |
4769 | |
4770 | static void | |
4771 | invoke_set_current_function_hook (tree fndecl) | |
4772 | { | |
4773 | if (!in_dummy_function) | |
ab442df7 MM |
4774 | { |
4775 | tree opts = ((fndecl) | |
4776 | ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) | |
4777 | : optimization_default_node); | |
4778 | ||
4779 | if (!opts) | |
4780 | opts = optimization_default_node; | |
4781 | ||
4782 | /* Change optimization options if needed. */ | |
4783 | if (optimization_current_node != opts) | |
4784 | { | |
4785 | optimization_current_node = opts; | |
46625112 | 4786 | cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts)); |
ab442df7 MM |
4787 | } |
4788 | ||
892c4745 | 4789 | targetm.set_current_function (fndecl); |
4b1baac8 | 4790 | this_fn_optabs = this_target_optabs; |
135204dd | 4791 | |
4b1baac8 | 4792 | if (opts != optimization_default_node) |
135204dd | 4793 | { |
4b1baac8 RS |
4794 | init_tree_optimization_optabs (opts); |
4795 | if (TREE_OPTIMIZATION_OPTABS (opts)) | |
4796 | this_fn_optabs = (struct target_optabs *) | |
4797 | TREE_OPTIMIZATION_OPTABS (opts); | |
135204dd | 4798 | } |
ab442df7 | 4799 | } |
db2960f4 SL |
4800 | } |
4801 | ||
4802 | /* cfun should never be set directly; use this function. */ | |
4803 | ||
4804 | void | |
4805 | set_cfun (struct function *new_cfun) | |
4806 | { | |
4807 | if (cfun != new_cfun) | |
4808 | { | |
4809 | cfun = new_cfun; | |
4810 | invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE); | |
b3e46655 | 4811 | redirect_edge_var_map_empty (); |
db2960f4 SL |
4812 | } |
4813 | } | |
4814 | ||
db2960f4 SL |
4815 | /* Initialized with NOGC, making this poisonous to the garbage collector. */ |
4816 | ||
526ceb68 | 4817 | static vec<function *> cfun_stack; |
db2960f4 | 4818 | |
af16bc76 MJ |
4819 | /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set |
4820 | current_function_decl accordingly. */ | |
db2960f4 SL |
4821 | |
4822 | void | |
4823 | push_cfun (struct function *new_cfun) | |
4824 | { | |
af16bc76 MJ |
4825 | gcc_assert ((!cfun && !current_function_decl) |
4826 | || (cfun && current_function_decl == cfun->decl)); | |
9771b263 | 4827 | cfun_stack.safe_push (cfun); |
af16bc76 | 4828 | current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE; |
db2960f4 SL |
4829 | set_cfun (new_cfun); |
4830 | } | |
4831 | ||
af16bc76 | 4832 | /* Pop cfun from the stack. Also set current_function_decl accordingly. */ |
db2960f4 SL |
4833 | |
4834 | void | |
4835 | pop_cfun (void) | |
4836 | { | |
9771b263 | 4837 | struct function *new_cfun = cfun_stack.pop (); |
af16bc76 MJ |
4838 | /* When in_dummy_function, we do have a cfun but current_function_decl is |
4839 | NULL. We also allow pushing NULL cfun and subsequently changing | |
4840 | current_function_decl to something else and have both restored by | |
4841 | pop_cfun. */ | |
4842 | gcc_checking_assert (in_dummy_function | |
4843 | || !cfun | |
4844 | || current_function_decl == cfun->decl); | |
38d34676 | 4845 | set_cfun (new_cfun); |
af16bc76 | 4846 | current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE; |
db2960f4 | 4847 | } |
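/* A hedged usage sketch (GCC-internal context assumed; the helper name is
   invented): code that needs to work in another function's context brackets
   the work with push_cfun/pop_cfun so that cfun and current_function_decl
   stay consistent, as required by the asserts above.  */

static void
with_function_context (struct function *other)
{
  push_cfun (other);      /* cfun and current_function_decl now track OTHER */
  /* ... inspect or emit IL for OTHER here ... */
  pop_cfun ();            /* previous cfun and current_function_decl restored */
}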
3e87758a RL |
4848 | |
4849 | /* Return the current value of funcdef_no and increment it. */ |
4850 | int | |
b8698a0f | 4851 | get_next_funcdef_no (void) |
3e87758a RL |
4852 | { |
4853 | return funcdef_no++; | |
4854 | } | |
4855 | ||
903d1e67 XDL |
4856 | /* Return the current value of funcdef_no. */ |
4857 | int | |
4858 | get_last_funcdef_no (void) | |
4859 | { | |
4860 | return funcdef_no; | |
4861 | } | |
4862 | ||
3a70d621 | 4863 | /* Allocate a function structure for FNDECL and set its contents |
db2960f4 SL |
4864 | to the defaults. Set cfun to the newly-allocated object. |
4865 | Some of the helper functions invoked during initialization assume | |
4866 | that cfun has already been set. Therefore, assign the new object | |
4867 | directly into cfun and invoke the back end hook explicitly at the | |
4868 | very end, rather than initializing a temporary and calling set_cfun | |
4869 | on it. | |
182e0d71 AK |
4870 | |
4871 | ABSTRACT_P is true if this is a function that will never be seen by | |
4872 | the middle-end. Such functions are front-end concepts (like C++ | |
4873 | function templates) that do not correspond directly to functions | |
4874 | placed in object files. */ | |
7a80cf9a | 4875 | |
3a70d621 | 4876 | void |
182e0d71 | 4877 | allocate_struct_function (tree fndecl, bool abstract_p) |
6f086dfc | 4878 | { |
6de9cd9a | 4879 | tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE; |
6f086dfc | 4880 | |
766090c2 | 4881 | cfun = ggc_cleared_alloc<function> (); |
b384405b | 4882 | |
3a70d621 | 4883 | init_eh_for_function (); |
6f086dfc | 4884 | |
3a70d621 RH |
4885 | if (init_machine_status) |
4886 | cfun->machine = (*init_machine_status) (); | |
e2ecd91c | 4887 | |
7c800926 KT |
4888 | #ifdef OVERRIDE_ABI_FORMAT |
4889 | OVERRIDE_ABI_FORMAT (fndecl); | |
4890 | #endif | |
4891 | ||
81464b2c | 4892 | if (fndecl != NULL_TREE) |
3a70d621 | 4893 | { |
db2960f4 SL |
4894 | DECL_STRUCT_FUNCTION (fndecl) = cfun; |
4895 | cfun->decl = fndecl; | |
70cf5bc1 | 4896 | current_function_funcdef_no = get_next_funcdef_no (); |
5b9db1bc MJ |
4897 | } |
4898 | ||
4899 | invoke_set_current_function_hook (fndecl); | |
db2960f4 | 4900 | |
5b9db1bc MJ |
4901 | if (fndecl != NULL_TREE) |
4902 | { | |
4903 | tree result = DECL_RESULT (fndecl); | |
f11a7b6d AO |
4904 | |
4905 | if (!abstract_p) | |
4906 | { | |
4907 | /* Now that we have activated any function-specific attributes | |
4908 | that might affect layout, particularly vector modes, relayout | |
4909 | each of the parameters and the result. */ | |
4910 | relayout_decl (result); | |
4911 | for (tree parm = DECL_ARGUMENTS (fndecl); parm; | |
4912 | parm = DECL_CHAIN (parm)) | |
4913 | relayout_decl (parm); | |
63b0cb04 CB |
4914 | |
4915 | /* Similarly relayout the function decl. */ | |
4916 | targetm.target_option.relayout_function (fndecl); | |
f11a7b6d AO |
4917 | } |
4918 | ||
182e0d71 | 4919 | if (!abstract_p && aggregate_value_p (result, fndecl)) |
db2960f4 | 4920 | { |
3a70d621 | 4921 | #ifdef PCC_STATIC_STRUCT_RETURN |
e3b5732b | 4922 | cfun->returns_pcc_struct = 1; |
3a70d621 | 4923 | #endif |
e3b5732b | 4924 | cfun->returns_struct = 1; |
db2960f4 SL |
4925 | } |
4926 | ||
f38958e8 | 4927 | cfun->stdarg = stdarg_p (fntype); |
b8698a0f | 4928 | |
db2960f4 SL |
4929 | /* Assume all registers in stdarg functions need to be saved. */ |
4930 | cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; | |
4931 | cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; | |
8f4f502f EB |
4932 | |
4933 | /* ??? This could be set on a per-function basis by the front-end | |
4934 | but is this worth the hassle? */ | |
4935 | cfun->can_throw_non_call_exceptions = flag_non_call_exceptions; | |
d764963b | 4936 | cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions; |
0b37ba8a AK |
4937 | |
4938 | if (!profile_flag && !flag_instrument_function_entry_exit) | |
4939 | DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1; | |
3a70d621 | 4940 | } |
db2960f4 SL |
4941 | } |
4942 | ||
4943 | /* This is like allocate_struct_function, but pushes a new cfun for FNDECL | |
4944 | instead of just setting it. */ | |
9d30f3c1 | 4945 | |
db2960f4 SL |
4946 | void |
4947 | push_struct_function (tree fndecl) | |
4948 | { | |
af16bc76 MJ |
4949 | /* When in_dummy_function we might be in the middle of a pop_cfun and |
4950 | current_function_decl and cfun may not match. */ | |
4951 | gcc_assert (in_dummy_function | |
4952 | || (!cfun && !current_function_decl) | |
4953 | || (cfun && current_function_decl == cfun->decl)); | |
9771b263 | 4954 | cfun_stack.safe_push (cfun); |
af16bc76 | 4955 | current_function_decl = fndecl; |
182e0d71 | 4956 | allocate_struct_function (fndecl, false); |
3a70d621 | 4957 | } |
6f086dfc | 4958 | |
8f4f502f | 4959 | /* Reset crtl and other non-struct-function variables to defaults as |
2067c116 | 4960 | appropriate for emitting rtl at the start of a function. */ |
6f086dfc | 4961 | |
3a70d621 | 4962 | static void |
db2960f4 | 4963 | prepare_function_start (void) |
3a70d621 | 4964 | { |
614d5bd8 | 4965 | gcc_assert (!get_last_insn ()); |
fb0703f7 | 4966 | init_temp_slots (); |
0de456a5 | 4967 | init_emit (); |
bd60bab2 | 4968 | init_varasm_status (); |
0de456a5 | 4969 | init_expr (); |
bf08ebeb | 4970 | default_rtl_profile (); |
6f086dfc | 4971 | |
a11e0df4 | 4972 | if (flag_stack_usage_info) |
d3c12306 | 4973 | { |
766090c2 | 4974 | cfun->su = ggc_cleared_alloc<stack_usage> (); |
d3c12306 EB |
4975 | cfun->su->static_stack_size = -1; |
4976 | } | |
4977 | ||
3a70d621 | 4978 | cse_not_expected = ! optimize; |
6f086dfc | 4979 | |
3a70d621 RH |
4980 | /* Caller save not needed yet. */ |
4981 | caller_save_needed = 0; | |
6f086dfc | 4982 | |
3a70d621 RH |
4983 | /* We haven't done register allocation yet. */ |
4984 | reg_renumber = 0; | |
6f086dfc | 4985 | |
b384405b BS |
4986 | /* Indicate that we have not instantiated virtual registers yet. */ |
4987 | virtuals_instantiated = 0; | |
4988 | ||
1b3d8f8a GK |
4989 | /* Indicate that we want CONCATs now. */ |
4990 | generating_concat_p = 1; | |
4991 | ||
b384405b BS |
4992 | /* Indicate we have no need of a frame pointer yet. */ |
4993 | frame_pointer_needed = 0; | |
b384405b BS |
4994 | } |
4995 | ||
5283d1ec TV |
4996 | void |
4997 | push_dummy_function (bool with_decl) | |
4998 | { | |
4999 | tree fn_decl, fn_type, fn_result_decl; | |
5000 | ||
5001 | gcc_assert (!in_dummy_function); | |
5002 | in_dummy_function = true; | |
5003 | ||
5004 | if (with_decl) | |
5005 | { | |
5006 | fn_type = build_function_type_list (void_type_node, NULL_TREE); | |
5007 | fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE, | |
5008 | fn_type); | |
5009 | fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL, | |
5010 | NULL_TREE, void_type_node); | |
5011 | DECL_RESULT (fn_decl) = fn_result_decl; | |
5012 | } | |
5013 | else | |
5014 | fn_decl = NULL_TREE; | |
5015 | ||
5016 | push_struct_function (fn_decl); | |
5017 | } | |
5018 | ||
b384405b BS |
5019 | /* Initialize the rtl expansion mechanism so that we can do simple things |
5020 | like generate sequences. This is used to provide a context during global | |
db2960f4 SL |
5021 | initialization of some passes. You must call expand_dummy_function_end |
5022 | to exit this context. */ | |
5023 | ||
b384405b | 5024 | void |
fa8db1f7 | 5025 | init_dummy_function_start (void) |
b384405b | 5026 | { |
5283d1ec | 5027 | push_dummy_function (false); |
db2960f4 | 5028 | prepare_function_start (); |
b384405b BS |
5029 | } |
5030 | ||
5031 | /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) | |
5032 | and initialize static variables for generating RTL for the statements | |
5033 | of the function. */ | |
5034 | ||
5035 | void | |
fa8db1f7 | 5036 | init_function_start (tree subr) |
b384405b | 5037 | { |
b9b5f433 JH |
5038 | /* Initialize backend, if needed. */ |
5039 | initialize_rtl (); | |
5040 | ||
db2960f4 | 5041 | prepare_function_start (); |
2c7eebae | 5042 | decide_function_section (subr); |
b384405b | 5043 | |
6f086dfc RS |
5044 | /* Warn if this value is an aggregate type, |
5045 | regardless of which calling convention we are using for it. */ | |
ccf08a6e DD |
5046 | if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)))) |
5047 | warning (OPT_Waggregate_return, "function returns an aggregate"); | |
49ad7cfa | 5048 | } |
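/* A user-level example (assumed) of what the warning above flags when
   -Waggregate-return is enabled: the function's return value is an
   aggregate rather than a scalar.  */

struct point { int x, y; };

struct point
make_point (int x, int y)     /* warning: function returns an aggregate */
{
  struct point p = { x, y };
  return p;
}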
5c7675e9 | 5049 | |
7d69de61 RH |
5050 | /* Expand code to verify the stack_protect_guard. This is invoked at |
5051 | the end of a function to be protected. */ | |
5052 | ||
b755446c | 5053 | void |
7d69de61 RH |
5054 | stack_protect_epilogue (void) |
5055 | { | |
5056 | tree guard_decl = targetm.stack_protect_guard (); | |
19f8b229 | 5057 | rtx_code_label *label = gen_label_rtx (); |
9a24a3cc | 5058 | rtx x, y; |
ebd765d4 | 5059 | rtx_insn *seq; |
7d69de61 | 5060 | |
08d4cc33 RH |
5061 | x = expand_normal (crtl->stack_protect_guard); |
5062 | y = expand_normal (guard_decl); | |
7d69de61 RH |
5063 | |
5064 | /* Allow the target to compare Y with X without leaking either into | |
5065 | a register. */ | |
ebd765d4 KC |
5066 | if (targetm.have_stack_protect_test () |
5067 | && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX)) | |
5068 | emit_insn (seq); | |
5069 | else | |
5070 | emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label); | |
7d69de61 RH |
5071 | |
5072 | /* The noreturn predictor has been moved to the tree level. The rtl-level | |
5073 | predictors estimate this branch about 20%, which isn't enough to get | |
5074 | things moved out of line. Since this is the only extant case of adding | |
5075 | a noreturn function at the rtl level, it doesn't seem worth doing anything |
5076 | except adding the prediction by hand. */ |
9a24a3cc | 5077 | rtx_insn *tmp = get_last_insn (); |
7d69de61 | 5078 | if (JUMP_P (tmp)) |
9a24a3cc | 5079 | predict_insn_def (tmp, PRED_NORETURN, TAKEN); |
7d69de61 | 5080 | |
b3c144a3 SB |
5081 | expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true); |
5082 | free_temp_slots (); | |
7d69de61 RH |
5083 | emit_label (label); |
5084 | } | |
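/* A conceptual, user-level analogue (assumed; the symbol names are those
   commonly used by the library-side stack-protector support, not taken from
   this file) of the check emitted above: the prologue stores a copy of the
   guard, and the epilogue compares it and calls the noreturn failure hook
   on mismatch.  */

extern unsigned long __stack_chk_guard;
extern void __stack_chk_fail (void);

void
protected_example (void)
{
  unsigned long canary = __stack_chk_guard;   /* stored by the prologue */
  char buf[64];
  buf[0] = 0;                                 /* ... function body ... */
  if (canary != __stack_chk_guard)            /* compare Y with X */
    __stack_chk_fail ();                      /* stack_protect_fail path */
}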
5085 | \f | |
6f086dfc RS |
5086 | /* Start the RTL for a new function, and set variables used for |
5087 | emitting RTL. | |
5088 | SUBR is the FUNCTION_DECL node. */ |
5091 | ||
5092 | void | |
b79c5284 | 5093 | expand_function_start (tree subr) |
6f086dfc | 5094 | { |
6f086dfc RS |
5095 | /* Make sure volatile mem refs aren't considered |
5096 | valid operands of arithmetic insns. */ | |
5097 | init_recog_no_volatile (); | |
5098 | ||
e3b5732b | 5099 | crtl->profile |
70f4f91c WC |
5100 | = (profile_flag |
5101 | && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr)); | |
5102 | ||
e3b5732b | 5103 | crtl->limit_stack |
a157febd GK |
5104 | = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr)); |
5105 | ||
52a11cbf RH |
5106 | /* Make the label for return statements to jump to. Do not special |
5107 | case machines with special return instructions -- they will be | |
5108 | handled later during jump, ifcvt, or epilogue creation. */ | |
6f086dfc | 5109 | return_label = gen_label_rtx (); |
6f086dfc RS |
5110 | |
5111 | /* Initialize rtx used to return the value. */ | |
5112 | /* Do this before assign_parms so that we copy the struct value address | |
5113 | before any library calls that assign parms might generate. */ | |
5114 | ||
5115 | /* Decide whether to return the value in memory or in a register. */ | |
1f9ceff1 | 5116 | tree res = DECL_RESULT (subr); |
1f9ceff1 | 5117 | if (aggregate_value_p (res, subr)) |
6f086dfc RS |
5118 | { |
5119 | /* Returning something that won't go in a register. */ | |
b3694847 | 5120 | rtx value_address = 0; |
6f086dfc RS |
5121 | |
5122 | #ifdef PCC_STATIC_STRUCT_RETURN | |
e3b5732b | 5123 | if (cfun->returns_pcc_struct) |
6f086dfc | 5124 | { |
1f9ceff1 | 5125 | int size = int_size_in_bytes (TREE_TYPE (res)); |
6f086dfc RS |
5126 | value_address = assemble_static_space (size); |
5127 | } | |
5128 | else | |
5129 | #endif | |
5130 | { | |
2225b57c | 5131 | rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2); |
6f086dfc RS |
5132 | /* Expect to be passed the address of a place to store the value. |
5133 | If it is passed as an argument, assign_parms will take care of | |
5134 | it. */ | |
61f71b34 | 5135 | if (sv) |
6f086dfc | 5136 | { |
f11a7b6d | 5137 | value_address = gen_reg_rtx (Pmode); |
61f71b34 | 5138 | emit_move_insn (value_address, sv); |
6f086dfc RS |
5139 | } |
5140 | } | |
5141 | if (value_address) | |
ccdecf58 | 5142 | { |
01c98570 | 5143 | rtx x = value_address; |
1f9ceff1 | 5144 | if (!DECL_BY_REFERENCE (res)) |
01c98570 | 5145 | { |
f11a7b6d AO |
5146 | x = gen_rtx_MEM (DECL_MODE (res), x); |
5147 | set_mem_attributes (x, res, 1); | |
01c98570 | 5148 | } |
f11a7b6d | 5149 | set_parm_rtl (res, x); |
ccdecf58 | 5150 | } |
6f086dfc | 5151 | } |
1f9ceff1 | 5152 | else if (DECL_MODE (res) == VOIDmode) |
6f086dfc | 5153 | /* If return mode is void, this decl rtl should not be used. */ |
f11a7b6d AO |
5154 | set_parm_rtl (res, NULL_RTX); |
5155 | else | |
a53e14c0 | 5156 | { |
d5bf1143 RH |
5157 | /* Compute the return values into a pseudo reg, which we will copy |
5158 | into the true return register after the cleanups are done. */ | |
1f9ceff1 | 5159 | tree return_type = TREE_TYPE (res); |
058c6384 EB |
5160 | |
5161 | /* If we may coalesce this result, make sure it has the expected mode | |
5162 | in case it was promoted. But we need not bother about BLKmode. */ | |
5163 | machine_mode promoted_mode | |
5164 | = flag_tree_coalesce_vars && is_gimple_reg (res) | |
5165 | ? promote_ssa_mode (ssa_default_def (cfun, res), NULL) | |
5166 | : BLKmode; | |
5167 | ||
5168 | if (promoted_mode != BLKmode) | |
5169 | set_parm_rtl (res, gen_reg_rtx (promoted_mode)); | |
1f9ceff1 AO |
5170 | else if (TYPE_MODE (return_type) != BLKmode |
5171 | && targetm.calls.return_in_msb (return_type)) | |
bef5d8b6 RS |
5172 | /* expand_function_end will insert the appropriate padding in |
5173 | this case. Use the return value's natural (unpadded) mode | |
5174 | within the function proper. */ | |
f11a7b6d | 5175 | set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type))); |
80a480ca | 5176 | else |
0bccc606 | 5177 | { |
bef5d8b6 RS |
5178 | /* In order to figure out what mode to use for the pseudo, we |
5179 | figure out what the mode of the eventual return register will | |
5180 | actually be, and use that. */ | |
1d636cc6 | 5181 | rtx hard_reg = hard_function_value (return_type, subr, 0, 1); |
bef5d8b6 RS |
5182 | |
5183 | /* Structures that are returned in registers are not | |
5184 | aggregate_value_p, so we may see a PARALLEL or a REG. */ | |
5185 | if (REG_P (hard_reg)) | |
f11a7b6d | 5186 | set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg))); |
bef5d8b6 RS |
5187 | else |
5188 | { | |
5189 | gcc_assert (GET_CODE (hard_reg) == PARALLEL); | |
f11a7b6d | 5190 | set_parm_rtl (res, gen_group_rtx (hard_reg)); |
bef5d8b6 | 5191 | } |
0bccc606 | 5192 | } |
a53e14c0 | 5193 | |
084a1106 JDA |
5194 | /* Set DECL_REGISTER flag so that expand_function_end will copy the |
5195 | result to the real return register(s). */ | |
1f9ceff1 | 5196 | DECL_REGISTER (res) = 1; |
d5e254e1 IE |
5197 | |
5198 | if (chkp_function_instrumented_p (current_function_decl)) | |
5199 | { | |
1f9ceff1 | 5200 | tree return_type = TREE_TYPE (res); |
d5e254e1 IE |
5201 | rtx bounds = targetm.calls.chkp_function_value_bounds (return_type, |
5202 | subr, 1); | |
1f9ceff1 | 5203 | SET_DECL_BOUNDS_RTL (res, bounds); |
d5e254e1 | 5204 | } |
a53e14c0 | 5205 | } |
6f086dfc RS |
5206 | |
5207 | /* Initialize rtx for parameters and local variables. | |
5208 | In some cases this requires emitting insns. */ | |
0d1416c6 | 5209 | assign_parms (subr); |
6f086dfc | 5210 | |
6de9cd9a DN |
5211 | /* If function gets a static chain arg, store it. */ |
5212 | if (cfun->static_chain_decl) | |
5213 | { | |
7e140280 | 5214 | tree parm = cfun->static_chain_decl; |
21afc57d | 5215 | rtx local, chain; |
f11a7b6d AO |
5216 | rtx_insn *insn; |
5217 | int unsignedp; | |
7e140280 | 5218 | |
f11a7b6d | 5219 | local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp)); |
531ca746 RH |
5220 | chain = targetm.calls.static_chain (current_function_decl, true); |
5221 | ||
5222 | set_decl_incoming_rtl (parm, chain, false); | |
f11a7b6d | 5223 | set_parm_rtl (parm, local); |
7e140280 | 5224 | mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); |
6de9cd9a | 5225 | |
f11a7b6d AO |
5226 | if (GET_MODE (local) != GET_MODE (chain)) |
5227 | { | |
5228 | convert_move (local, chain, unsignedp); | |
5229 | insn = get_last_insn (); | |
5230 | } | |
5231 | else | |
5232 | insn = emit_move_insn (local, chain); | |
531ca746 RH |
5233 | |
5234 | /* Mark the register as eliminable, similar to parameters. */ | |
5235 | if (MEM_P (chain) | |
5236 | && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0))) | |
7543f918 | 5237 | set_dst_reg_note (insn, REG_EQUIV, chain, local); |
3fd48b12 EB |
5238 | |
5239 | /* If we aren't optimizing, save the static chain onto the stack. */ | |
5240 | if (!optimize) | |
5241 | { | |
5242 | tree saved_static_chain_decl | |
5243 | = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL, | |
5244 | DECL_NAME (parm), TREE_TYPE (parm)); | |
5245 | rtx saved_static_chain_rtx | |
5246 | = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); | |
5247 | SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx); | |
5248 | emit_move_insn (saved_static_chain_rtx, chain); | |
5249 | SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl); | |
5250 | DECL_HAS_VALUE_EXPR_P (parm) = 1; | |
5251 | } | |
6de9cd9a DN |
5252 | } |
5253 | ||
5254 | /* If the function receives a non-local goto, then store the | |
5255 | bits we need to restore the frame pointer. */ | |
5256 | if (cfun->nonlocal_goto_save_area) | |
5257 | { | |
5258 | tree t_save; | |
5259 | rtx r_save; | |
5260 | ||
4846b435 | 5261 | tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0); |
ca5f4331 | 5262 | gcc_assert (DECL_RTL_SET_P (var)); |
6de9cd9a | 5263 | |
6bbec3e1 L |
5264 | t_save = build4 (ARRAY_REF, |
5265 | TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)), | |
3244e67d RS |
5266 | cfun->nonlocal_goto_save_area, |
5267 | integer_zero_node, NULL_TREE, NULL_TREE); | |
6de9cd9a | 5268 | r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE); |
6bbec3e1 | 5269 | gcc_assert (GET_MODE (r_save) == Pmode); |
f0c51a1e | 5270 | |
88280cf9 | 5271 | emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ()); |
6de9cd9a DN |
5272 | update_nonlocal_goto_save_area (); |
5273 | } | |
f0c51a1e | 5274 | |
6f086dfc RS |
5275 | /* The following was moved from init_function_start. |
5276 | The move is supposed to make sdb output more accurate. */ | |
5277 | /* Indicate the beginning of the function body, | |
5278 | as opposed to parm setup. */ | |
2e040219 | 5279 | emit_note (NOTE_INSN_FUNCTION_BEG); |
6f086dfc | 5280 | |
ede497cf SB |
5281 | gcc_assert (NOTE_P (get_last_insn ())); |
5282 | ||
6f086dfc RS |
5283 | parm_birth_insn = get_last_insn (); |
5284 | ||
e3b5732b | 5285 | if (crtl->profile) |
f6f315fe | 5286 | { |
f6f315fe | 5287 | #ifdef PROFILE_HOOK |
df696a75 | 5288 | PROFILE_HOOK (current_function_funcdef_no); |
411707f4 | 5289 | #endif |
f6f315fe | 5290 | } |
411707f4 | 5291 | |
6d3cc8f0 EB |
5292 | /* If we are doing generic stack checking, the probe should go here. */ |
5293 | if (flag_stack_check == GENERIC_STACK_CHECK) | |
ede497cf | 5294 | stack_check_probe_note = emit_note (NOTE_INSN_DELETED); |
6f086dfc RS |
5295 | } |
5296 | \f | |
5283d1ec TV |
5297 | void |
5298 | pop_dummy_function (void) | |
5299 | { | |
5300 | pop_cfun (); | |
5301 | in_dummy_function = false; | |
5302 | } | |
5303 | ||
49ad7cfa BS |
5304 | /* Undo the effects of init_dummy_function_start. */ |
5305 | void | |
fa8db1f7 | 5306 | expand_dummy_function_end (void) |
49ad7cfa | 5307 | { |
db2960f4 SL |
5308 | gcc_assert (in_dummy_function); |
5309 | ||
49ad7cfa BS |
5310 | /* End any sequences that failed to be closed due to syntax errors. */ |
5311 | while (in_sequence_p ()) | |
5312 | end_sequence (); | |
5313 | ||
5314 | /* Outside function body, can't compute type's actual size | |
5315 | until next function's body starts. */ | |
fa51b01b | 5316 | |
01d939e8 BS |
5317 | free_after_parsing (cfun); |
5318 | free_after_compilation (cfun); | |
5283d1ec | 5319 | pop_dummy_function (); |
49ad7cfa BS |
5320 | } |
5321 | ||
d5e254e1 | 5322 | /* Helper for diddle_return_value. */ |
bd695e1e RH |
5323 | |
5324 | void | |
d5e254e1 | 5325 | diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing) |
bd695e1e | 5326 | { |
c13fde05 RH |
5327 | if (! outgoing) |
5328 | return; | |
bd695e1e | 5329 | |
f8cfc6aa | 5330 | if (REG_P (outgoing)) |
c13fde05 RH |
5331 | (*doit) (outgoing, arg); |
5332 | else if (GET_CODE (outgoing) == PARALLEL) | |
5333 | { | |
5334 | int i; | |
bd695e1e | 5335 | |
c13fde05 RH |
5336 | for (i = 0; i < XVECLEN (outgoing, 0); i++) |
5337 | { | |
5338 | rtx x = XEXP (XVECEXP (outgoing, 0, i), 0); | |
5339 | ||
f8cfc6aa | 5340 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
c13fde05 | 5341 | (*doit) (x, arg); |
bd695e1e RH |
5342 | } |
5343 | } | |
5344 | } | |
5345 | ||
d5e254e1 IE |
5346 | /* Call DOIT for each hard register used as a return value from |
5347 | the current function. */ | |
5348 | ||
5349 | void | |
5350 | diddle_return_value (void (*doit) (rtx, void *), void *arg) | |
5351 | { | |
d5e254e1 | 5352 | diddle_return_value_1 (doit, arg, crtl->return_bnd); |
e9ae68af | 5353 | diddle_return_value_1 (doit, arg, crtl->return_rtx); |
d5e254e1 IE |
5354 | } |
5355 | ||
c13fde05 | 5356 | static void |
fa8db1f7 | 5357 | do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) |
c13fde05 | 5358 | { |
c41c1387 | 5359 | emit_clobber (reg); |
c13fde05 RH |
5360 | } |
5361 | ||
5362 | void | |
fa8db1f7 | 5363 | clobber_return_register (void) |
c13fde05 RH |
5364 | { |
5365 | diddle_return_value (do_clobber_return_reg, NULL); | |
9c65bbf4 JH |
5366 | |
5367 | /* In case we do use pseudo to return value, clobber it too. */ | |
5368 | if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) | |
5369 | { | |
5370 | tree decl_result = DECL_RESULT (current_function_decl); | |
5371 | rtx decl_rtl = DECL_RTL (decl_result); | |
5372 | if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER) | |
5373 | { | |
5374 | do_clobber_return_reg (decl_rtl, NULL); | |
5375 | } | |
5376 | } | |
c13fde05 RH |
5377 | } |
5378 | ||
5379 | static void | |
fa8db1f7 | 5380 | do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) |
c13fde05 | 5381 | { |
c41c1387 | 5382 | emit_use (reg); |
c13fde05 RH |
5383 | } |
5384 | ||
0bf8477d | 5385 | static void |
fa8db1f7 | 5386 | use_return_register (void) |
c13fde05 RH |
5387 | { |
5388 | diddle_return_value (do_use_return_reg, NULL); | |
5389 | } | |
5390 | ||
862d0b35 DN |
5391 | /* Set the location of the insn chain starting at INSN to LOC. */ |
5392 | ||
5393 | static void | |
dc01c3d1 | 5394 | set_insn_locations (rtx_insn *insn, int loc) |
862d0b35 | 5395 | { |
dc01c3d1 | 5396 | while (insn != NULL) |
862d0b35 DN |
5397 | { |
5398 | if (INSN_P (insn)) | |
5399 | INSN_LOCATION (insn) = loc; | |
5400 | insn = NEXT_INSN (insn); | |
5401 | } | |
5402 | } | |
5403 | ||
71c0e7fc | 5404 | /* Generate RTL for the end of the current function. */ |
6f086dfc RS |
5405 | |
5406 | void | |
fa8db1f7 | 5407 | expand_function_end (void) |
6f086dfc | 5408 | { |
964be02f RH |
5409 | /* If arg_pointer_save_area was referenced only from a nested |
5410 | function, we will not have initialized it yet. Do that now. */ | |
e3b5732b | 5411 | if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init) |
bd60bab2 | 5412 | get_arg_pointer_save_area (); |
964be02f | 5413 | |
b38f3813 | 5414 | /* If we are doing generic stack checking and this function makes calls, |
11044f66 RK |
5415 | do a stack probe at the start of the function to ensure we have enough |
5416 | space for another stack frame. */ | |
b38f3813 | 5417 | if (flag_stack_check == GENERIC_STACK_CHECK) |
11044f66 | 5418 | { |
691fe203 | 5419 | rtx_insn *insn, *seq; |
11044f66 RK |
5420 | |
5421 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
4b4bf941 | 5422 | if (CALL_P (insn)) |
11044f66 | 5423 | { |
c35af30f | 5424 | rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE); |
11044f66 | 5425 | start_sequence (); |
c35af30f EB |
5426 | if (STACK_CHECK_MOVING_SP) |
5427 | anti_adjust_stack_and_probe (max_frame_size, true); | |
5428 | else | |
5429 | probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size); | |
11044f66 RK |
5430 | seq = get_insns (); |
5431 | end_sequence (); | |
5368224f | 5432 | set_insn_locations (seq, prologue_location); |
ede497cf | 5433 | emit_insn_before (seq, stack_check_probe_note); |
11044f66 RK |
5434 | break; |
5435 | } | |
5436 | } | |
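/* Illustrative note, an editorial sketch rather than authoritative
   documentation: this probe insertion only happens for generic stack
   checking (-fstack-check=generic).  For a function that makes a call,
   e.g.

     void f (void) { g (); }

   the sequence built above lands at the stack_check_probe_note
   placeholder emitted earlier, so that roughly STACK_CHECK_MAX_FRAME_SIZE
   bytes beyond the current frame are probed before the first call is
   reached.  */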
5437 | ||
6f086dfc RS |
5438 | /* End any sequences that failed to be closed due to syntax errors. */ |
5439 | while (in_sequence_p ()) | |
5f4f0e22 | 5440 | end_sequence (); |
6f086dfc | 5441 | |
6f086dfc RS |
5442 | clear_pending_stack_adjust (); |
5443 | do_pending_stack_adjust (); | |
5444 | ||
6f086dfc RS |
5445 | /* Output a line number for the end of the function. |
5446 | SDB depends on this. */ | |
5368224f | 5447 | set_curr_insn_location (input_location); |
6f086dfc | 5448 | |
fbffc70a | 5449 | /* Before the return label (if any), clobber the return |
a1f300c0 | 5450 | registers so that they are not propagated live to the rest of |
fbffc70a GK |
5451 | the function. This can only happen with functions that drop |
5452 | through; if there had been a return statement, there would | |
932f0847 JH |
5453 | have either been a return rtx, or a jump to the return label. |
5454 | ||
5455 | We delay the actual code generation until after the current_function_value_rtx |
5456 | is computed. */ |
e67d1102 | 5457 | rtx_insn *clobber_after = get_last_insn (); |
fbffc70a | 5458 | |
526c334b KH |
5459 | /* Output the label for the actual return from the function. */ |
5460 | emit_label (return_label); | |
6f086dfc | 5461 | |
677f3fa8 | 5462 | if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ) |
815eb8f0 AM |
5463 | { |
5464 | /* Let except.c know where it should emit the call to unregister | |
5465 | the function context for sjlj exceptions. */ | |
5466 | if (flag_exceptions) | |
5467 | sjlj_emit_function_exit_after (get_last_insn ()); | |
5468 | } | |
6fb5fa3c DB |
5469 | else |
5470 | { | |
5471 | /* We want to ensure that instructions that may trap are not | |
5472 | moved into the epilogue by scheduling, because we don't | |
5473 | always emit unwind information for the epilogue. */ | |
8f4f502f | 5474 | if (cfun->can_throw_non_call_exceptions) |
6fb5fa3c DB |
5475 | emit_insn (gen_blockage ()); |
5476 | } | |
0b59e81e | 5477 | |
652b0932 RH |
5478 | /* If this is an implementation of throw, do what's necessary to |
5479 | communicate between __builtin_eh_return and the epilogue. */ | |
5480 | expand_eh_return (); | |
5481 | ||
3e4eac3f RH |
5482 | /* If scalar return value was computed in a pseudo-reg, or was a named |
5483 | return value that got dumped to the stack, copy that to the hard | |
5484 | return register. */ | |
19e7881c | 5485 | if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) |
6f086dfc | 5486 | { |
3e4eac3f RH |
5487 | tree decl_result = DECL_RESULT (current_function_decl); |
5488 | rtx decl_rtl = DECL_RTL (decl_result); | |
5489 | ||
5490 | if (REG_P (decl_rtl) | |
5491 | ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER | |
5492 | : DECL_REGISTER (decl_result)) | |
5493 | { | |
38173d38 | 5494 | rtx real_decl_rtl = crtl->return_rtx; |
6f086dfc | 5495 | |
ce5e43d0 | 5496 | /* This should be set in assign_parms. */ |
0bccc606 | 5497 | gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl)); |
3e4eac3f RH |
5498 | |
5499 | /* If this is a BLKmode structure being returned in registers, | |
5500 | then use the mode computed in expand_return. Note that if | |
797a6ac1 | 5501 | decl_rtl is memory, then its mode may have been changed, |
38173d38 | 5502 | but that of crtl->return_rtx has not. */ |
3e4eac3f | 5503 | if (GET_MODE (real_decl_rtl) == BLKmode) |
ce5e43d0 | 5504 | PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl)); |
3e4eac3f | 5505 | |
bef5d8b6 RS |
5506 | /* If a non-BLKmode return value should be padded at the least |
5507 | significant end of the register, shift it left by the appropriate | |
5508 | amount. BLKmode results are handled using the group load/store | |
5509 | machinery. */ | |
5510 | if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode | |
66de4d7c | 5511 | && REG_P (real_decl_rtl) |
bef5d8b6 RS |
5512 | && targetm.calls.return_in_msb (TREE_TYPE (decl_result))) |
5513 | { | |
5514 | emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl), | |
5515 | REGNO (real_decl_rtl)), | |
5516 | decl_rtl); | |
5517 | shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl); | |
5518 | } | |
aa570f54 | 5519 | else if (GET_CODE (real_decl_rtl) == PARALLEL) |
084a1106 JDA |
5520 | { |
5521 | /* If expand_function_start has created a PARALLEL for decl_rtl, | |
5522 | move the result to the real return registers. Otherwise, do | |
5523 | a group load from decl_rtl for a named return. */ | |
5524 | if (GET_CODE (decl_rtl) == PARALLEL) | |
5525 | emit_group_move (real_decl_rtl, decl_rtl); | |
5526 | else | |
5527 | emit_group_load (real_decl_rtl, decl_rtl, | |
6e985040 | 5528 | TREE_TYPE (decl_result), |
084a1106 JDA |
5529 | int_size_in_bytes (TREE_TYPE (decl_result))); |
5530 | } | |
652b0932 RH |
5531 | /* In the case of complex integer modes smaller than a word, we'll |
5532 | need to generate some non-trivial bitfield insertions. Do that | |
5533 | on a pseudo and not the hard register. */ | |
5534 | else if (GET_CODE (decl_rtl) == CONCAT | |
5535 | && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT | |
5536 | && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD) | |
5537 | { | |
5538 | int old_generating_concat_p; | |
5539 | rtx tmp; | |
5540 | ||
5541 | old_generating_concat_p = generating_concat_p; | |
5542 | generating_concat_p = 0; | |
5543 | tmp = gen_reg_rtx (GET_MODE (decl_rtl)); | |
5544 | generating_concat_p = old_generating_concat_p; | |
5545 | ||
5546 | emit_move_insn (tmp, decl_rtl); | |
5547 | emit_move_insn (real_decl_rtl, tmp); | |
5548 | } | |
fc5851fe AO |
5549 | /* If a named return value dumped decl_rtl to memory, then |
5550 | we may need to re-do the PROMOTE_MODE signed/unsigned | |
5551 | extension. */ | |
5552 | else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl)) | |
5553 | { | |
5554 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result)); | |
5555 | promote_function_mode (TREE_TYPE (decl_result), | |
5556 | GET_MODE (decl_rtl), &unsignedp, | |
5557 | TREE_TYPE (current_function_decl), 1); | |
5558 | ||
5559 | convert_move (real_decl_rtl, decl_rtl, unsignedp); | |
5560 | } | |
3e4eac3f RH |
5561 | else |
5562 | emit_move_insn (real_decl_rtl, decl_rtl); | |
3e4eac3f | 5563 | } |
6f086dfc RS |
5564 | } |
5565 | ||
5566 | /* If returning a structure, arrange to return the address of the value | |
5567 | in a place where debuggers expect to find it. | |
5568 | ||
5569 | If returning a structure PCC style, | |
5570 | the caller also depends on this value. | |
e3b5732b | 5571 | And cfun->returns_pcc_struct is not necessarily set. */ |
e0d14c39 BS |
5572 | if ((cfun->returns_struct || cfun->returns_pcc_struct) |
5573 | && !targetm.calls.omit_struct_return_reg) | |
6f086dfc | 5574 | { |
cc77ae10 | 5575 | rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl)); |
6f086dfc | 5576 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); |
cc77ae10 JM |
5577 | rtx outgoing; |
5578 | ||
5579 | if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))) | |
5580 | type = TREE_TYPE (type); | |
5581 | else | |
5582 | value_address = XEXP (value_address, 0); | |
5583 | ||
1d636cc6 RG |
5584 | outgoing = targetm.calls.function_value (build_pointer_type (type), |
5585 | current_function_decl, true); | |
6f086dfc RS |
5586 | |
5587 | /* Mark this as a function return value so integrate will delete the | |
5588 | assignment and USE below when inlining this function. */ | |
5589 | REG_FUNCTION_VALUE_P (outgoing) = 1; | |
5590 | ||
d1608933 | 5591 | /* The address may be ptr_mode and OUTGOING may be Pmode. */ |
5ae6cd0d MM |
5592 | value_address = convert_memory_address (GET_MODE (outgoing), |
5593 | value_address); | |
d1608933 | 5594 | |
6f086dfc | 5595 | emit_move_insn (outgoing, value_address); |
d1608933 RK |
5596 | |
5597 | /* Show the return register used to hold the result (in this case the address |
5598 | of the result). */ |
38173d38 | 5599 | crtl->return_rtx = outgoing; |
6f086dfc RS |
5600 | } |
5601 | ||
79c7fda6 JJ |
5602 | /* Emit the actual code to clobber the return register. Don't emit |
5603 | it if clobber_after is a barrier; in that case the previous basic block |
5604 | certainly doesn't fall thru into the exit block. */ |
5605 | if (!BARRIER_P (clobber_after)) | |
5606 | { | |
79c7fda6 JJ |
5607 | start_sequence (); |
5608 | clobber_return_register (); | |
e67d1102 | 5609 | rtx_insn *seq = get_insns (); |
79c7fda6 | 5610 | end_sequence (); |
932f0847 | 5611 | |
79c7fda6 JJ |
5612 | emit_insn_after (seq, clobber_after); |
5613 | } | |
932f0847 | 5614 | |
609c3937 | 5615 | /* Output the label for the naked return from the function. */ |
4c33221c UW |
5616 | if (naked_return_label) |
5617 | emit_label (naked_return_label); | |
6e3077c6 | 5618 | |
25108646 AH |
5619 | /* @@@ This is a kludge. We want to ensure that instructions that |
5620 | may trap are not moved into the epilogue by scheduling, because | |
56d17681 | 5621 | we don't always emit unwind information for the epilogue. */ |
f0a0390e | 5622 | if (cfun->can_throw_non_call_exceptions |
677f3fa8 | 5623 | && targetm_common.except_unwind_info (&global_options) != UI_SJLJ) |
56d17681 | 5624 | emit_insn (gen_blockage ()); |
25108646 | 5625 | |
7d69de61 | 5626 | /* If stack protection is enabled for this function, check the guard. */ |
cb91fab0 | 5627 | if (crtl->stack_protect_guard) |
7d69de61 RH |
5628 | stack_protect_epilogue (); |
5629 | ||
40184445 BS |
5630 | /* If we had calls to alloca, and this machine needs |
5631 | an accurate stack pointer to exit the function, | |
5632 | insert some code to save and restore the stack pointer. */ | |
5633 | if (! EXIT_IGNORE_STACK | |
e3b5732b | 5634 | && cfun->calls_alloca) |
40184445 | 5635 | { |
e67d1102 | 5636 | rtx tem = 0; |
40184445 | 5637 | |
9eac0f2a RH |
5638 | start_sequence (); |
5639 | emit_stack_save (SAVE_FUNCTION, &tem); | |
e67d1102 | 5640 | rtx_insn *seq = get_insns (); |
9eac0f2a RH |
5641 | end_sequence (); |
5642 | emit_insn_before (seq, parm_birth_insn); | |
5643 | ||
5644 | emit_stack_restore (SAVE_FUNCTION, tem); | |
40184445 BS |
5645 | } |
5646 | ||
c13fde05 RH |
5647 | /* ??? This should no longer be necessary since stupid is no longer with |
5648 | us, but there are some parts of the compiler (eg reload_combine, and | |
5649 | sh mach_dep_reorg) that still try and compute their own lifetime info | |
5650 | instead of using the general framework. */ | |
5651 | use_return_register (); | |
6f086dfc | 5652 | } |
278ed218 RH |
5653 | |
5654 | rtx | |
bd60bab2 | 5655 | get_arg_pointer_save_area (void) |
278ed218 | 5656 | { |
bd60bab2 | 5657 | rtx ret = arg_pointer_save_area; |
278ed218 RH |
5658 | |
5659 | if (! ret) | |
5660 | { | |
bd60bab2 JH |
5661 | ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); |
5662 | arg_pointer_save_area = ret; | |
964be02f RH |
5663 | } |
5664 | ||
e3b5732b | 5665 | if (! crtl->arg_pointer_save_area_init) |
964be02f | 5666 | { |
797a6ac1 | 5667 | /* Save the arg pointer at the beginning of the function. The |
964be02f | 5668 | generated stack slot may not be a valid memory address, so we |
278ed218 RH |
5669 | have to check it and fix it if necessary. */ |
5670 | start_sequence (); | |
1a8cb155 | 5671 | emit_move_insn (validize_mem (copy_rtx (ret)), |
2e3f842f | 5672 | crtl->args.internal_arg_pointer); |
e67d1102 | 5673 | rtx_insn *seq = get_insns (); |
278ed218 RH |
5674 | end_sequence (); |
5675 | ||
964be02f | 5676 | push_topmost_sequence (); |
1cb2fc7b | 5677 | emit_insn_after (seq, entry_of_function ()); |
964be02f | 5678 | pop_topmost_sequence (); |
c1d9a70a ILT |
5679 | |
5680 | crtl->arg_pointer_save_area_init = true; | |
278ed218 RH |
5681 | } |
5682 | ||
5683 | return ret; | |
5684 | } | |
bdac5f58 | 5685 | \f |
cd9c1ca8 RH |
5686 | /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP |
5687 | for the first time. */ | |
bdac5f58 | 5688 | |
0a1c58a2 | 5689 | static void |
d242408f | 5690 | record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp) |
bdac5f58 | 5691 | { |
dc01c3d1 | 5692 | rtx_insn *tmp; |
d242408f | 5693 | hash_table<insn_cache_hasher> *hash = *hashp; |
0a1c58a2 | 5694 | |
cd9c1ca8 | 5695 | if (hash == NULL) |
d242408f | 5696 | *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17); |
cd9c1ca8 RH |
5697 | |
5698 | for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp)) | |
5699 | { | |
d242408f | 5700 | rtx *slot = hash->find_slot (tmp, INSERT); |
cd9c1ca8 RH |
5701 | gcc_assert (*slot == NULL); |
5702 | *slot = tmp; | |
5703 | } | |
5704 | } | |
5705 | ||
cd400280 RH |
5706 | /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a |
5707 | basic block, splitting or peepholes. If INSN is a prologue or epilogue | |
5708 | insn, then record COPY as well. */ | |
cd9c1ca8 RH |
5709 | |
5710 | void | |
cd400280 | 5711 | maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy) |
cd9c1ca8 | 5712 | { |
d242408f TS |
5713 | hash_table<insn_cache_hasher> *hash; |
5714 | rtx *slot; | |
cd9c1ca8 | 5715 | |
cd400280 | 5716 | hash = epilogue_insn_hash; |
d242408f | 5717 | if (!hash || !hash->find (insn)) |
cd400280 RH |
5718 | { |
5719 | hash = prologue_insn_hash; | |
d242408f | 5720 | if (!hash || !hash->find (insn)) |
cd400280 RH |
5721 | return; |
5722 | } | |
cd9c1ca8 | 5723 | |
d242408f | 5724 | slot = hash->find_slot (copy, INSERT); |
cd9c1ca8 RH |
5725 | gcc_assert (*slot == NULL); |
5726 | *slot = copy; | |
bdac5f58 TW |
5727 | } |
5728 | ||
cd9c1ca8 RH |
5729 | /* Determine if any INSNs in HASH are, or are part of, INSN. Because |
5730 | we can be running after reorg, SEQUENCE rtl is possible. */ | |
bdac5f58 | 5731 | |
cd9c1ca8 | 5732 | static bool |
d242408f | 5733 | contains (const_rtx insn, hash_table<insn_cache_hasher> *hash) |
bdac5f58 | 5734 | { |
cd9c1ca8 RH |
5735 | if (hash == NULL) |
5736 | return false; | |
bdac5f58 | 5737 | |
cd9c1ca8 | 5738 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
bdac5f58 | 5739 | { |
e0944870 | 5740 | rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn)); |
cd9c1ca8 | 5741 | int i; |
e0944870 | 5742 | for (i = seq->len () - 1; i >= 0; i--) |
d242408f | 5743 | if (hash->find (seq->element (i))) |
cd9c1ca8 RH |
5744 | return true; |
5745 | return false; | |
bdac5f58 | 5746 | } |
cd9c1ca8 | 5747 | |
d242408f | 5748 | return hash->find (const_cast<rtx> (insn)) != NULL; |
bdac5f58 | 5749 | } |
5c7675e9 RH |
5750 | |
5751 | int | |
4f588890 | 5752 | prologue_epilogue_contains (const_rtx insn) |
5c7675e9 | 5753 | { |
cd9c1ca8 | 5754 | if (contains (insn, prologue_insn_hash)) |
5c7675e9 | 5755 | return 1; |
cd9c1ca8 | 5756 | if (contains (insn, epilogue_insn_hash)) |
5c7675e9 RH |
5757 | return 1; |
5758 | return 0; | |
5759 | } | |
bdac5f58 | 5760 | |
69732dcb | 5761 | |
387748de AM |
5762 | /* Set JUMP_LABEL for a return insn. */ |
5763 | ||
5764 | void | |
d38ff8dd | 5765 | set_return_jump_label (rtx_insn *returnjump) |
387748de AM |
5766 | { |
5767 | rtx pat = PATTERN (returnjump); | |
5768 | if (GET_CODE (pat) == PARALLEL) | |
5769 | pat = XVECEXP (pat, 0, 0); | |
5770 | if (ANY_RETURN_P (pat)) | |
5771 | JUMP_LABEL (returnjump) = pat; | |
5772 | else | |
5773 | JUMP_LABEL (returnjump) = ret_rtx; | |
5774 | } | |
5775 | ||
fb42ed99 SB |
5776 | /* Return a sequence to be used as the split prologue for the current |
5777 | function, or NULL. */ | |
5778 | ||
5779 | static rtx_insn * | |
5780 | make_split_prologue_seq (void) | |
5781 | { | |
5782 | if (!flag_split_stack | |
5783 | || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))) | |
5784 | return NULL; | |
5785 | ||
5786 | start_sequence (); | |
5787 | emit_insn (targetm.gen_split_stack_prologue ()); | |
5788 | rtx_insn *seq = get_insns (); | |
5789 | end_sequence (); | |
5790 | ||
5791 | record_insns (seq, NULL, &prologue_insn_hash); | |
5792 | set_insn_locations (seq, prologue_location); | |
5793 | ||
5794 | return seq; | |
5795 | } | |
5796 | ||
5797 | /* Return a sequence to be used as the prologue for the current function, | |
5798 | or NULL. */ | |
5799 | ||
5800 | static rtx_insn * | |
5801 | make_prologue_seq (void) | |
5802 | { | |
5803 | if (!targetm.have_prologue ()) | |
5804 | return NULL; | |
5805 | ||
5806 | start_sequence (); | |
5807 | rtx_insn *seq = targetm.gen_prologue (); | |
5808 | emit_insn (seq); | |
5809 | ||
5810 | /* Insert an explicit USE for the frame pointer | |
5811 | if profiling is on and the frame pointer is required. */ |
5812 | if (crtl->profile && frame_pointer_needed) | |
5813 | emit_use (hard_frame_pointer_rtx); | |
5814 | ||
5815 | /* Retain a map of the prologue insns. */ | |
5816 | record_insns (seq, NULL, &prologue_insn_hash); | |
5817 | emit_note (NOTE_INSN_PROLOGUE_END); | |
5818 | ||
5819 | /* Ensure that instructions are not moved into the prologue when | |
5820 | profiling is on. The call to the profiling routine can be | |
5821 | emitted within the live range of a call-clobbered register. */ | |
5822 | if (!targetm.profile_before_prologue () && crtl->profile) | |
5823 | emit_insn (gen_blockage ()); | |
5824 | ||
5825 | seq = get_insns (); | |
5826 | end_sequence (); | |
5827 | set_insn_locations (seq, prologue_location); | |
5828 | ||
5829 | return seq; | |
5830 | } | |
5831 | ||
5832 | /* Return a sequence to be used as the epilogue for the current function, | |
5833 | or NULL. */ | |
5834 | ||
5835 | static rtx_insn * | |
33fec8d5 | 5836 | make_epilogue_seq (void) |
fb42ed99 SB |
5837 | { |
5838 | if (!targetm.have_epilogue ()) | |
5839 | return NULL; | |
5840 | ||
5841 | start_sequence (); | |
33fec8d5 | 5842 | emit_note (NOTE_INSN_EPILOGUE_BEG); |
fb42ed99 SB |
5843 | rtx_insn *seq = targetm.gen_epilogue (); |
5844 | if (seq) | |
5845 | emit_jump_insn (seq); | |
5846 | ||
5847 | /* Retain a map of the epilogue insns. */ | |
5848 | record_insns (seq, NULL, &epilogue_insn_hash); | |
5849 | set_insn_locations (seq, epilogue_location); | |
5850 | ||
5851 | seq = get_insns (); | |
5852 | rtx_insn *returnjump = get_last_insn (); | |
5853 | end_sequence (); | |
5854 | ||
5855 | if (JUMP_P (returnjump)) | |
5856 | set_return_jump_label (returnjump); | |
5857 | ||
5858 | return seq; | |
5859 | } | |
5860 | ||
ffe14686 | 5861 | |
9faa82d8 | 5862 | /* Generate the prologue and epilogue RTL if the machine supports it. Thread |
bdac5f58 | 5863 | this into place with notes indicating where the prologue ends and where |
484db665 BS |
5864 | the epilogue begins. Update the basic block information when possible. |
5865 | ||
5866 | Notes on epilogue placement: | |
5867 | There are several kinds of edges to the exit block: | |
5868 | * a single fallthru edge from LAST_BB | |
5869 | * possibly, edges from blocks containing sibcalls | |
5870 | * possibly, fake edges from infinite loops | |
5871 | ||
5872 | The epilogue is always emitted on the fallthru edge from the last basic | |
5873 | block in the function, LAST_BB, into the exit block. | |
5874 | ||
5875 | If LAST_BB is empty except for a label, it is the target of every | |
5876 | other basic block in the function that ends in a return. If a | |
5877 | target has a return or simple_return pattern (possibly with | |
5878 | conditional variants), these basic blocks can be changed so that a | |
5879 | return insn is emitted into them, and their target is adjusted to | |
5880 | the real exit block. | |
5881 | ||
5882 | Notes on shrink wrapping: We implement a fairly conservative | |
5883 | version of shrink-wrapping rather than the textbook one. We only | |
5884 | generate a single prologue and a single epilogue. This is | |
5885 | sufficient to catch a number of interesting cases involving early | |
5886 | exits. | |
5887 | ||
5888 | First, we identify the blocks that require the prologue to occur before | |
5889 | them. These are the ones that modify a call-saved register, or reference | |
5890 | any of the stack or frame pointer registers. To simplify things, we then | |
5891 | mark everything reachable from these blocks as also requiring a prologue. | |
5892 | This takes care of loops automatically, and avoids the need to examine | |
5893 | whether MEMs reference the frame, since it is sufficient to check for | |
5894 | occurrences of the stack or frame pointer. | |
5895 | ||
5896 | We then compute the set of blocks for which the need for a prologue | |
5897 | is anticipatable (borrowing terminology from the shrink-wrapping | |
5898 | description in Muchnick's book). These are the blocks which either | |
5899 | require a prologue themselves, or those that have only successors | |
5900 | where the prologue is anticipatable. The prologue needs to be | |
5901 | inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1 | |
5902 | is not. For the moment, we ensure that only one such edge exists. | |
5903 | ||
5904 | The epilogue is placed as described above, but we make a | |
5905 | distinction between inserting return and simple_return patterns | |
5906 | when modifying other blocks that end in a return. Blocks that end | |
5907 | in a sibcall omit the sibcall_epilogue if the block is not in | |
5908 | ANTIC. */ | |
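/* Illustrative sketch, not part of the pass itself: the kind of function
   this conservative shrink-wrapping helps.  heavy_work is a hypothetical
   callee used purely for illustration.

     int
     f (int x)
     {
       if (x == 0)
         return 0;               // fast path: needs no frame setup
       return heavy_work (x);    // only this path needs the prologue
     }

   The early-return block neither modifies a call-saved register nor
   references the stack or frame pointer, so the prologue can be inserted
   on the edge leading to the slow path rather than at function entry.  */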
bdac5f58 | 5909 | |
c81b4a0e | 5910 | void |
6fb5fa3c | 5911 | thread_prologue_and_epilogue_insns (void) |
bdac5f58 | 5912 | { |
484db665 | 5913 | df_analyze (); |
e881bb1b | 5914 | |
7458026b ILT |
5915 | /* Can't deal with multiple successors of the entry block at the |
5916 | moment. Function should always have at least one entry | |
5917 | point. */ | |
fefa31b5 | 5918 | gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun))); |
33fec8d5 SB |
5919 | |
5920 | edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | |
5921 | edge orig_entry_edge = entry_edge; | |
484db665 | 5922 | |
fb42ed99 SB |
5923 | rtx_insn *split_prologue_seq = make_split_prologue_seq (); |
5924 | rtx_insn *prologue_seq = make_prologue_seq (); | |
33fec8d5 | 5925 | rtx_insn *epilogue_seq = make_epilogue_seq (); |
484db665 | 5926 | |
484db665 BS |
5927 | /* Try to perform a kind of shrink-wrapping, making sure the |
5928 | prologue/epilogue is emitted only around those parts of the | |
5929 | function that require it. */ | |
5930 | ||
33fec8d5 | 5931 | try_shrink_wrapping (&entry_edge, prologue_seq); |
484db665 | 5932 | |
19d3c25c | 5933 | |
fefa31b5 | 5934 | rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)); |
484db665 | 5935 | |
cd9c1ca8 RH |
5936 | /* A small fib -- epilogue is not yet completed, but we wish to re-use |
5937 | this marker for the splits of EH_RETURN patterns, and nothing else | |
5938 | uses the flag in the meantime. */ | |
5939 | epilogue_completed = 1; | |
5940 | ||
cd9c1ca8 RH |
5941 | /* Find non-fallthru edges that end with EH_RETURN instructions. On |
5942 | some targets, these get split to a special version of the epilogue | |
5943 | code. In order to be able to properly annotate these with unwind | |
5944 | info, try to split them now. If we get a valid split, drop an | |
5945 | EPILOGUE_BEG note and mark the insns as epilogue insns. */ | |
33fec8d5 SB |
5946 | edge e; |
5947 | edge_iterator ei; | |
fefa31b5 | 5948 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
cd9c1ca8 | 5949 | { |
691fe203 | 5950 | rtx_insn *prev, *last, *trial; |
cd9c1ca8 RH |
5951 | |
5952 | if (e->flags & EDGE_FALLTHRU) | |
5953 | continue; | |
5954 | last = BB_END (e->src); | |
5955 | if (!eh_returnjump_p (last)) | |
5956 | continue; | |
5957 | ||
5958 | prev = PREV_INSN (last); | |
5959 | trial = try_split (PATTERN (last), last, 1); | |
5960 | if (trial == last) | |
5961 | continue; | |
5962 | ||
5963 | record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash); | |
5964 | emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev); | |
5965 | } | |
cd9c1ca8 | 5966 | |
33fec8d5 | 5967 | edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); |
cc1f86f3 | 5968 | |
33fec8d5 | 5969 | if (exit_fallthru_edge) |
623a66fa | 5970 | { |
33fec8d5 SB |
5971 | if (epilogue_seq) |
5972 | { | |
5973 | insert_insn_on_edge (epilogue_seq, exit_fallthru_edge); | |
e93044fc | 5974 | commit_edge_insertions (); |
33fec8d5 SB |
5975 | |
5976 | /* The epilogue insns we inserted may cause the exit edge to no longer | |
5977 | be fallthru. */ | |
5978 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) | |
5979 | { | |
5980 | if (((e->flags & EDGE_FALLTHRU) != 0) | |
5981 | && returnjump_p (BB_END (e->src))) | |
5982 | e->flags &= ~EDGE_FALLTHRU; | |
5983 | } | |
5984 | } | |
5985 | else if (next_active_insn (BB_END (exit_fallthru_edge->src))) | |
5986 | { | |
5987 | /* We have a fall-through edge to the exit block, the source is not | |
5988 | at the end of the function, and there will be an assembler epilogue | |
5989 | at the end of the function. | |
5990 | We can't use force_nonfallthru here, because that would try to | |
5991 | use return. Inserting a jump 'by hand' is extremely messy, so | |
5992 | we take advantage of cfg_layout_finalize using | |
5993 | fixup_fallthru_exit_predecessor. */ | |
5994 | cfg_layout_initialize (0); | |
5995 | basic_block cur_bb; | |
5996 | FOR_EACH_BB_FN (cur_bb, cfun) | |
5997 | if (cur_bb->index >= NUM_FIXED_BLOCKS | |
5998 | && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS) | |
5999 | cur_bb->aux = cur_bb->next_bb; | |
6000 | cfg_layout_finalize (); | |
6001 | } | |
623a66fa | 6002 | } |
cf103ca4 | 6003 | |
33fec8d5 | 6004 | /* Insert the prologue. */ |
484db665 | 6005 | |
33fec8d5 | 6006 | rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
e881bb1b | 6007 | |
33fec8d5 | 6008 | if (split_prologue_seq || prologue_seq) |
30a873c3 | 6009 | { |
33fec8d5 SB |
6010 | if (split_prologue_seq) |
6011 | insert_insn_on_edge (split_prologue_seq, orig_entry_edge); | |
6012 | ||
6013 | if (prologue_seq) | |
6014 | insert_insn_on_edge (prologue_seq, entry_edge); | |
cf103ca4 | 6015 | |
30a873c3 ZD |
6016 | commit_edge_insertions (); |
6017 | ||
cf103ca4 | 6018 | /* Look for basic blocks within the prologue insns. */ |
7ba9e72d | 6019 | auto_sbitmap blocks (last_basic_block_for_fn (cfun)); |
f61e445a | 6020 | bitmap_clear (blocks); |
d7c028c0 LC |
6021 | bitmap_set_bit (blocks, entry_edge->dest->index); |
6022 | bitmap_set_bit (blocks, orig_entry_edge->dest->index); | |
cf103ca4 | 6023 | find_many_sub_basic_blocks (blocks); |
30a873c3 | 6024 | } |
0a1c58a2 | 6025 | |
33fec8d5 SB |
6026 | default_rtl_profile (); |
6027 | ||
0a1c58a2 | 6028 | /* Emit sibling epilogues before any sibling call sites. */ |
33fec8d5 SB |
6029 | for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); |
6030 | (e = ei_safe_edge (ei)); | |
6031 | ei_next (&ei)) | |
0a1c58a2 | 6032 | { |
33fec8d5 SB |
6033 | /* Skip those already handled, the ones that run without prologue. */ |
6034 | if (e->flags & EDGE_IGNORE) | |
628f6a4e | 6035 | { |
33fec8d5 | 6036 | e->flags &= ~EDGE_IGNORE; |
628f6a4e BE |
6037 | continue; |
6038 | } | |
0a1c58a2 | 6039 | |
33fec8d5 SB |
6040 | rtx_insn *insn = BB_END (e->src); |
6041 | ||
6042 | if (!(CALL_P (insn) && SIBLING_CALL_P (insn))) | |
6043 | continue; | |
6044 | ||
e86a9946 | 6045 | if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ()) |
484db665 BS |
6046 | { |
6047 | start_sequence (); | |
6048 | emit_note (NOTE_INSN_EPILOGUE_BEG); | |
6049 | emit_insn (ep_seq); | |
dc01c3d1 | 6050 | rtx_insn *seq = get_insns (); |
484db665 | 6051 | end_sequence (); |
0a1c58a2 | 6052 | |
484db665 BS |
6053 | /* Retain a map of the epilogue insns. Used in life analysis to |
6054 | avoid getting rid of sibcall epilogue insns. Do this before we | |
6055 | actually emit the sequence. */ | |
6056 | record_insns (seq, NULL, &epilogue_insn_hash); | |
5368224f | 6057 | set_insn_locations (seq, epilogue_location); |
2f937369 | 6058 | |
484db665 BS |
6059 | emit_insn_before (seq, insn); |
6060 | } | |
0a1c58a2 | 6061 | } |
ca1117cc | 6062 | |
33fec8d5 | 6063 | if (epilogue_seq) |
86c82654 | 6064 | { |
9c8348cf | 6065 | rtx_insn *insn, *next; |
86c82654 RH |
6066 | |
6067 | /* Similarly, move any line notes that appear after the epilogue. | |
ff7cc307 | 6068 | There is no need, however, to be quite so anal about the existence |
071a42f9 | 6069 | of such a note. Also possibly move |
84c1fa24 UW |
6070 | NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug |
6071 | info generation. */ | |
33fec8d5 | 6072 | for (insn = epilogue_seq; insn; insn = next) |
86c82654 RH |
6073 | { |
6074 | next = NEXT_INSN (insn); | |
b8698a0f | 6075 | if (NOTE_P (insn) |
a38e7aa5 | 6076 | && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)) |
33fec8d5 | 6077 | reorder_insns (insn, insn, PREV_INSN (epilogue_seq)); |
86c82654 RH |
6078 | } |
6079 | } | |
6fb5fa3c DB |
6080 | |
6081 | /* Threading the prologue and epilogue changes the artificial refs | |
6082 | in the entry and exit blocks. */ | |
6083 | epilogue_completed = 1; | |
6084 | df_update_entry_exit_and_calls (); | |
bdac5f58 TW |
6085 | } |
6086 | ||
cd9c1ca8 RH |
6087 | /* Reposition the prologue-end and epilogue-begin notes after |
6088 | instruction scheduling. */ | |
bdac5f58 TW |
6089 | |
6090 | void | |
6fb5fa3c | 6091 | reposition_prologue_and_epilogue_notes (void) |
bdac5f58 | 6092 | { |
e86a9946 RS |
6093 | if (!targetm.have_prologue () |
6094 | && !targetm.have_epilogue () | |
6095 | && !targetm.have_sibcall_epilogue ()) | |
5251b8b3 | 6096 | return; |
5251b8b3 | 6097 | |
cd9c1ca8 RH |
6098 | /* Since the hash table is created on demand, the fact that it is |
6099 | non-null is a signal that it is non-empty. */ | |
6100 | if (prologue_insn_hash != NULL) | |
bdac5f58 | 6101 | { |
d242408f | 6102 | size_t len = prologue_insn_hash->elements (); |
691fe203 | 6103 | rtx_insn *insn, *last = NULL, *note = NULL; |
bdac5f58 | 6104 | |
cd9c1ca8 RH |
6105 | /* Scan from the beginning until we reach the last prologue insn. */ |
6106 | /* ??? While we do have the CFG intact, there are two problems: | |
6107 | (1) The prologue can contain loops (typically probing the stack), | |
6108 | which means that the end of the prologue isn't in the first bb. | |
6109 | (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */ | |
6fb5fa3c | 6110 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
bdac5f58 | 6111 | { |
4b4bf941 | 6112 | if (NOTE_P (insn)) |
9392c110 | 6113 | { |
a38e7aa5 | 6114 | if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END) |
0a1c58a2 JL |
6115 | note = insn; |
6116 | } | |
cd9c1ca8 | 6117 | else if (contains (insn, prologue_insn_hash)) |
0a1c58a2 | 6118 | { |
9f53e965 RH |
6119 | last = insn; |
6120 | if (--len == 0) | |
6121 | break; | |
6122 | } | |
6123 | } | |
797a6ac1 | 6124 | |
9f53e965 RH |
6125 | if (last) |
6126 | { | |
cd9c1ca8 | 6127 | if (note == NULL) |
9f53e965 | 6128 | { |
cd9c1ca8 RH |
6129 | /* Scan forward looking for the PROLOGUE_END note. It should |
6130 | be right at the beginning of the block, possibly with other | |
6131 | insn notes that got moved there. */ | |
6132 | for (note = NEXT_INSN (last); ; note = NEXT_INSN (note)) | |
6133 | { | |
6134 | if (NOTE_P (note) | |
6135 | && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END) | |
6136 | break; | |
6137 | } | |
9f53e965 | 6138 | } |
c93b03c2 | 6139 | |
9f53e965 | 6140 | /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */ |
4b4bf941 | 6141 | if (LABEL_P (last)) |
9f53e965 RH |
6142 | last = NEXT_INSN (last); |
6143 | reorder_insns (note, note, last); | |
bdac5f58 | 6144 | } |
0a1c58a2 JL |
6145 | } |
6146 | ||
cd9c1ca8 | 6147 | if (epilogue_insn_hash != NULL) |
0a1c58a2 | 6148 | { |
cd9c1ca8 RH |
6149 | edge_iterator ei; |
6150 | edge e; | |
bdac5f58 | 6151 | |
fefa31b5 | 6152 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
bdac5f58 | 6153 | { |
691fe203 | 6154 | rtx_insn *insn, *first = NULL, *note = NULL; |
997704f1 | 6155 | basic_block bb = e->src; |
c93b03c2 | 6156 | |
997704f1 | 6157 | /* Scan from the beginning until we reach the first epilogue insn. */ |
cd9c1ca8 | 6158 | FOR_BB_INSNS (bb, insn) |
9f53e965 | 6159 | { |
cd9c1ca8 RH |
6160 | if (NOTE_P (insn)) |
6161 | { | |
6162 | if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG) | |
6163 | { | |
6164 | note = insn; | |
997704f1 | 6165 | if (first != NULL) |
cd9c1ca8 RH |
6166 | break; |
6167 | } | |
6168 | } | |
997704f1 | 6169 | else if (first == NULL && contains (insn, epilogue_insn_hash)) |
cd9c1ca8 | 6170 | { |
997704f1 | 6171 | first = insn; |
cd9c1ca8 RH |
6172 | if (note != NULL) |
6173 | break; | |
6174 | } | |
9392c110 | 6175 | } |
997704f1 RH |
6176 | |
6177 | if (note) | |
6178 | { | |
6179 | /* If the function has a single basic block, and no real | |
b8698a0f | 6180 | epilogue insns (e.g. sibcall with no cleanup), the |
997704f1 RH |
6181 | epilogue note can get scheduled before the prologue |
6182 | note. If we have frame related prologue insns, having | |
6183 | them scanned during the epilogue will result in a crash. | |
6184 | In this case re-order the epilogue note to just before | |
6185 | the last insn in the block. */ | |
6186 | if (first == NULL) | |
6187 | first = BB_END (bb); | |
6188 | ||
6189 | if (PREV_INSN (first) != note) | |
6190 | reorder_insns (note, note, PREV_INSN (first)); | |
6191 | } | |
bdac5f58 TW |
6192 | } |
6193 | } | |
bdac5f58 | 6194 | } |
87ff9c8e | 6195 | |
df92c640 SB |
6196 | /* Returns the name of function declared by FNDECL. */ |
6197 | const char * | |
6198 | fndecl_name (tree fndecl) | |
6199 | { | |
6200 | if (fndecl == NULL) | |
6201 | return "(nofn)"; | |
6202 | return lang_hooks.decl_printable_name (fndecl, 2); | |
6203 | } | |
6204 | ||
532aafad SB |
6205 | /* Returns the name of function FN. */ |
6206 | const char * | |
6207 | function_name (struct function *fn) | |
6208 | { | |
df92c640 SB |
6209 | tree fndecl = (fn == NULL) ? NULL : fn->decl; |
6210 | return fndecl_name (fndecl); | |
532aafad SB |
6211 | } |
6212 | ||
faed5cc3 SB |
6213 | /* Returns the name of the current function. */ |
6214 | const char * | |
6215 | current_function_name (void) | |
6216 | { | |
532aafad | 6217 | return function_name (cfun); |
faed5cc3 | 6218 | } |
ef330312 PB |
6219 | \f |
6220 | ||
c2924966 | 6221 | static unsigned int |
ef330312 PB |
6222 | rest_of_handle_check_leaf_regs (void) |
6223 | { | |
6224 | #ifdef LEAF_REGISTERS | |
416ff32e | 6225 | crtl->uses_only_leaf_regs |
ef330312 PB |
6226 | = optimize > 0 && only_leaf_regs_used () && leaf_function_p (); |
6227 | #endif | |
c2924966 | 6228 | return 0; |
ef330312 PB |
6229 | } |
6230 | ||
8d8d1a28 | 6231 | /* Insert a TYPE into the used types hash table of CFUN. */ |
b646ba3f | 6232 | |
8d8d1a28 AH |
6233 | static void |
6234 | used_types_insert_helper (tree type, struct function *func) | |
33c9159e | 6235 | { |
8d8d1a28 | 6236 | if (type != NULL && func != NULL) |
33c9159e | 6237 | { |
33c9159e | 6238 | if (func->used_types_hash == NULL) |
b086d530 TS |
6239 | func->used_types_hash = hash_set<tree>::create_ggc (37); |
6240 | ||
6241 | func->used_types_hash->add (type); | |
33c9159e AH |
6242 | } |
6243 | } | |
6244 | ||
8d8d1a28 AH |
6245 | /* Given a type, insert it into the used types hash table of cfun. */ |
6246 | void | |
6247 | used_types_insert (tree t) | |
6248 | { | |
6249 | while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE) | |
095c7b3c JJ |
6250 | if (TYPE_NAME (t)) |
6251 | break; | |
6252 | else | |
6253 | t = TREE_TYPE (t); | |
29ce73cb PB |
6254 | if (TREE_CODE (t) == ERROR_MARK) |
6255 | return; | |
095c7b3c JJ |
6256 | if (TYPE_NAME (t) == NULL_TREE |
6257 | || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t))) | |
6258 | t = TYPE_MAIN_VARIANT (t); | |
8d8d1a28 | 6259 | if (debug_info_level > DINFO_LEVEL_NONE) |
b646ba3f DS |
6260 | { |
6261 | if (cfun) | |
6262 | used_types_insert_helper (t, cfun); | |
6263 | else | |
9771b263 DN |
6264 | { |
6265 | /* So this might be a type referenced by a global variable. | |
6266 | Record that type so that we can later decide to emit its | |
6267 | debug information. */ | |
6268 | vec_safe_push (types_used_by_cur_var_decl, t); | |
6269 | } | |
b646ba3f DS |
6270 | } |
6271 | } | |
6272 | ||
6273 | /* Helper to hash a struct types_used_by_vars_entry. */ |
6274 | ||
6275 | static hashval_t | |
6276 | hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry) | |
6277 | { | |
6278 | gcc_assert (entry && entry->var_decl && entry->type); | |
6279 | ||
6280 | return iterative_hash_object (entry->type, | |
6281 | iterative_hash_object (entry->var_decl, 0)); | |
6282 | } | |
6283 | ||
6284 | /* Hash function of the types_used_by_vars_entry hash table. */ | |
6285 | ||
6286 | hashval_t | |
2a22f99c | 6287 | used_type_hasher::hash (types_used_by_vars_entry *entry) |
b646ba3f | 6288 | { |
b646ba3f DS |
6289 | return hash_types_used_by_vars_entry (entry); |
6290 | } | |
6291 | ||
6292 | /* Equality function of the types_used_by_vars_entry hash table. */ |
6293 | ||
2a22f99c TS |
6294 | bool |
6295 | used_type_hasher::equal (types_used_by_vars_entry *e1, | |
6296 | types_used_by_vars_entry *e2) | |
b646ba3f | 6297 | { |
b646ba3f DS |
6298 | return (e1->var_decl == e2->var_decl && e1->type == e2->type); |
6299 | } | |
6300 | ||
6301 | /* Inserts an entry into the types_used_by_vars_hash hash table. */ | |
6302 | ||
6303 | void | |
6304 | types_used_by_var_decl_insert (tree type, tree var_decl) | |
6305 | { | |
6306 | if (type != NULL && var_decl != NULL) | |
6307 | { | |
2a22f99c | 6308 | types_used_by_vars_entry **slot; |
b646ba3f DS |
6309 | struct types_used_by_vars_entry e; |
6310 | e.var_decl = var_decl; | |
6311 | e.type = type; | |
6312 | if (types_used_by_vars_hash == NULL) | |
2a22f99c TS |
6313 | types_used_by_vars_hash |
6314 | = hash_table<used_type_hasher>::create_ggc (37); | |
6315 | ||
6316 | slot = types_used_by_vars_hash->find_slot (&e, INSERT); | |
b646ba3f DS |
6317 | if (*slot == NULL) |
6318 | { | |
6319 | struct types_used_by_vars_entry *entry; | |
766090c2 | 6320 | entry = ggc_alloc<types_used_by_vars_entry> (); |
b646ba3f DS |
6321 | entry->type = type; |
6322 | entry->var_decl = var_decl; | |
6323 | *slot = entry; | |
6324 | } | |
6325 | } | |
8d8d1a28 AH |
6326 | } |
6327 | ||
27a4cd48 DM |
6328 | namespace { |
6329 | ||
6330 | const pass_data pass_data_leaf_regs = | |
6331 | { | |
6332 | RTL_PASS, /* type */ | |
6333 | "*leaf_regs", /* name */ | |
6334 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
6335 | TV_NONE, /* tv_id */ |
6336 | 0, /* properties_required */ | |
6337 | 0, /* properties_provided */ | |
6338 | 0, /* properties_destroyed */ | |
6339 | 0, /* todo_flags_start */ | |
6340 | 0, /* todo_flags_finish */ | |
ef330312 PB |
6341 | }; |
6342 | ||
27a4cd48 DM |
6343 | class pass_leaf_regs : public rtl_opt_pass |
6344 | { | |
6345 | public: | |
c3284718 RS |
6346 | pass_leaf_regs (gcc::context *ctxt) |
6347 | : rtl_opt_pass (pass_data_leaf_regs, ctxt) | |
27a4cd48 DM |
6348 | {} |
6349 | ||
6350 | /* opt_pass methods: */ | |
be55bfe6 TS |
6351 | virtual unsigned int execute (function *) |
6352 | { | |
6353 | return rest_of_handle_check_leaf_regs (); | |
6354 | } | |
27a4cd48 DM |
6355 | |
6356 | }; // class pass_leaf_regs | |
6357 | ||
6358 | } // anon namespace | |
6359 | ||
6360 | rtl_opt_pass * | |
6361 | make_pass_leaf_regs (gcc::context *ctxt) | |
6362 | { | |
6363 | return new pass_leaf_regs (ctxt); | |
6364 | } | |
6365 | ||
6fb5fa3c DB |
6366 | static unsigned int |
6367 | rest_of_handle_thread_prologue_and_epilogue (void) | |
6368 | { | |
63d0f6ab SB |
6369 | /* prepare_shrink_wrap is sensitive to the block structure of the control |
6370 | flow graph, so clean it up first. */ | |
6fb5fa3c | 6371 | if (optimize) |
63d0f6ab | 6372 | cleanup_cfg (0); |
d3c12306 | 6373 | |
6fb5fa3c DB |
6374 | /* On some machines, the prologue and epilogue code, or parts thereof, |
6375 | can be represented as RTL. Doing so lets us schedule insns between | |
6376 | it and the rest of the code and also allows delayed branch | |
6377 | scheduling to operate in the epilogue. */ | |
6fb5fa3c | 6378 | thread_prologue_and_epilogue_insns (); |
d3c12306 | 6379 | |
86b107ae SB |
6380 | /* Some non-cold blocks may now be only reachable from cold blocks. |
6381 | Fix that up. */ | |
6382 | fixup_partitions (); | |
6383 | ||
bdc6e1ae SB |
6384 | /* Shrink-wrapping can result in unreachable edges in the epilogue, |
6385 | see PR57320. */ | |
63d0f6ab | 6386 | cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0); |
bdc6e1ae | 6387 | |
d3c12306 | 6388 | /* The stack usage info is finalized during prologue expansion. */ |
a11e0df4 | 6389 | if (flag_stack_usage_info) |
d3c12306 EB |
6390 | output_stack_usage (); |
6391 | ||
6fb5fa3c DB |
6392 | return 0; |
6393 | } | |
6394 | ||
27a4cd48 DM |
6395 | namespace { |
6396 | ||
6397 | const pass_data pass_data_thread_prologue_and_epilogue = | |
6398 | { | |
6399 | RTL_PASS, /* type */ | |
6400 | "pro_and_epilogue", /* name */ | |
6401 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
6402 | TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */ |
6403 | 0, /* properties_required */ | |
6404 | 0, /* properties_provided */ | |
6405 | 0, /* properties_destroyed */ | |
3bea341f RB |
6406 | 0, /* todo_flags_start */ |
6407 | ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */ | |
6fb5fa3c | 6408 | }; |
27a4cd48 DM |
6409 | |
6410 | class pass_thread_prologue_and_epilogue : public rtl_opt_pass | |
6411 | { | |
6412 | public: | |
c3284718 RS |
6413 | pass_thread_prologue_and_epilogue (gcc::context *ctxt) |
6414 | : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt) | |
27a4cd48 DM |
6415 | {} |
6416 | ||
6417 | /* opt_pass methods: */ | |
be55bfe6 TS |
6418 | virtual unsigned int execute (function *) |
6419 | { | |
6420 | return rest_of_handle_thread_prologue_and_epilogue (); | |
6421 | } | |
27a4cd48 DM |
6422 | |
6423 | }; // class pass_thread_prologue_and_epilogue | |
6424 | ||
6425 | } // anon namespace | |
6426 | ||
6427 | rtl_opt_pass * | |
6428 | make_pass_thread_prologue_and_epilogue (gcc::context *ctxt) | |
6429 | { | |
6430 | return new pass_thread_prologue_and_epilogue (ctxt); | |
6431 | } | |
d8d72314 PB |
6432 | \f |
6433 | ||
6434 | /* This mini-pass fixes fall-out from SSA in asm statements that have | |
b8698a0f | 6435 | in-out constraints. Say you start with |
d8d72314 PB |
6436 | |
6437 | orig = inout; | |
6438 | asm ("": "+mr" (inout)); | |
6439 | use (orig); | |
6440 | ||
6441 | which is transformed very early to use explicit output and match operands: | |
6442 | ||
6443 | orig = inout; | |
6444 | asm ("": "=mr" (inout) : "0" (inout)); | |
6445 | use (orig); | |
6446 | ||
6447 | Or, after SSA and copyprop, | |
6448 | ||
6449 | asm ("": "=mr" (inout_2) : "0" (inout_1)); | |
6450 | use (inout_1); | |
6451 | ||
6452 | Clearly inout_2 and inout_1 can't be coalesced easily anymore, as | |
6453 | they represent two separate values, so they will get different pseudo | |
6454 | registers during expansion. Then, since the two operands need to match | |
6455 | per the constraints, but use different pseudo registers, reload can | |
6456 | only register a reload for these operands. But reloads can only be | |
6457 | satisfied by hardregs, not by memory, so we need a register for this | |
6458 | reload, just because we are presented with non-matching operands. | |
6459 | So, even though we allow memory for this operand, no memory can be | |
6460 | used for it, just because the two operands don't match. This can | |
6461 | cause reload failures on register-starved targets. | |
6462 | ||
6463 | So it's a symptom of reload not being able to use memory for reloads | |
6464 | or, alternatively it's also a symptom of both operands not coming into | |
6465 | reload as matching (in which case the pseudo could go to memory just | |
6466 | fine, as the alternative allows it, and no reload would be necessary). | |
6467 | We fix the latter problem here, by transforming | |
6468 | ||
6469 | asm ("": "=mr" (inout_2) : "0" (inout_1)); | |
6470 | ||
6471 | back to | |
6472 | ||
6473 | inout_2 = inout_1; | |
6474 | asm ("": "=mr" (inout_2) : "0" (inout_2)); */ | |
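/* Illustrative testcase, assuming nothing beyond what the comment above
   already describes: compiling something like

     int
     g (int inout)
     {
       int orig = inout;
       asm ("" : "+mr" (inout));
       return orig + inout;
     }

   with optimization and -fdump-rtl-asmcons (the dump flag named after
   this pass) should show the extra inout_2 = inout_1 move emitted just
   before the asm.  */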
6475 | ||
6476 | static void | |
691fe203 | 6477 | match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs) |
d8d72314 PB |
6478 | { |
6479 | int i; | |
6480 | bool changed = false; | |
6481 | rtx op = SET_SRC (p_sets[0]); | |
6482 | int ninputs = ASM_OPERANDS_INPUT_LENGTH (op); | |
6483 | rtvec inputs = ASM_OPERANDS_INPUT_VEC (op); | |
1b4572a8 | 6484 | bool *output_matched = XALLOCAVEC (bool, noutputs); |
d8d72314 | 6485 | |
d7b8033f | 6486 | memset (output_matched, 0, noutputs * sizeof (bool)); |
d8d72314 PB |
6487 | for (i = 0; i < ninputs; i++) |
6488 | { | |
691fe203 DM |
6489 | rtx input, output; |
6490 | rtx_insn *insns; | |
d8d72314 PB |
6491 | const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i); |
6492 | char *end; | |
53220215 | 6493 | int match, j; |
d8d72314 | 6494 | |
70f16287 JJ |
6495 | if (*constraint == '%') |
6496 | constraint++; | |
6497 | ||
d8d72314 PB |
6498 | match = strtoul (constraint, &end, 10); |
6499 | if (end == constraint) | |
6500 | continue; | |
6501 | ||
6502 | gcc_assert (match < noutputs); | |
6503 | output = SET_DEST (p_sets[match]); | |
6504 | input = RTVEC_ELT (inputs, i); | |
53220215 MM |
6505 | /* Only do the transformation for pseudos. */ |
6506 | if (! REG_P (output) | |
6507 | || rtx_equal_p (output, input) | |
d8d72314 PB |
6508 | || (GET_MODE (input) != VOIDmode |
6509 | && GET_MODE (input) != GET_MODE (output))) | |
6510 | continue; | |
6511 | ||
53220215 MM |
6512 | /* We can't do anything if the output is also used as input, |
6513 | as we're going to overwrite it. */ | |
6514 | for (j = 0; j < ninputs; j++) | |
6515 | if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j))) | |
6516 | break; | |
6517 | if (j != ninputs) | |
6518 | continue; | |
6519 | ||
d7b8033f JJ |
6520 | /* Avoid changing the same input several times. For |
6521 | asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in)); | |
6522 | only change in once (to out1), rather than changing it | |
6523 | first to out1 and afterwards to out2. */ | |
6524 | if (i > 0) | |
6525 | { | |
6526 | for (j = 0; j < noutputs; j++) | |
6527 | if (output_matched[j] && input == SET_DEST (p_sets[j])) | |
6528 | break; | |
6529 | if (j != noutputs) | |
6530 | continue; | |
6531 | } | |
6532 | output_matched[match] = true; | |
6533 | ||
d8d72314 | 6534 | start_sequence (); |
53220215 | 6535 | emit_move_insn (output, input); |
d8d72314 PB |
6536 | insns = get_insns (); |
6537 | end_sequence (); | |
d8d72314 | 6538 | emit_insn_before (insns, insn); |
53220215 MM |
6539 | |
6540 | /* Now replace all mentions of the input with output. We can't | |
fa10beec | 6541 | just replace the occurrence in inputs[i], as the register might |
53220215 MM |
6542 | also be used in some other input (or even in an address of an |
6543 | output), which would mean possibly increasing the number of | |
6544 | inputs by one (namely 'output' in addition), which might pose | |
6545 | too complicated a problem for reload to solve. E.g. this situation: |
6546 | ||
6547 | asm ("" : "=r" (output), "=m" (input) : "0" (input)) | |
6548 | ||
84fbffb2 | 6549 | Here 'input' is used in two occurrences as input (once for the |
53220215 | 6550 | input operand, once for the address in the second output operand). |
fa10beec | 6551 | If we replaced only the occurrence of the input operand (to |
53220215 MM |
6552 | make the matching) we would be left with this: |
6553 | ||
6554 | output = input | |
6555 | asm ("" : "=r" (output), "=m" (input) : "0" (output)) | |
6556 | ||
6557 | Now we suddenly have two different input values (containing the same | |
6558 | value, but different pseudos) where we formerly had only one. | |
6559 | With more complicated asms this might lead to reload failures | |
6560 | which wouldn't have happened without this pass. So, iterate over |
84fbffb2 | 6561 | all operands and replace all occurrences of the register used. */ |
53220215 | 6562 | for (j = 0; j < noutputs; j++) |
1596d61e | 6563 | if (!rtx_equal_p (SET_DEST (p_sets[j]), input) |
53220215 MM |
6564 | && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j]))) |
6565 | SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]), | |
6566 | input, output); | |
6567 | for (j = 0; j < ninputs; j++) | |
6568 | if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j))) | |
6569 | RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j), | |
6570 | input, output); | |
6571 | ||
d8d72314 PB |
6572 | changed = true; |
6573 | } | |
6574 | ||
6575 | if (changed) | |
6576 | df_insn_rescan (insn); | |
6577 | } | |
6578 | ||
5cf18d25 ML |
6579 | /* Add the decl D to the local_decls list of FUN. */ |
6580 | ||
6581 | void | |
6582 | add_local_decl (struct function *fun, tree d) | |
6583 | { | |
6584 | gcc_assert (TREE_CODE (d) == VAR_DECL); | |
6585 | vec_safe_push (fun->local_decls, d); | |
6586 | } | |
6587 | ||
be55bfe6 TS |
6588 | namespace { |
6589 | ||
6590 | const pass_data pass_data_match_asm_constraints = | |
6591 | { | |
6592 | RTL_PASS, /* type */ | |
6593 | "asmcons", /* name */ | |
6594 | OPTGROUP_NONE, /* optinfo_flags */ | |
be55bfe6 TS |
6595 | TV_NONE, /* tv_id */ |
6596 | 0, /* properties_required */ | |
6597 | 0, /* properties_provided */ | |
6598 | 0, /* properties_destroyed */ | |
6599 | 0, /* todo_flags_start */ | |
6600 | 0, /* todo_flags_finish */ | |
6601 | }; | |
6602 | ||
6603 | class pass_match_asm_constraints : public rtl_opt_pass | |
6604 | { | |
6605 | public: | |
6606 | pass_match_asm_constraints (gcc::context *ctxt) | |
6607 | : rtl_opt_pass (pass_data_match_asm_constraints, ctxt) | |
6608 | {} | |
6609 | ||
6610 | /* opt_pass methods: */ | |
6611 | virtual unsigned int execute (function *); | |
6612 | ||
6613 | }; // class pass_match_asm_constraints | |
6614 | ||
6615 | unsigned | |
6616 | pass_match_asm_constraints::execute (function *fun) | |
d8d72314 PB |
6617 | { |
6618 | basic_block bb; | |
691fe203 DM |
6619 | rtx_insn *insn; |
6620 | rtx pat, *p_sets; | |
d8d72314 PB |
6621 | int noutputs; |
6622 | ||
e3b5732b | 6623 | if (!crtl->has_asm_statement) |
d8d72314 PB |
6624 | return 0; |
6625 | ||
6626 | df_set_flags (DF_DEFER_INSN_RESCAN); | |
be55bfe6 | 6627 | FOR_EACH_BB_FN (bb, fun) |
d8d72314 PB |
6628 | { |
6629 | FOR_BB_INSNS (bb, insn) | |
6630 | { | |
6631 | if (!INSN_P (insn)) | |
6632 | continue; | |
6633 | ||
6634 | pat = PATTERN (insn); | |
6635 | if (GET_CODE (pat) == PARALLEL) | |
6636 | p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0); | |
6637 | else if (GET_CODE (pat) == SET) | |
6638 | p_sets = &PATTERN (insn), noutputs = 1; | |
6639 | else | |
6640 | continue; | |
6641 | ||
6642 | if (GET_CODE (*p_sets) == SET | |
6643 | && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS) | |
6644 | match_asm_constraints_1 (insn, p_sets, noutputs); | |
6645 | } | |
6646 | } | |
6647 | ||
6648 | return TODO_df_finish; | |
6649 | } | |
6650 | ||
27a4cd48 DM |
6651 | } // anon namespace |
6652 | ||
6653 | rtl_opt_pass * | |
6654 | make_pass_match_asm_constraints (gcc::context *ctxt) | |
6655 | { | |
6656 | return new pass_match_asm_constraints (ctxt); | |
6657 | } | |
6658 | ||
faed5cc3 | 6659 | |
e2500fed | 6660 | #include "gt-function.h" |