/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple-expr.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "rtl-error.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "optabs-tree.h"
#include "output.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "stringpool.h"
#include "attribs.h"
#include "gimple.h"
#include "options.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

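/* For illustration of the rounding macros above: with ALIGN == 8,
   FLOOR_ROUND (13, 8) is 8 and CEIL_ROUND (13, 8) is 16, while
   FLOOR_ROUND (-13, 8) is -16.  Plain C division would truncate -13/8
   toward zero and give the wrong (higher) multiple for negative values,
   which is why the mask form is used.  */
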
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;
\f

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);

\f
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

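/* A typical use by a language front end that encounters a nested function
   is, roughly (a sketch, not taken verbatim from any particular front end):

     push_function_context ();
     ... allocate and expand the nested function's body ...
     pop_function_context ();

   so that the enclosing function's state in cfun is saved and restored
   around the nested compilation.  */
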
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

poly_int64
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (poly_int64 offset, tree func)
{
  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
  unsigned HOST_WIDE_INT limit
    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
       /* Leave room for the fixed part of the frame.  */
       - 64 * UNITS_PER_WORD);

  if (!coeffs_in_range_p (size, 0U, limit))
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return true;
    }

  return false;
}

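/* As a concrete reading of the limit above: on a target whose Pmode is
   64 bits wide with 8-byte words, the frame may grow to at most
   2^63 - 512 bytes before the error is reported.  */
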
/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (poly_int64 start, poly_int64 length,
                     poly_int64 size, unsigned int alignment,
                     poly_int64_pod *poffset)
{
  poly_int64 this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
         + frame_phase);
  else
    this_frame_offset
      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (maybe_lt (this_frame_offset, start))
    {
      if (known_eq (frame_offset, start))
        frame_offset = this_frame_offset;
      else
        return false;
    }
  else if (maybe_gt (this_frame_offset + size, start + length))
    {
      if (known_eq (frame_offset, start + length))
        frame_offset = this_frame_offset + size;
      else
        return false;
    }

  *poffset = this_frame_offset;
  return true;
}

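/* A worked example of the fitting logic above, assuming an upward-growing
   frame and a zero frame_phase: with START == 4, LENGTH == 8, SIZE == 4 and
   ALIGNMENT == 8, the offset is rounded up to 8, the slot occupies bytes
   [8, 12), and the function returns true with *POFFSET == 8, leaving
   [4, 8) as residual free space for the caller to record.  */
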
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (poly_int64 start, poly_int64 end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, poly_int64 size,
                      int align, int kind)
{
  rtx x, addr;
  poly_int64 bigend_correction = 0;
  poly_int64 slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || known_eq (size, 0)
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (known_gt (slot_offset, space->start))
                add_frame_space (space->start, slot_offset);
              if (known_lt (slot_offset + size, space->start + space->length))
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, frame_offset))
            add_frame_space (frame_offset, slot_offset);
          if (known_lt (slot_offset + size, old_frame_offset))
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, old_frame_offset))
            add_frame_space (old_frame_offset, slot_offset);
          if (known_lt (slot_offset + size, frame_offset))
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (mode != BLKmode)
    {
      /* The slot size can sometimes be smaller than the mode size;
         e.g. the rs6000 port allocates slots with a vector mode
         that have the size of only one element.  However, the slot
         size must always be ordered wrt to the mode size, in the
         same way as for a subreg.  */
      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
        bigend_correction = size - GET_MODE_SIZE (mode);
    }

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1, passing ASLK_RECORD_PAD as the KIND.  */

rtx
assign_stack_local (machine_mode mode, poly_int64 size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
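
/* For example, a caller that needs a word-sized slot with the natural
   alignment of its mode might write:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   where the 0 for ALIGN requests alignment according to MODE, as described
   above assign_stack_local_1.  (A sketch of typical usage, not a call site
   taken from this file.)  */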
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  poly_int64 size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  poly_int64 base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  poly_int64 full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  poly_int64 offset;
  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (known_in_range_p (offset, p->base_offset, p->full_size))
            return p;
    }

  return NULL;
}
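
/* For instance, if R is a pseudo whose value was recorded via
   insert_temp_slot_address as the address of a slot, then looking up
   (plus (reg R) (const_int 16)) falls through to the PLUS cases above and
   still finds that slot through the recursive call on the register
   operand.  */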
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  gcc_assert (known_size_p (size));

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align
              && known_ge (p->size, size)
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0
                  || (known_eq (best_p->size, p->size)
                      ? best_p->align > p->align
                      : known_ge (best_p->size, p->size))))
            {
              if (p->align == align && known_eq (p->size, size))
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          poly_int64 rounded_size = aligned_upper_bound (size, alignment);

          if (known_ge (best_p->size - rounded_size, alignment))
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              vec_safe_push (stack_slot_list, p->slot);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      poly_int64 frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? aligned_upper_bound (size,
                                                              (int) align
                                                              / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, poly_int64 size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
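
/* A typical call allocates a temporary for a value of a known scalar mode,
   e.g.:

     rtx tmp = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode));

   which reuses a compatible slot from avail_temp_slots when one exists and
   otherwise carves a new one out of the frame via assign_stack_local_1.
   (A sketch of typical usage, not a call site taken from this file.)  */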
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      poly_int64 size;
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
        size = max_int_size_in_bytes (type);

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (known_eq (size, 0))
        size = 1;

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl
          && !known_size_p (size)
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
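
/* For example, expanding a call whose aggregate return value must live in
   memory might request

     rtx target = assign_temp (TREE_TYPE (exp), 1, 1);

   passing MEMORY_REQUIRED == 1 to force a stack slot rather than a pseudo.
   (A sketch only; `exp' stands for whatever tree the caller is expanding,
   and the exact call sites vary.)  */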
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (known_eq (p->base_offset + p->full_size, q->base_offset))
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (known_eq (q->base_offset + q->full_size, p->base_offset))
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
\f
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     see if both OLD_RTX and NEW_RTX are a PLUS with a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

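/* The usual pattern at expansion time is to bracket each statement with a
   nesting level, roughly:

     push_temp_slots ();
     ... expand the statement, calling assign_stack_temp as needed ...
     preserve_temp_slots (result);  ... only if the value must survive ...
     pop_temp_slots ();

   (a sketch of the calling convention, not code lifted from a caller).  */
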
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
\f
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}

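/* A back end typically uses this to refer to the entry value of a register,
   for instance a return-address register:

     rtx rar = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   (a sketch; RETURN_ADDR_REGNUM stands for whatever hard register number
   the target uses).  The moves that initialize such pseudos are emitted at
   the entry block by emit_initial_value_sets below.  */
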
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
\f
8fff4fc1 RH |
1364 | /* These routines are responsible for converting virtual register references |
1365 | to the actual hard register references once RTL generation is complete. | |
718fe406 | 1366 | |
8fff4fc1 RH |
1367 | The following five variables are used for communication between the |
1368 | routines. They contain the offsets of the virtual registers from their | |
1369 | respective hard registers. */ | |
fe9b4957 | 1370 | |
e6715081 RS |
1371 | static poly_int64 in_arg_offset; |
1372 | static poly_int64 var_offset; | |
1373 | static poly_int64 dynamic_offset; | |
1374 | static poly_int64 out_arg_offset; | |
1375 | static poly_int64 cfa_offset; | |
8a5275eb | 1376 | |
8fff4fc1 RH |
1377 | /* In most machines, the stack pointer register is equivalent to the bottom |
1378 | of the stack. */ | |
718fe406 | 1379 | |
8fff4fc1 RH |
1380 | #ifndef STACK_POINTER_OFFSET |
1381 | #define STACK_POINTER_OFFSET 0 | |
1382 | #endif | |
8c36698e | 1383 | |
ddbb449f AM |
1384 | #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE) |
1385 | #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE | |
1386 | #endif | |
1387 | ||
8fff4fc1 RH |
1388 | /* If not defined, pick an appropriate default for the offset of dynamically |
1389 | allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS, | |
ddbb449f | 1390 | INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */ |
fe9b4957 | 1391 | |
8fff4fc1 | 1392 | #ifndef STACK_DYNAMIC_OFFSET |
8a5275eb | 1393 | |
8fff4fc1 RH |
1394 | /* The bottom of the stack points to the actual arguments. If |
1395 | REG_PARM_STACK_SPACE is defined, this includes the space for the register | |
1396 | parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined, |
1397 | stack space for register parameters is not pushed by the caller, but | |
1398 | rather part of the fixed stack areas and hence not included in | |
38173d38 | 1399 | `crtl->outgoing_args_size'. Nevertheless, we must allow |
8fff4fc1 | 1400 | for it when allocating stack dynamic objects. */ |
8a5275eb | 1401 | |
ddbb449f | 1402 | #ifdef INCOMING_REG_PARM_STACK_SPACE |
8fff4fc1 RH |
1403 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ |
1404 | ((ACCUMULATE_OUTGOING_ARGS \ | |
38173d38 | 1405 | ? (crtl->outgoing_args_size \ |
81464b2c | 1406 | + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \ |
ddbb449f | 1407 | : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \ |
ac294f0b | 1408 | : 0) + (STACK_POINTER_OFFSET)) |
8fff4fc1 RH |
1409 | #else |
1410 | #define STACK_DYNAMIC_OFFSET(FNDECL) \ | |
a20c5714 | 1411 | ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \ |
8fff4fc1 RH |
1412 | + (STACK_POINTER_OFFSET)) |
1413 | #endif | |
1414 | #endif | |
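/* Worked example of the macro above (illustrative only): on a target that
   defines neither STACK_DYNAMIC_OFFSET nor INCOMING_REG_PARM_STACK_SPACE,
   the default reduces to

     STACK_DYNAMIC_OFFSET (fndecl)
       == (ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)
          + STACK_POINTER_OFFSET

   i.e. dynamically allocated objects start just past the permanently
   allocated outgoing argument block (if any), measured from the stack
   pointer.  */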
4fa48eae | 1415 | |
659e47fb | 1416 | \f |
bbf9b913 RH |
1417 | /* Given a piece of RTX and a pointer to a poly_int64, if the RTX |
1418 | is a virtual register, return the equivalent hard register and set the | |
1419 | offset indirectly through the pointer. Otherwise, return 0. */ | |
6f086dfc | 1420 | |
bbf9b913 | 1421 | static rtx |
e6715081 | 1422 | instantiate_new_reg (rtx x, poly_int64_pod *poffset) |
6f086dfc | 1423 | { |
82d6e6fc | 1424 | rtx new_rtx; |
e6715081 | 1425 | poly_int64 offset; |
6f086dfc | 1426 | |
bbf9b913 | 1427 | if (x == virtual_incoming_args_rtx) |
2e3f842f | 1428 | { |
d015f7cc | 1429 | if (stack_realign_drap) |
2e3f842f | 1430 | { |
d015f7cc L |
1431 | /* Replace virtual_incoming_args_rtx with internal arg |
1432 | pointer if DRAP is used to realign stack. */ | |
82d6e6fc | 1433 | new_rtx = crtl->args.internal_arg_pointer; |
2e3f842f L |
1434 | offset = 0; |
1435 | } | |
1436 | else | |
82d6e6fc | 1437 | new_rtx = arg_pointer_rtx, offset = in_arg_offset; |
2e3f842f | 1438 | } |
bbf9b913 | 1439 | else if (x == virtual_stack_vars_rtx) |
82d6e6fc | 1440 | new_rtx = frame_pointer_rtx, offset = var_offset; |
bbf9b913 | 1441 | else if (x == virtual_stack_dynamic_rtx) |
82d6e6fc | 1442 | new_rtx = stack_pointer_rtx, offset = dynamic_offset; |
bbf9b913 | 1443 | else if (x == virtual_outgoing_args_rtx) |
82d6e6fc | 1444 | new_rtx = stack_pointer_rtx, offset = out_arg_offset; |
bbf9b913 | 1445 | else if (x == virtual_cfa_rtx) |
f6672e8e RH |
1446 | { |
1447 | #ifdef FRAME_POINTER_CFA_OFFSET | |
82d6e6fc | 1448 | new_rtx = frame_pointer_rtx; |
f6672e8e | 1449 | #else |
82d6e6fc | 1450 | new_rtx = arg_pointer_rtx; |
f6672e8e RH |
1451 | #endif |
1452 | offset = cfa_offset; | |
1453 | } | |
32990d5b JJ |
1454 | else if (x == virtual_preferred_stack_boundary_rtx) |
1455 | { | |
1456 | new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT); | |
1457 | offset = 0; | |
1458 | } | |
bbf9b913 RH |
1459 | else |
1460 | return NULL_RTX; | |
6f086dfc | 1461 | |
bbf9b913 | 1462 | *poffset = offset; |
82d6e6fc | 1463 | return new_rtx; |
6f086dfc RS |
1464 | } |
1465 | ||
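/* Illustrative example: with the offsets computed in
   instantiate_virtual_regs, the routines below rewrite, say,

     (mem (plus (reg virtual-stack-vars) (const_int 8)))

   into

     (mem (plus (reg frame-pointer) (const_int 8 + var_offset)))

   and likewise map virtual-incoming-args, virtual-stack-dynamic and
   virtual-outgoing-args onto the arg or stack pointer plus their
   respective offsets.  */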
b8704801 RS |
1466 | /* A subroutine of instantiate_virtual_regs. Instantiate any virtual |
1467 | registers present inside of *LOC. The expression is simplified, | |
1468 | as much as possible, but is not to be considered "valid" in any sense | |
1469 | implied by the target. Return true if any change is made. */ | |
6f086dfc | 1470 | |
b8704801 RS |
1471 | static bool |
1472 | instantiate_virtual_regs_in_rtx (rtx *loc) | |
6f086dfc | 1473 | { |
b8704801 RS |
1474 | if (!*loc) |
1475 | return false; | |
1476 | bool changed = false; | |
1477 | subrtx_ptr_iterator::array_type array; | |
1478 | FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST) | |
6f086dfc | 1479 | { |
b8704801 RS |
1480 | rtx *loc = *iter; |
1481 | if (rtx x = *loc) | |
bbf9b913 | 1482 | { |
b8704801 | 1483 | rtx new_rtx; |
e6715081 | 1484 | poly_int64 offset; |
b8704801 RS |
1485 | switch (GET_CODE (x)) |
1486 | { | |
1487 | case REG: | |
1488 | new_rtx = instantiate_new_reg (x, &offset); | |
1489 | if (new_rtx) | |
1490 | { | |
1491 | *loc = plus_constant (GET_MODE (x), new_rtx, offset); | |
1492 | changed = true; | |
1493 | } | |
1494 | iter.skip_subrtxes (); | |
1495 | break; | |
bbf9b913 | 1496 | |
b8704801 RS |
1497 | case PLUS: |
1498 | new_rtx = instantiate_new_reg (XEXP (x, 0), &offset); | |
1499 | if (new_rtx) | |
1500 | { | |
1501 | XEXP (x, 0) = new_rtx; | |
1502 | *loc = plus_constant (GET_MODE (x), x, offset, true); | |
1503 | changed = true; | |
1504 | iter.skip_subrtxes (); | |
1505 | break; | |
1506 | } | |
e5e809f4 | 1507 | |
b8704801 RS |
1508 | /* FIXME -- from old code */ |
1509 | /* If we have (plus (subreg (virtual-reg)) (const_int)), we know | |
1510 | we can commute the PLUS and SUBREG because pointers into the | |
1511 | frame are well-behaved. */ | |
1512 | break; | |
ce717ce4 | 1513 | |
b8704801 RS |
1514 | default: |
1515 | break; | |
1516 | } | |
1517 | } | |
6f086dfc | 1518 | } |
b8704801 | 1519 | return changed; |
6f086dfc RS |
1520 | } |
1521 | ||
bbf9b913 RH |
1522 | /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X |
1523 | matches the predicate for insn CODE operand OPERAND. */ | |
6f086dfc | 1524 | |
bbf9b913 RH |
1525 | static int |
1526 | safe_insn_predicate (int code, int operand, rtx x) | |
6f086dfc | 1527 | { |
2ef6ce06 | 1528 | return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x); |
bbf9b913 | 1529 | } |
5a73491b | 1530 | |
bbf9b913 RH |
1531 | /* A subroutine of instantiate_virtual_regs. Instantiate any virtual |
1532 | registers present inside of insn. The result will be a valid insn. */ | |
5a73491b RK |
1533 | |
1534 | static void | |
691fe203 | 1535 | instantiate_virtual_regs_in_insn (rtx_insn *insn) |
5a73491b | 1536 | { |
e6715081 | 1537 | poly_int64 offset; |
bbf9b913 | 1538 | int insn_code, i; |
9325973e | 1539 | bool any_change = false; |
691fe203 DM |
1540 | rtx set, new_rtx, x; |
1541 | rtx_insn *seq; | |
32e66afd | 1542 | |
bbf9b913 RH |
1543 | /* There are some special cases to be handled first. */ |
1544 | set = single_set (insn); | |
1545 | if (set) | |
32e66afd | 1546 | { |
bbf9b913 RH |
1547 | /* We're allowed to assign to a virtual register. This is interpreted |
1548 | to mean that the underlying register gets assigned the inverse | |
1549 | transformation. This is used, for example, in the handling of | |
1550 | non-local gotos. */ | |
82d6e6fc KG |
1551 | new_rtx = instantiate_new_reg (SET_DEST (set), &offset); |
1552 | if (new_rtx) | |
bbf9b913 RH |
1553 | { |
1554 | start_sequence (); | |
32e66afd | 1555 | |
b8704801 | 1556 | instantiate_virtual_regs_in_rtx (&SET_SRC (set)); |
82d6e6fc | 1557 | x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set), |
69a59f0f | 1558 | gen_int_mode (-offset, GET_MODE (new_rtx))); |
82d6e6fc KG |
1559 | x = force_operand (x, new_rtx); |
1560 | if (x != new_rtx) | |
1561 | emit_move_insn (new_rtx, x); | |
5a73491b | 1562 | |
bbf9b913 RH |
1563 | seq = get_insns (); |
1564 | end_sequence (); | |
5a73491b | 1565 | |
bbf9b913 RH |
1566 | emit_insn_before (seq, insn); |
1567 | delete_insn (insn); | |
1568 | return; | |
1569 | } | |
5a73491b | 1570 | |
bbf9b913 RH |
1571 | /* Handle a straight copy from a virtual register by generating a |
1572 | new add insn. The difference between this and falling through | |
1573 | to the generic case is avoiding a new pseudo and eliminating a | |
1574 | move insn in the initial rtl stream. */ | |
82d6e6fc | 1575 | new_rtx = instantiate_new_reg (SET_SRC (set), &offset); |
e6715081 RS |
1576 | if (new_rtx |
1577 | && maybe_ne (offset, 0) | |
bbf9b913 RH |
1578 | && REG_P (SET_DEST (set)) |
1579 | && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) | |
1580 | { | |
1581 | start_sequence (); | |
5a73491b | 1582 | |
2f1cd2eb RS |
1583 | x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx, |
1584 | gen_int_mode (offset, | |
1585 | GET_MODE (SET_DEST (set))), | |
1586 | SET_DEST (set), 1, OPTAB_LIB_WIDEN); | |
bbf9b913 RH |
1587 | if (x != SET_DEST (set)) |
1588 | emit_move_insn (SET_DEST (set), x); | |
770ae6cc | 1589 | |
bbf9b913 RH |
1590 | seq = get_insns (); |
1591 | end_sequence (); | |
87ce34d6 | 1592 | |
bbf9b913 RH |
1593 | emit_insn_before (seq, insn); |
1594 | delete_insn (insn); | |
87ce34d6 | 1595 | return; |
bbf9b913 | 1596 | } |
5a73491b | 1597 | |
bbf9b913 | 1598 | extract_insn (insn); |
9325973e | 1599 | insn_code = INSN_CODE (insn); |
5a73491b | 1600 | |
bbf9b913 RH |
1601 | /* Handle a plus involving a virtual register by determining if the |
1602 | operands remain valid if they're modified in place. */ | |
e6715081 | 1603 | poly_int64 delta; |
bbf9b913 RH |
1604 | if (GET_CODE (SET_SRC (set)) == PLUS |
1605 | && recog_data.n_operands >= 3 | |
1606 | && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0) | |
1607 | && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1) | |
e6715081 | 1608 | && poly_int_rtx_p (recog_data.operand[2], &delta) |
82d6e6fc | 1609 | && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset))) |
bbf9b913 | 1610 | { |
e6715081 | 1611 | offset += delta; |
5a73491b | 1612 | |
bbf9b913 | 1613 | /* If the sum is zero, then replace with a plain move. */ |
e6715081 | 1614 | if (known_eq (offset, 0) |
9325973e RH |
1615 | && REG_P (SET_DEST (set)) |
1616 | && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER) | |
bbf9b913 RH |
1617 | { |
1618 | start_sequence (); | |
82d6e6fc | 1619 | emit_move_insn (SET_DEST (set), new_rtx); |
bbf9b913 RH |
1620 | seq = get_insns (); |
1621 | end_sequence (); | |
d1405722 | 1622 | |
bbf9b913 RH |
1623 | emit_insn_before (seq, insn); |
1624 | delete_insn (insn); | |
1625 | return; | |
1626 | } | |
d1405722 | 1627 | |
bbf9b913 | 1628 | x = gen_int_mode (offset, recog_data.operand_mode[2]); |
bbf9b913 RH |
1629 | |
1630 | /* Using validate_change and apply_change_group here leaves | |
1631 | recog_data in an invalid state. Since we know exactly what | |
1632 | we want to check, do those two by hand. */ | |
82d6e6fc | 1633 | if (safe_insn_predicate (insn_code, 1, new_rtx) |
bbf9b913 RH |
1634 | && safe_insn_predicate (insn_code, 2, x)) |
1635 | { | |
82d6e6fc | 1636 | *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx; |
bbf9b913 RH |
1637 | *recog_data.operand_loc[2] = recog_data.operand[2] = x; |
1638 | any_change = true; | |
9325973e RH |
1639 | |
1640 | /* Fall through into the regular operand fixup loop in | |
1641 | order to take care of operands other than 1 and 2. */ | |
bbf9b913 RH |
1642 | } |
1643 | } | |
1644 | } | |
d1405722 | 1645 | else |
9325973e RH |
1646 | { |
1647 | extract_insn (insn); | |
1648 | insn_code = INSN_CODE (insn); | |
1649 | } | |
5dc96d60 | 1650 | |
bbf9b913 RH |
1651 | /* In the general case, we expect virtual registers to appear only in |
1652 | operands, and then only as either bare registers or inside memories. */ | |
1653 | for (i = 0; i < recog_data.n_operands; ++i) | |
1654 | { | |
1655 | x = recog_data.operand[i]; | |
1656 | switch (GET_CODE (x)) | |
1657 | { | |
1658 | case MEM: | |
1659 | { | |
1660 | rtx addr = XEXP (x, 0); | |
bbf9b913 | 1661 | |
b8704801 | 1662 | if (!instantiate_virtual_regs_in_rtx (&addr)) |
bbf9b913 RH |
1663 | continue; |
1664 | ||
1665 | start_sequence (); | |
23b33725 | 1666 | x = replace_equiv_address (x, addr, true); |
a5bfb13a MM |
1667 | /* It may happen that the address with the virtual reg |
1668 | was valid (e.g. based on the virtual stack reg, which might | |
1669 | be acceptable to the predicates with all offsets), whereas | |
1670 | the address now isn't anymore, for instance when the address | |
1671 | is still offsetted, but the base reg isn't virtual-stack-reg | |
1672 | anymore. Below we would do a force_reg on the whole operand, | |
1673 | but this insn might actually only accept memory. Hence, | |
1674 | before doing that last resort, try to reload the address into | |
1675 | a register, so this operand stays a MEM. */ | |
1676 | if (!safe_insn_predicate (insn_code, i, x)) | |
1677 | { | |
1678 | addr = force_reg (GET_MODE (addr), addr); | |
23b33725 | 1679 | x = replace_equiv_address (x, addr, true); |
a5bfb13a | 1680 | } |
bbf9b913 RH |
1681 | seq = get_insns (); |
1682 | end_sequence (); | |
1683 | if (seq) | |
1684 | emit_insn_before (seq, insn); | |
1685 | } | |
1686 | break; | |
1687 | ||
1688 | case REG: | |
82d6e6fc KG |
1689 | new_rtx = instantiate_new_reg (x, &offset); |
1690 | if (new_rtx == NULL) | |
bbf9b913 | 1691 | continue; |
e6715081 | 1692 | if (known_eq (offset, 0)) |
82d6e6fc | 1693 | x = new_rtx; |
bbf9b913 RH |
1694 | else |
1695 | { | |
1696 | start_sequence (); | |
6f086dfc | 1697 | |
bbf9b913 RH |
1698 | /* Careful, special mode predicates may have stuff in |
1699 | insn_data[insn_code].operand[i].mode that isn't useful | |
1700 | to us for computing a new value. */ | |
1701 | /* ??? Recognize address_operand and/or "p" constraints | |
1702 | to see if (plus new offset) is valid before we put |
1703 | this through expand_simple_binop. */ | |
82d6e6fc | 1704 | x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx, |
2f1cd2eb RS |
1705 | gen_int_mode (offset, GET_MODE (x)), |
1706 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
bbf9b913 RH |
1707 | seq = get_insns (); |
1708 | end_sequence (); | |
1709 | emit_insn_before (seq, insn); | |
1710 | } | |
1711 | break; | |
6f086dfc | 1712 | |
bbf9b913 | 1713 | case SUBREG: |
82d6e6fc KG |
1714 | new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset); |
1715 | if (new_rtx == NULL) | |
bbf9b913 | 1716 | continue; |
e6715081 | 1717 | if (maybe_ne (offset, 0)) |
bbf9b913 RH |
1718 | { |
1719 | start_sequence (); | |
2f1cd2eb RS |
1720 | new_rtx = expand_simple_binop |
1721 | (GET_MODE (new_rtx), PLUS, new_rtx, | |
1722 | gen_int_mode (offset, GET_MODE (new_rtx)), | |
1723 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
bbf9b913 RH |
1724 | seq = get_insns (); |
1725 | end_sequence (); | |
1726 | emit_insn_before (seq, insn); | |
1727 | } | |
82d6e6fc KG |
1728 | x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx, |
1729 | GET_MODE (new_rtx), SUBREG_BYTE (x)); | |
7314c7dd | 1730 | gcc_assert (x); |
bbf9b913 | 1731 | break; |
6f086dfc | 1732 | |
bbf9b913 RH |
1733 | default: |
1734 | continue; | |
1735 | } | |
6f086dfc | 1736 | |
bbf9b913 RH |
1737 | /* At this point, X contains the new value for the operand. |
1738 | Validate the new value vs the insn predicate. Note that | |
1739 | asm insns will have insn_code -1 here. */ | |
1740 | if (!safe_insn_predicate (insn_code, i, x)) | |
6ba1bd36 JM |
1741 | { |
1742 | start_sequence (); | |
f7ce0951 SE |
1743 | if (REG_P (x)) |
1744 | { | |
1745 | gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER); | |
1746 | x = copy_to_reg (x); | |
1747 | } | |
1748 | else | |
1749 | x = force_reg (insn_data[insn_code].operand[i].mode, x); | |
6ba1bd36 JM |
1750 | seq = get_insns (); |
1751 | end_sequence (); | |
1752 | if (seq) | |
1753 | emit_insn_before (seq, insn); | |
1754 | } | |
6f086dfc | 1755 | |
bbf9b913 RH |
1756 | *recog_data.operand_loc[i] = recog_data.operand[i] = x; |
1757 | any_change = true; | |
1758 | } | |
6f086dfc | 1759 | |
bbf9b913 RH |
1760 | if (any_change) |
1761 | { | |
1762 | /* Propagate operand changes into the duplicates. */ | |
1763 | for (i = 0; i < recog_data.n_dups; ++i) | |
1764 | *recog_data.dup_loc[i] | |
3e916873 | 1765 | = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]); |
5dc96d60 | 1766 | |
bbf9b913 RH |
1767 | /* Force re-recognition of the instruction for validation. */ |
1768 | INSN_CODE (insn) = -1; | |
1769 | } | |
6f086dfc | 1770 | |
bbf9b913 | 1771 | if (asm_noperands (PATTERN (insn)) >= 0) |
6f086dfc | 1772 | { |
bbf9b913 | 1773 | if (!check_asm_operands (PATTERN (insn))) |
6f086dfc | 1774 | { |
bbf9b913 | 1775 | error_for_asm (insn, "impossible constraint in %<asm%>"); |
5a860835 JJ |
1776 | /* For asm goto, instead of fixing up all the edges |
1777 | just clear the template and clear input operands | |
1778 | (asm goto doesn't have any output operands). */ | |
1779 | if (JUMP_P (insn)) | |
1780 | { | |
1781 | rtx asm_op = extract_asm_operands (PATTERN (insn)); | |
1782 | ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup (""); | |
1783 | ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0); | |
1784 | ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0); | |
1785 | } | |
1786 | else | |
1787 | delete_insn (insn); | |
bbf9b913 RH |
1788 | } |
1789 | } | |
1790 | else | |
1791 | { | |
1792 | if (recog_memoized (insn) < 0) | |
1793 | fatal_insn_not_found (insn); | |
1794 | } | |
1795 | } | |
14a774a9 | 1796 | |
bbf9b913 RH |
1797 | /* Subroutine of instantiate_decls. Given RTL representing a decl, |
1798 | do any instantiation required. */ | |
14a774a9 | 1799 | |
e41b2a33 PB |
1800 | void |
1801 | instantiate_decl_rtl (rtx x) | |
bbf9b913 RH |
1802 | { |
1803 | rtx addr; | |
6f086dfc | 1804 | |
bbf9b913 RH |
1805 | if (x == 0) |
1806 | return; | |
6f086dfc | 1807 | |
bbf9b913 RH |
1808 | /* If this is a CONCAT, recurse for the pieces. */ |
1809 | if (GET_CODE (x) == CONCAT) | |
1810 | { | |
e41b2a33 PB |
1811 | instantiate_decl_rtl (XEXP (x, 0)); |
1812 | instantiate_decl_rtl (XEXP (x, 1)); | |
bbf9b913 RH |
1813 | return; |
1814 | } | |
6f086dfc | 1815 | |
bbf9b913 RH |
1816 | /* If this is not a MEM, no need to do anything. Similarly if the |
1817 | address is a constant or a register that is not a virtual register. */ | |
1818 | if (!MEM_P (x)) | |
1819 | return; | |
6f086dfc | 1820 | |
bbf9b913 RH |
1821 | addr = XEXP (x, 0); |
1822 | if (CONSTANT_P (addr) | |
1823 | || (REG_P (addr) | |
1824 | && (REGNO (addr) < FIRST_VIRTUAL_REGISTER | |
1825 | || REGNO (addr) > LAST_VIRTUAL_REGISTER))) | |
1826 | return; | |
6f086dfc | 1827 | |
b8704801 | 1828 | instantiate_virtual_regs_in_rtx (&XEXP (x, 0)); |
bbf9b913 | 1829 | } |
6f086dfc | 1830 | |
434eba35 JJ |
1831 | /* Helper for instantiate_decls called via walk_tree: Process all decls |
1832 | in the given DECL_VALUE_EXPR. */ | |
1833 | ||
1834 | static tree | |
1835 | instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
1836 | { | |
1837 | tree t = *tp; | |
726a989a | 1838 | if (! EXPR_P (t)) |
434eba35 JJ |
1839 | { |
1840 | *walk_subtrees = 0; | |
37d6a488 AO |
1841 | if (DECL_P (t)) |
1842 | { | |
1843 | if (DECL_RTL_SET_P (t)) | |
1844 | instantiate_decl_rtl (DECL_RTL (t)); | |
1845 | if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t) | |
1846 | && DECL_INCOMING_RTL (t)) | |
1847 | instantiate_decl_rtl (DECL_INCOMING_RTL (t)); | |
8813a647 | 1848 | if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL) |
37d6a488 AO |
1849 | && DECL_HAS_VALUE_EXPR_P (t)) |
1850 | { | |
1851 | tree v = DECL_VALUE_EXPR (t); | |
1852 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1853 | } | |
1854 | } | |
434eba35 JJ |
1855 | } |
1856 | return NULL; | |
1857 | } | |
1858 | ||
bbf9b913 RH |
1859 | /* Subroutine of instantiate_decls: Process all decls in the given |
1860 | BLOCK node and all its subblocks. */ | |
6f086dfc | 1861 | |
bbf9b913 RH |
1862 | static void |
1863 | instantiate_decls_1 (tree let) | |
1864 | { | |
1865 | tree t; | |
6f086dfc | 1866 | |
910ad8de | 1867 | for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t)) |
434eba35 JJ |
1868 | { |
1869 | if (DECL_RTL_SET_P (t)) | |
e41b2a33 | 1870 | instantiate_decl_rtl (DECL_RTL (t)); |
8813a647 | 1871 | if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t)) |
434eba35 JJ |
1872 | { |
1873 | tree v = DECL_VALUE_EXPR (t); | |
1874 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1875 | } | |
1876 | } | |
6f086dfc | 1877 | |
bbf9b913 | 1878 | /* Process all subblocks. */ |
87caf699 | 1879 | for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t)) |
bbf9b913 RH |
1880 | instantiate_decls_1 (t); |
1881 | } | |
6f086dfc | 1882 | |
bbf9b913 RH |
1883 | /* Scan all decls in FNDECL (both variables and parameters) and instantiate |
1884 | all virtual registers in their DECL_RTL's. */ | |
6f086dfc | 1885 | |
bbf9b913 RH |
1886 | static void |
1887 | instantiate_decls (tree fndecl) | |
1888 | { | |
c021f10b NF |
1889 | tree decl; |
1890 | unsigned ix; | |
6f086dfc | 1891 | |
bbf9b913 | 1892 | /* Process all parameters of the function. */ |
910ad8de | 1893 | for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl)) |
bbf9b913 | 1894 | { |
e41b2a33 PB |
1895 | instantiate_decl_rtl (DECL_RTL (decl)); |
1896 | instantiate_decl_rtl (DECL_INCOMING_RTL (decl)); | |
434eba35 JJ |
1897 | if (DECL_HAS_VALUE_EXPR_P (decl)) |
1898 | { | |
1899 | tree v = DECL_VALUE_EXPR (decl); | |
1900 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1901 | } | |
bbf9b913 | 1902 | } |
4fd796bb | 1903 | |
37d6a488 AO |
1904 | if ((decl = DECL_RESULT (fndecl)) |
1905 | && TREE_CODE (decl) == RESULT_DECL) | |
1906 | { | |
1907 | if (DECL_RTL_SET_P (decl)) | |
1908 | instantiate_decl_rtl (DECL_RTL (decl)); | |
1909 | if (DECL_HAS_VALUE_EXPR_P (decl)) | |
1910 | { | |
1911 | tree v = DECL_VALUE_EXPR (decl); | |
1912 | walk_tree (&v, instantiate_expr, NULL, NULL); | |
1913 | } | |
1914 | } | |
1915 | ||
3fd48b12 EB |
1916 | /* Process the saved static chain if it exists. */ |
1917 | decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl; | |
1918 | if (decl && DECL_HAS_VALUE_EXPR_P (decl)) | |
1919 | instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl))); | |
1920 | ||
bbf9b913 | 1921 | /* Now process all variables defined in the function or its subblocks. */ |
51b86113 DM |
1922 | if (DECL_INITIAL (fndecl)) |
1923 | instantiate_decls_1 (DECL_INITIAL (fndecl)); | |
802e9f8e | 1924 | |
c021f10b NF |
1925 | FOR_EACH_LOCAL_DECL (cfun, ix, decl) |
1926 | if (DECL_RTL_SET_P (decl)) | |
1927 | instantiate_decl_rtl (DECL_RTL (decl)); | |
9771b263 | 1928 | vec_free (cfun->local_decls); |
bbf9b913 | 1929 | } |
6f086dfc | 1930 | |
bbf9b913 RH |
1931 | /* Pass through the INSNS of function FNDECL and convert virtual register |
1932 | references to hard register references. */ | |
6f086dfc | 1933 | |
c2924966 | 1934 | static unsigned int |
bbf9b913 RH |
1935 | instantiate_virtual_regs (void) |
1936 | { | |
691fe203 | 1937 | rtx_insn *insn; |
6f086dfc | 1938 | |
bbf9b913 RH |
1939 | /* Compute the offsets to use for this function. */ |
1940 | in_arg_offset = FIRST_PARM_OFFSET (current_function_decl); | |
2a31c321 | 1941 | var_offset = targetm.starting_frame_offset (); |
bbf9b913 RH |
1942 | dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl); |
1943 | out_arg_offset = STACK_POINTER_OFFSET; | |
f6672e8e RH |
1944 | #ifdef FRAME_POINTER_CFA_OFFSET |
1945 | cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl); | |
1946 | #else | |
bbf9b913 | 1947 | cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl); |
f6672e8e | 1948 | #endif |
e9a25f70 | 1949 | |
bbf9b913 RH |
1950 | /* Initialize recognition, indicating that volatile is OK. */ |
1951 | init_recog (); | |
6f086dfc | 1952 | |
bbf9b913 RH |
1953 | /* Scan through all the insns, instantiating every virtual register still |
1954 | present. */ | |
45dbce1b NF |
1955 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
1956 | if (INSN_P (insn)) | |
1957 | { | |
1958 | /* These patterns in the instruction stream can never be recognized. | |
1959 | Fortunately, they shouldn't contain virtual registers either. */ | |
39718607 | 1960 | if (GET_CODE (PATTERN (insn)) == USE |
45dbce1b | 1961 | || GET_CODE (PATTERN (insn)) == CLOBBER |
36f52e8f AO |
1962 | || GET_CODE (PATTERN (insn)) == ASM_INPUT |
1963 | || DEBUG_MARKER_INSN_P (insn)) | |
45dbce1b | 1964 | continue; |
36f52e8f AO |
1965 | else if (DEBUG_BIND_INSN_P (insn)) |
1966 | instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn)); | |
45dbce1b NF |
1967 | else |
1968 | instantiate_virtual_regs_in_insn (insn); | |
ba4807a0 | 1969 | |
4654c0cf | 1970 | if (insn->deleted ()) |
45dbce1b | 1971 | continue; |
7114321e | 1972 | |
b8704801 | 1973 | instantiate_virtual_regs_in_rtx (®_NOTES (insn)); |
ba4807a0 | 1974 | |
45dbce1b NF |
1975 | /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */ |
1976 | if (CALL_P (insn)) | |
b8704801 | 1977 | instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn)); |
45dbce1b | 1978 | } |
6f086dfc | 1979 | |
bbf9b913 RH |
1980 | /* Instantiate the virtual registers in the DECLs for debugging purposes. */ |
1981 | instantiate_decls (current_function_decl); | |
1982 | ||
e41b2a33 PB |
1983 | targetm.instantiate_decls (); |
1984 | ||
bbf9b913 RH |
1985 | /* Indicate that, from now on, assign_stack_local should use |
1986 | frame_pointer_rtx. */ | |
1987 | virtuals_instantiated = 1; | |
d3c12306 | 1988 | |
c2924966 | 1989 | return 0; |
6f086dfc | 1990 | } |
ef330312 | 1991 | |
27a4cd48 DM |
1992 | namespace { |
1993 | ||
1994 | const pass_data pass_data_instantiate_virtual_regs = | |
1995 | { | |
1996 | RTL_PASS, /* type */ | |
1997 | "vregs", /* name */ | |
1998 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
1999 | TV_NONE, /* tv_id */ |
2000 | 0, /* properties_required */ | |
2001 | 0, /* properties_provided */ | |
2002 | 0, /* properties_destroyed */ | |
2003 | 0, /* todo_flags_start */ | |
2004 | 0, /* todo_flags_finish */ | |
ef330312 PB |
2005 | }; |
2006 | ||
27a4cd48 DM |
2007 | class pass_instantiate_virtual_regs : public rtl_opt_pass |
2008 | { | |
2009 | public: | |
c3284718 RS |
2010 | pass_instantiate_virtual_regs (gcc::context *ctxt) |
2011 | : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt) | |
27a4cd48 DM |
2012 | {} |
2013 | ||
2014 | /* opt_pass methods: */ | |
be55bfe6 TS |
2015 | virtual unsigned int execute (function *) |
2016 | { | |
2017 | return instantiate_virtual_regs (); | |
2018 | } | |
27a4cd48 DM |
2019 | |
2020 | }; // class pass_instantiate_virtual_regs | |
2021 | ||
2022 | } // anon namespace | |
2023 | ||
2024 | rtl_opt_pass * | |
2025 | make_pass_instantiate_virtual_regs (gcc::context *ctxt) | |
2026 | { | |
2027 | return new pass_instantiate_virtual_regs (ctxt); | |
2028 | } | |
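/* Sketch of how this pass is scheduled (illustrative; the authoritative
   entry lives in passes.def rather than in this file):

     NEXT_PASS (pass_instantiate_virtual_regs);

   placed early in the RTL pipeline, shortly after expansion, so that
   every insn is rewritten once while virtual registers can still
   occur.  */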
2029 | ||
6f086dfc | 2030 | \f |
d181c154 RS |
2031 | /* Return 1 if EXP is an aggregate type (or a value with aggregate type). |
2032 | This means a type for which function calls must pass an address to the | |
2033 | function or get an address back from the function. | |
2034 | EXP may be a type node or an expression (whose type is tested). */ | |
6f086dfc RS |
2035 | |
2036 | int | |
586de218 | 2037 | aggregate_value_p (const_tree exp, const_tree fntype) |
6f086dfc | 2038 | { |
d47d0a8d | 2039 | const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); |
9d790a4f RS |
2040 | int i, regno, nregs; |
2041 | rtx reg; | |
2f939d94 | 2042 | |
61f71b34 DD |
2043 | if (fntype) |
2044 | switch (TREE_CODE (fntype)) | |
2045 | { | |
2046 | case CALL_EXPR: | |
d47d0a8d EB |
2047 | { |
2048 | tree fndecl = get_callee_fndecl (fntype); | |
1304953e JJ |
2049 | if (fndecl) |
2050 | fntype = TREE_TYPE (fndecl); | |
2051 | else if (CALL_EXPR_FN (fntype)) | |
2052 | fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))); | |
2053 | else | |
2054 | /* For internal functions, assume nothing needs to be | |
2055 | returned in memory. */ | |
2056 | return 0; | |
d47d0a8d | 2057 | } |
61f71b34 DD |
2058 | break; |
2059 | case FUNCTION_DECL: | |
d47d0a8d | 2060 | fntype = TREE_TYPE (fntype); |
61f71b34 DD |
2061 | break; |
2062 | case FUNCTION_TYPE: | |
2063 | case METHOD_TYPE: | |
2064 | break; | |
2065 | case IDENTIFIER_NODE: | |
d47d0a8d | 2066 | fntype = NULL_TREE; |
61f71b34 DD |
2067 | break; |
2068 | default: | |
d47d0a8d | 2069 | /* We don't expect other tree types here. */ |
0bccc606 | 2070 | gcc_unreachable (); |
61f71b34 DD |
2071 | } |
2072 | ||
d47d0a8d | 2073 | if (VOID_TYPE_P (type)) |
d7bf8ada | 2074 | return 0; |
500c353d | 2075 | |
ebf0bf7f JJ |
2076 | /* If a record should be passed the same as its first (and only) member |
2077 | don't pass it as an aggregate. */ | |
2078 | if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type)) | |
2079 | return aggregate_value_p (first_field (type), fntype); | |
2080 | ||
cc77ae10 JM |
2081 | /* If the front end has decided that this needs to be passed by |
2082 | reference, do so. */ | |
2083 | if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL) | |
2084 | && DECL_BY_REFERENCE (exp)) | |
2085 | return 1; | |
500c353d | 2086 | |
d47d0a8d EB |
2087 | /* Function types that are TREE_ADDRESSABLE force return in memory. */ |
2088 | if (fntype && TREE_ADDRESSABLE (fntype)) | |
500c353d | 2089 | return 1; |
b8698a0f | 2090 | |
956d6950 | 2091 | /* Types that are TREE_ADDRESSABLE must be constructed in memory, |
49a2e5b2 DE |
2092 | and thus can't be returned in registers. */ |
2093 | if (TREE_ADDRESSABLE (type)) | |
2094 | return 1; | |
d47d0a8d | 2095 | |
974aedcc MP |
2096 | if (TYPE_EMPTY_P (type)) |
2097 | return 0; | |
2098 | ||
05e3bdb9 | 2099 | if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type)) |
6f086dfc | 2100 | return 1; |
d47d0a8d EB |
2101 | |
2102 | if (targetm.calls.return_in_memory (type, fntype)) | |
2103 | return 1; | |
2104 | ||
9d790a4f RS |
2105 | /* Make sure we have suitable call-clobbered regs to return |
2106 | the value in; if not, we must return it in memory. */ | |
1d636cc6 | 2107 | reg = hard_function_value (type, 0, fntype, 0); |
e71f7aa5 JW |
2108 | |
2109 | /* If we have something other than a REG (e.g. a PARALLEL), then assume | |
2110 | it is OK. */ | |
f8cfc6aa | 2111 | if (!REG_P (reg)) |
e71f7aa5 JW |
2112 | return 0; |
2113 | ||
9d790a4f | 2114 | regno = REGNO (reg); |
ad474626 | 2115 | nregs = hard_regno_nregs (regno, TYPE_MODE (type)); |
9d790a4f RS |
2116 | for (i = 0; i < nregs; i++) |
2117 | if (! call_used_regs[regno + i]) | |
2118 | return 1; | |
d47d0a8d | 2119 | |
6f086dfc RS |
2120 | return 0; |
2121 | } | |
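/* Illustrative use (a sketch, assuming the usual GCC internal headers):
   this predicate is what callers such as assign_parms_augmented_arg_list
   below consult to decide whether a function returns its value in memory
   and therefore receives a hidden return-slot pointer argument.  */

static bool
example_returns_in_memory_p (tree fndecl)
{
  /* Test the function's result decl against the function itself.  */
  return aggregate_value_p (DECL_RESULT (fndecl), fndecl) != 0;
}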
2122 | \f | |
8fff4fc1 RH |
2123 | /* Return true if we should assign DECL a pseudo register; false if it |
2124 | should live on the local stack. */ | |
2125 | ||
2126 | bool | |
fa233e34 | 2127 | use_register_for_decl (const_tree decl) |
8fff4fc1 | 2128 | { |
1f9ceff1 AO |
2129 | if (TREE_CODE (decl) == SSA_NAME) |
2130 | { | |
2131 | /* We often try to use the SSA_NAME, instead of its underlying | |
2132 | decl, to get type information and guide decisions, to avoid | |
2133 | differences of behavior between anonymous and named | |
2134 | variables, but in this one case we have to go for the actual | |
2135 | variable if there is one. The main reason is that, at least | |
2136 | at -O0, we want to place user variables on the stack, but we | |
2137 | don't mind using pseudos for anonymous or ignored temps. | |
2138 | Should we take the SSA_NAME, we'd conclude all SSA_NAMEs | |
2139 | should go in pseudos, whereas their corresponding variables | |
2140 | might have to go on the stack. So, disregarding the decl | |
2141 | here would negatively impact debug info at -O0, enable | |
2142 | coalescing between SSA_NAMEs that ought to get different | |
2143 | stack/pseudo assignments, and get the incoming argument | |
2144 | processing thoroughly confused by PARM_DECLs expected to live | |
2145 | in stack slots but assigned to pseudos. */ | |
2146 | if (!SSA_NAME_VAR (decl)) | |
2147 | return TYPE_MODE (TREE_TYPE (decl)) != BLKmode | |
2148 | && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl))); | |
2149 | ||
2150 | decl = SSA_NAME_VAR (decl); | |
2151 | } | |
2152 | ||
8fff4fc1 RH |
2153 | /* Honor volatile. */ |
2154 | if (TREE_SIDE_EFFECTS (decl)) | |
2155 | return false; | |
2156 | ||
2157 | /* Honor addressability. */ | |
2158 | if (TREE_ADDRESSABLE (decl)) | |
2159 | return false; | |
2160 | ||
f11a7b6d AO |
2161 | /* RESULT_DECLs are a bit special in that they're assigned without |
2162 | regard to use_register_for_decl, but we generally only store in | |
2163 | them. If we coalesce their SSA NAMEs, we'd better return a | |
2164 | result that matches the assignment in expand_function_start. */ | |
2165 | if (TREE_CODE (decl) == RESULT_DECL) | |
2166 | { | |
2167 | /* If it's not an aggregate, we're going to use a REG or a | |
2168 | PARALLEL containing a REG. */ | |
2169 | if (!aggregate_value_p (decl, current_function_decl)) | |
2170 | return true; | |
2171 | ||
2172 | /* If expand_function_start determines the return value, we'll | |
2173 | use MEM if it's not by reference. */ | |
2174 | if (cfun->returns_pcc_struct | |
2175 | || (targetm.calls.struct_value_rtx | |
2176 | (TREE_TYPE (current_function_decl), 1))) | |
2177 | return DECL_BY_REFERENCE (decl); | |
2178 | ||
2179 | /* Otherwise, we're taking an extra all.function_result_decl | |
2180 | argument. It's set up in assign_parms_augmented_arg_list, | |
2181 | under the (negated) conditions above, and then it's used to | |
2182 | set up the RESULT_DECL rtl in assign_parms, after looping |
2183 | over all parameters. Now, if the RESULT_DECL is not by | |
2184 | reference, we'll use a MEM either way. */ | |
2185 | if (!DECL_BY_REFERENCE (decl)) | |
2186 | return false; | |
2187 | ||
2188 | /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take | |
2189 | the function_result_decl's assignment. Since it's a pointer, | |
2190 | we can short-circuit a number of the tests below, and we must | |
2191 | duplicate them because we don't have the |
2192 | function_result_decl to test. */ | |
2193 | if (!targetm.calls.allocate_stack_slots_for_args ()) | |
2194 | return true; | |
2195 | /* We don't set DECL_IGNORED_P for the function_result_decl. */ | |
2196 | if (optimize) | |
2197 | return true; | |
2198 | /* We don't set DECL_REGISTER for the function_result_decl. */ | |
2199 | return false; | |
2200 | } | |
2201 | ||
8fff4fc1 RH |
2202 | /* Only register-like things go in registers. */ |
2203 | if (DECL_MODE (decl) == BLKmode) | |
2204 | return false; | |
2205 | ||
2206 | /* If -ffloat-store specified, don't put explicit float variables | |
2207 | into registers. */ | |
2208 | /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa | |
2209 | propagates values across these stores, and it probably shouldn't. */ | |
2210 | if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl))) | |
2211 | return false; | |
2212 | ||
de0fb905 AB |
2213 | if (!targetm.calls.allocate_stack_slots_for_args ()) |
2214 | return true; | |
2215 | ||
78e0d62b RH |
2216 | /* If we're not interested in tracking debugging information for |
2217 | this decl, then we can certainly put it in a register. */ | |
2218 | if (DECL_IGNORED_P (decl)) | |
8fff4fc1 RH |
2219 | return true; |
2220 | ||
d130d647 JJ |
2221 | if (optimize) |
2222 | return true; | |
2223 | ||
2224 | if (!DECL_REGISTER (decl)) | |
2225 | return false; | |
2226 | ||
5aaa8fb4 NS |
2227 | /* When not optimizing, disregard register keyword for types that |
2228 | could have methods, otherwise the methods won't be callable from | |
2229 | the debugger. */ | |
2230 | if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl))) | |
2231 | return false; | |
d130d647 JJ |
2232 | |
2233 | return true; | |
8fff4fc1 RH |
2234 | } |
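/* Illustrative consequence of the rules above: at -O0 a named user
   variable ("int i;") fails the DECL_IGNORED_P / DECL_REGISTER tests and
   is given a stack slot, while an anonymous compiler temporary (an
   SSA_NAME with no underlying variable) passes the early checks and is
   allowed a pseudo; with optimization enabled both normally end up in
   pseudos.  */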
2235 | ||
6071dc7f RH |
2236 | /* Structures to communicate between the subroutines of assign_parms. |
2237 | The first holds data persistent across all parameters, the second | |
2238 | is cleared out for each parameter. */ | |
6f086dfc | 2239 | |
6071dc7f | 2240 | struct assign_parm_data_all |
6f086dfc | 2241 | { |
d5cc9181 JR |
2242 | /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS |
2243 | should become a job of the target or otherwise encapsulated. */ | |
2244 | CUMULATIVE_ARGS args_so_far_v; | |
2245 | cumulative_args_t args_so_far; | |
6f086dfc | 2246 | struct args_size stack_args_size; |
6071dc7f RH |
2247 | tree function_result_decl; |
2248 | tree orig_fnargs; | |
7a688d52 DM |
2249 | rtx_insn *first_conversion_insn; |
2250 | rtx_insn *last_conversion_insn; | |
6071dc7f RH |
2251 | HOST_WIDE_INT pretend_args_size; |
2252 | HOST_WIDE_INT extra_pretend_bytes; | |
2253 | int reg_parm_stack_space; | |
2254 | }; | |
6f086dfc | 2255 | |
6071dc7f RH |
2256 | struct assign_parm_data_one |
2257 | { | |
2258 | tree nominal_type; | |
2259 | tree passed_type; | |
2260 | rtx entry_parm; | |
2261 | rtx stack_parm; | |
ef4bddc2 RS |
2262 | machine_mode nominal_mode; |
2263 | machine_mode passed_mode; | |
2264 | machine_mode promoted_mode; | |
6071dc7f RH |
2265 | struct locate_and_pad_arg_data locate; |
2266 | int partial; | |
2267 | BOOL_BITFIELD named_arg : 1; | |
6071dc7f RH |
2268 | BOOL_BITFIELD passed_pointer : 1; |
2269 | BOOL_BITFIELD on_stack : 1; | |
2270 | BOOL_BITFIELD loaded_in_reg : 1; | |
2271 | }; | |
ebb904cb | 2272 | |
6071dc7f | 2273 | /* A subroutine of assign_parms. Initialize ALL. */ |
6f086dfc | 2274 | |
6071dc7f RH |
2275 | static void |
2276 | assign_parms_initialize_all (struct assign_parm_data_all *all) | |
2277 | { | |
fc2f1f53 | 2278 | tree fntype ATTRIBUTE_UNUSED; |
6f086dfc | 2279 | |
6071dc7f RH |
2280 | memset (all, 0, sizeof (*all)); |
2281 | ||
2282 | fntype = TREE_TYPE (current_function_decl); | |
2283 | ||
2284 | #ifdef INIT_CUMULATIVE_INCOMING_ARGS | |
d5cc9181 | 2285 | INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX); |
6071dc7f | 2286 | #else |
d5cc9181 | 2287 | INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX, |
6071dc7f RH |
2288 | current_function_decl, -1); |
2289 | #endif | |
d5cc9181 | 2290 | all->args_so_far = pack_cumulative_args (&all->args_so_far_v); |
6071dc7f | 2291 | |
ddbb449f AM |
2292 | #ifdef INCOMING_REG_PARM_STACK_SPACE |
2293 | all->reg_parm_stack_space | |
2294 | = INCOMING_REG_PARM_STACK_SPACE (current_function_decl); | |
6071dc7f RH |
2295 | #endif |
2296 | } | |
6f086dfc | 2297 | |
6071dc7f RH |
2298 | /* If ARGS contains entries with complex types, split each such entry |
2299 | into two entries of the component type, modifying ARGS in place |
2300 | (one extra element is inserted for each argument that is split). */ |
2301 | ||
3b3f318a | 2302 | static void |
f11a7b6d | 2303 | split_complex_args (vec<tree> *args) |
6071dc7f | 2304 | { |
3b3f318a | 2305 | unsigned i; |
6071dc7f RH |
2306 | tree p; |
2307 | ||
9771b263 | 2308 | FOR_EACH_VEC_ELT (*args, i, p) |
6071dc7f RH |
2309 | { |
2310 | tree type = TREE_TYPE (p); | |
2311 | if (TREE_CODE (type) == COMPLEX_TYPE | |
2312 | && targetm.calls.split_complex_arg (type)) | |
2313 | { | |
2314 | tree decl; | |
2315 | tree subtype = TREE_TYPE (type); | |
6ccd356e | 2316 | bool addressable = TREE_ADDRESSABLE (p); |
6071dc7f RH |
2317 | |
2318 | /* Rewrite the PARM_DECL's type with its component. */ | |
3b3f318a | 2319 | p = copy_node (p); |
6071dc7f RH |
2320 | TREE_TYPE (p) = subtype; |
2321 | DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p)); | |
899ca90e | 2322 | SET_DECL_MODE (p, VOIDmode); |
6071dc7f RH |
2323 | DECL_SIZE (p) = NULL; |
2324 | DECL_SIZE_UNIT (p) = NULL; | |
6ccd356e AM |
2325 | /* If this arg must go in memory, put it in a pseudo here. |
2326 | We can't allow it to go in memory as per normal parms, | |
2327 | because the usual place might not have the imag part | |
2328 | adjacent to the real part. */ | |
2329 | DECL_ARTIFICIAL (p) = addressable; | |
2330 | DECL_IGNORED_P (p) = addressable; | |
2331 | TREE_ADDRESSABLE (p) = 0; | |
6071dc7f | 2332 | layout_decl (p, 0); |
9771b263 | 2333 | (*args)[i] = p; |
6071dc7f RH |
2334 | |
2335 | /* Build a second synthetic decl. */ | |
c2255bc4 AH |
2336 | decl = build_decl (EXPR_LOCATION (p), |
2337 | PARM_DECL, NULL_TREE, subtype); | |
6071dc7f | 2338 | DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p); |
6ccd356e AM |
2339 | DECL_ARTIFICIAL (decl) = addressable; |
2340 | DECL_IGNORED_P (decl) = addressable; | |
6071dc7f | 2341 | layout_decl (decl, 0); |
9771b263 | 2342 | args->safe_insert (++i, decl); |
6071dc7f RH |
2343 | } |
2344 | } | |
6071dc7f RH |
2345 | } |
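/* Worked example (illustrative): for a target whose split_complex_arg
   hook accepts _Complex double, a declaration such as

     void f (_Complex double z, int n);

   is rewritten by split_complex_args into the equivalent of

     void f (double z_re, double z_im, int n);

   where the first decl is Z with its type narrowed to the component type
   and the second is the synthetic, nameless PARM_DECL built above;
   "z_re"/"z_im" are names for exposition only.  */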
2346 | ||
2347 | /* A subroutine of assign_parms. Adjust the parameter list to incorporate | |
2348 | the hidden struct return argument, and (abi willing) complex args. | |
2349 | Return the new parameter list. */ | |
2350 | ||
9771b263 | 2351 | static vec<tree> |
6071dc7f RH |
2352 | assign_parms_augmented_arg_list (struct assign_parm_data_all *all) |
2353 | { | |
2354 | tree fndecl = current_function_decl; | |
2355 | tree fntype = TREE_TYPE (fndecl); | |
6e1aa848 | 2356 | vec<tree> fnargs = vNULL; |
3b3f318a RG |
2357 | tree arg; |
2358 | ||
910ad8de | 2359 | for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg)) |
9771b263 | 2360 | fnargs.safe_push (arg); |
3b3f318a RG |
2361 | |
2362 | all->orig_fnargs = DECL_ARGUMENTS (fndecl); | |
6f086dfc RS |
2363 | |
2364 | /* If struct value address is treated as the first argument, make it so. */ | |
61f71b34 | 2365 | if (aggregate_value_p (DECL_RESULT (fndecl), fndecl) |
e3b5732b | 2366 | && ! cfun->returns_pcc_struct |
61f71b34 | 2367 | && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0) |
6f086dfc | 2368 | { |
f9f29478 | 2369 | tree type = build_pointer_type (TREE_TYPE (fntype)); |
6071dc7f | 2370 | tree decl; |
6f086dfc | 2371 | |
c2255bc4 | 2372 | decl = build_decl (DECL_SOURCE_LOCATION (fndecl), |
8dcfef8f | 2373 | PARM_DECL, get_identifier (".result_ptr"), type); |
6071dc7f RH |
2374 | DECL_ARG_TYPE (decl) = type; |
2375 | DECL_ARTIFICIAL (decl) = 1; | |
8dcfef8f AO |
2376 | DECL_NAMELESS (decl) = 1; |
2377 | TREE_CONSTANT (decl) = 1; | |
f11a7b6d AO |
2378 | /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this |
2379 | changes, the end of the RESULT_DECL handling block in | |
2380 | use_register_for_decl must be adjusted to match. */ | |
6f086dfc | 2381 | |
910ad8de | 2382 | DECL_CHAIN (decl) = all->orig_fnargs; |
3b3f318a | 2383 | all->orig_fnargs = decl; |
9771b263 | 2384 | fnargs.safe_insert (0, decl); |
3b3f318a | 2385 | |
6071dc7f | 2386 | all->function_result_decl = decl; |
6f086dfc | 2387 | } |
718fe406 | 2388 | |
42ba5130 RH |
2389 | /* If the target wants to split complex arguments into scalars, do so. */ |
2390 | if (targetm.calls.split_complex_arg) | |
f11a7b6d | 2391 | split_complex_args (&fnargs); |
ded9bf77 | 2392 | |
6071dc7f RH |
2393 | return fnargs; |
2394 | } | |
e7949876 | 2395 | |
6071dc7f RH |
2396 | /* A subroutine of assign_parms. Examine PARM and pull out type and mode |
2397 | data for the parameter. Incorporate ABI specifics such as pass-by- | |
2398 | reference and type promotion. */ | |
6f086dfc | 2399 | |
6071dc7f RH |
2400 | static void |
2401 | assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, | |
2402 | struct assign_parm_data_one *data) | |
2403 | { | |
2404 | tree nominal_type, passed_type; | |
ef4bddc2 | 2405 | machine_mode nominal_mode, passed_mode, promoted_mode; |
cde0f3fd | 2406 | int unsignedp; |
6f086dfc | 2407 | |
6071dc7f RH |
2408 | memset (data, 0, sizeof (*data)); |
2409 | ||
fa10beec | 2410 | /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */ |
e3b5732b | 2411 | if (!cfun->stdarg) |
fa10beec | 2412 | data->named_arg = 1; /* No variadic parms. */ |
910ad8de | 2413 | else if (DECL_CHAIN (parm)) |
fa10beec | 2414 | data->named_arg = 1; /* Not the last non-variadic parm. */ |
d5cc9181 | 2415 | else if (targetm.calls.strict_argument_naming (all->args_so_far)) |
fa10beec | 2416 | data->named_arg = 1; /* Only variadic ones are unnamed. */ |
6071dc7f | 2417 | else |
fa10beec | 2418 | data->named_arg = 0; /* Treat as variadic. */ |
6071dc7f RH |
2419 | |
2420 | nominal_type = TREE_TYPE (parm); | |
2421 | passed_type = DECL_ARG_TYPE (parm); | |
2422 | ||
2423 | /* Look out for errors propagating this far. Also, if the parameter's | |
2424 | type is void then its value doesn't matter. */ | |
2425 | if (TREE_TYPE (parm) == error_mark_node | |
2426 | /* This can happen after weird syntax errors | |
2427 | or if an enum type is defined among the parms. */ | |
2428 | || TREE_CODE (parm) != PARM_DECL | |
2429 | || passed_type == NULL | |
2430 | || VOID_TYPE_P (nominal_type)) | |
2431 | { | |
2432 | nominal_type = passed_type = void_type_node; | |
2433 | nominal_mode = passed_mode = promoted_mode = VOIDmode; | |
2434 | goto egress; | |
2435 | } | |
108b7d3d | 2436 | |
6071dc7f RH |
2437 | /* Find mode of arg as it is passed, and mode of arg as it should be |
2438 | during execution of this function. */ | |
2439 | passed_mode = TYPE_MODE (passed_type); | |
2440 | nominal_mode = TYPE_MODE (nominal_type); | |
2441 | ||
ebf0bf7f JJ |
2442 | /* If the parm is to be passed as a transparent union or record, use the |
2443 | type of the first field for the tests below. We have already verified | |
2444 | that the modes are the same. */ | |
2445 | if ((TREE_CODE (passed_type) == UNION_TYPE | |
2446 | || TREE_CODE (passed_type) == RECORD_TYPE) | |
2447 | && TYPE_TRANSPARENT_AGGR (passed_type)) | |
2448 | passed_type = TREE_TYPE (first_field (passed_type)); | |
6071dc7f | 2449 | |
0976078c | 2450 | /* See if this arg was passed by invisible reference. */ |
d5cc9181 | 2451 | if (pass_by_reference (&all->args_so_far_v, passed_mode, |
0976078c | 2452 | passed_type, data->named_arg)) |
6071dc7f RH |
2453 | { |
2454 | passed_type = nominal_type = build_pointer_type (passed_type); | |
2455 | data->passed_pointer = true; | |
fd91cfe3 | 2456 | passed_mode = nominal_mode = TYPE_MODE (nominal_type); |
6071dc7f | 2457 | } |
6f086dfc | 2458 | |
6071dc7f | 2459 | /* Find mode as it is passed by the ABI. */ |
cde0f3fd PB |
2460 | unsignedp = TYPE_UNSIGNED (passed_type); |
2461 | promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp, | |
2462 | TREE_TYPE (current_function_decl), 0); | |
6f086dfc | 2463 | |
6071dc7f RH |
2464 | egress: |
2465 | data->nominal_type = nominal_type; | |
2466 | data->passed_type = passed_type; | |
2467 | data->nominal_mode = nominal_mode; | |
2468 | data->passed_mode = passed_mode; | |
2469 | data->promoted_mode = promoted_mode; | |
2470 | } | |
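/* Worked example (illustrative): for a parameter "struct big s" on a
   target whose pass_by_reference hook returns true for it, the code
   above replaces both types with "struct big *", sets
   data->passed_pointer, and takes the pointer's mode as the passed and
   nominal modes; promote_function_mode may then widen promoted_mode
   further according to the ABI.  */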
16bae307 | 2471 | |
6071dc7f | 2472 | /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */ |
6f086dfc | 2473 | |
6071dc7f RH |
2474 | static void |
2475 | assign_parms_setup_varargs (struct assign_parm_data_all *all, | |
2476 | struct assign_parm_data_one *data, bool no_rtl) | |
2477 | { | |
2478 | int varargs_pretend_bytes = 0; | |
2479 | ||
d5cc9181 | 2480 | targetm.calls.setup_incoming_varargs (all->args_so_far, |
6071dc7f RH |
2481 | data->promoted_mode, |
2482 | data->passed_type, | |
2483 | &varargs_pretend_bytes, no_rtl); | |
2484 | ||
2485 | /* If the back-end has requested extra stack space, record how much is | |
2486 | needed. Do not change pretend_args_size otherwise since it may be | |
2487 | nonzero from an earlier partial argument. */ | |
2488 | if (varargs_pretend_bytes > 0) | |
2489 | all->pretend_args_size = varargs_pretend_bytes; | |
2490 | } | |
a53e14c0 | 2491 | |
6071dc7f RH |
2492 | /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to |
2493 | the incoming location of the current parameter. */ | |
2494 | ||
2495 | static void | |
2496 | assign_parm_find_entry_rtl (struct assign_parm_data_all *all, | |
2497 | struct assign_parm_data_one *data) | |
2498 | { | |
2499 | HOST_WIDE_INT pretend_bytes = 0; | |
2500 | rtx entry_parm; | |
2501 | bool in_regs; | |
2502 | ||
2503 | if (data->promoted_mode == VOIDmode) | |
2504 | { | |
2505 | data->entry_parm = data->stack_parm = const0_rtx; | |
2506 | return; | |
2507 | } | |
a53e14c0 | 2508 | |
974aedcc MP |
2509 | targetm.calls.warn_parameter_passing_abi (all->args_so_far, |
2510 | data->passed_type); | |
2511 | ||
d5cc9181 | 2512 | entry_parm = targetm.calls.function_incoming_arg (all->args_so_far, |
3c07301f NF |
2513 | data->promoted_mode, |
2514 | data->passed_type, | |
2515 | data->named_arg); | |
6f086dfc | 2516 | |
6071dc7f RH |
2517 | if (entry_parm == 0) |
2518 | data->promoted_mode = data->passed_mode; | |
6f086dfc | 2519 | |
6071dc7f RH |
2520 | /* Determine parm's home in the stack, in case it arrives in the stack |
2521 | or we should pretend it did. Compute the stack position and rtx where | |
2522 | the argument arrives and its size. | |
6f086dfc | 2523 | |
6071dc7f RH |
2524 | There is one complexity here: If this was a parameter that would |
2525 | have been passed in registers, but wasn't only because it is | |
2526 | __builtin_va_alist, we want locate_and_pad_parm to treat it as if | |
2527 | it came in a register so that REG_PARM_STACK_SPACE isn't skipped. | |
2528 | In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0 | |
2529 | as it was the previous time. */ | |
31db0fe0 | 2530 | in_regs = (entry_parm != 0); |
6f086dfc | 2531 | #ifdef STACK_PARMS_IN_REG_PARM_AREA |
6071dc7f | 2532 | in_regs = true; |
e7949876 | 2533 | #endif |
6071dc7f RH |
2534 | if (!in_regs && !data->named_arg) |
2535 | { | |
d5cc9181 | 2536 | if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far)) |
e7949876 | 2537 | { |
6071dc7f | 2538 | rtx tem; |
d5cc9181 | 2539 | tem = targetm.calls.function_incoming_arg (all->args_so_far, |
3c07301f NF |
2540 | data->promoted_mode, |
2541 | data->passed_type, true); | |
6071dc7f | 2542 | in_regs = tem != NULL; |
e7949876 | 2543 | } |
6071dc7f | 2544 | } |
e7949876 | 2545 | |
6071dc7f RH |
2546 | /* If this parameter was passed both in registers and in the stack, use |
2547 | the copy on the stack. */ | |
fe984136 RH |
2548 | if (targetm.calls.must_pass_in_stack (data->promoted_mode, |
2549 | data->passed_type)) | |
6071dc7f | 2550 | entry_parm = 0; |
e7949876 | 2551 | |
6071dc7f RH |
2552 | if (entry_parm) |
2553 | { | |
2554 | int partial; | |
2555 | ||
d5cc9181 | 2556 | partial = targetm.calls.arg_partial_bytes (all->args_so_far, |
78a52f11 RH |
2557 | data->promoted_mode, |
2558 | data->passed_type, | |
2559 | data->named_arg); | |
6071dc7f RH |
2560 | data->partial = partial; |
2561 | ||
2562 | /* The caller might already have allocated stack space for the | |
2563 | register parameters. */ | |
2564 | if (partial != 0 && all->reg_parm_stack_space == 0) | |
975f3818 | 2565 | { |
6071dc7f RH |
2566 | /* Part of this argument is passed in registers and part |
2567 | is passed on the stack. Ask the prologue code to extend | |
2568 | the stack part so that we can recreate the full value. | |
2569 | ||
2570 | PRETEND_BYTES is the size of the registers we need to store. | |
2571 | CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra | |
2572 | stack space that the prologue should allocate. | |
2573 | ||
2574 | Internally, gcc assumes that the argument pointer is aligned | |
2575 | to STACK_BOUNDARY bits. This is used both for alignment | |
2576 | optimizations (see init_emit) and to locate arguments that are | |
2577 | aligned to more than PARM_BOUNDARY bits. We must preserve this | |
2578 | invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to | |
2579 | a stack boundary. */ | |
2580 | ||
2581 | /* We assume at most one partial arg, and it must be the first | |
2582 | argument on the stack. */ | |
0bccc606 | 2583 | gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size); |
6071dc7f | 2584 | |
78a52f11 | 2585 | pretend_bytes = partial; |
6071dc7f RH |
2586 | all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES); |
2587 | ||
2588 | /* We want to align relative to the actual stack pointer, so | |
2589 | don't include this in the stack size until later. */ | |
2590 | all->extra_pretend_bytes = all->pretend_args_size; | |
975f3818 | 2591 | } |
6071dc7f | 2592 | } |
e7949876 | 2593 | |
6071dc7f | 2594 | locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs, |
2e4ceca5 | 2595 | all->reg_parm_stack_space, |
6071dc7f RH |
2596 | entry_parm ? data->partial : 0, current_function_decl, |
2597 | &all->stack_args_size, &data->locate); | |
6f086dfc | 2598 | |
e94a448f L |
2599 | /* Update parm_stack_boundary if this parameter is passed in the |
2600 | stack. */ | |
2601 | if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary) | |
2602 | crtl->parm_stack_boundary = data->locate.boundary; | |
2603 | ||
6071dc7f RH |
2604 | /* Adjust offsets to include the pretend args. */ |
2605 | pretend_bytes = all->extra_pretend_bytes - pretend_bytes; | |
2606 | data->locate.slot_offset.constant += pretend_bytes; | |
2607 | data->locate.offset.constant += pretend_bytes; | |
ebca59c3 | 2608 | |
6071dc7f RH |
2609 | data->entry_parm = entry_parm; |
2610 | } | |
6f086dfc | 2611 | |
6071dc7f RH |
2612 | /* A subroutine of assign_parms. If there is actually space on the stack |
2613 | for this parm, count it in stack_args_size and return true. */ | |
6f086dfc | 2614 | |
6071dc7f RH |
2615 | static bool |
2616 | assign_parm_is_stack_parm (struct assign_parm_data_all *all, | |
2617 | struct assign_parm_data_one *data) | |
2618 | { | |
2e6ae27f | 2619 | /* Trivially true if we've no incoming register. */ |
31db0fe0 | 2620 | if (data->entry_parm == NULL) |
6071dc7f RH |
2621 | ; |
2622 | /* Also true if we're partially in registers and partially not, | |
2623 | since we've arranged to drop the entire argument on the stack. */ | |
2624 | else if (data->partial != 0) | |
2625 | ; | |
2626 | /* Also true if the target says that it's passed in both registers | |
2627 | and on the stack. */ | |
2628 | else if (GET_CODE (data->entry_parm) == PARALLEL | |
2629 | && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX) | |
2630 | ; | |
2631 | /* Also true if the target says that there's stack allocated for | |
2632 | all register parameters. */ | |
2633 | else if (all->reg_parm_stack_space > 0) | |
2634 | ; | |
2635 | /* Otherwise, no, this parameter has no ABI defined stack slot. */ | |
2636 | else | |
2637 | return false; | |
6f086dfc | 2638 | |
6071dc7f RH |
2639 | all->stack_args_size.constant += data->locate.size.constant; |
2640 | if (data->locate.size.var) | |
2641 | ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var); | |
718fe406 | 2642 | |
6071dc7f RH |
2643 | return true; |
2644 | } | |
0d1416c6 | 2645 | |
6071dc7f RH |
2646 | /* A subroutine of assign_parms. Given that this parameter is allocated |
2647 | stack space by the ABI, find it. */ | |
6f086dfc | 2648 | |
6071dc7f RH |
2649 | static void |
2650 | assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data) | |
2651 | { | |
2652 | rtx offset_rtx, stack_parm; | |
2653 | unsigned int align, boundary; | |
6f086dfc | 2654 | |
6071dc7f RH |
2655 | /* If we're passing this arg using a reg, make its stack home the |
2656 | aligned stack slot. */ | |
2657 | if (data->entry_parm) | |
2658 | offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset); | |
2659 | else | |
2660 | offset_rtx = ARGS_SIZE_RTX (data->locate.offset); | |
2661 | ||
38173d38 | 2662 | stack_parm = crtl->args.internal_arg_pointer; |
6071dc7f RH |
2663 | if (offset_rtx != const0_rtx) |
2664 | stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx); | |
2665 | stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm); | |
2666 | ||
08ab0acf | 2667 | if (!data->passed_pointer) |
997f78fb | 2668 | { |
08ab0acf JJ |
2669 | set_mem_attributes (stack_parm, parm, 1); |
2670 | /* set_mem_attributes could set MEM_SIZE to the passed mode's size, | |
2671 | while the promoted mode's size is needed. */ |
2672 | if (data->promoted_mode != BLKmode | |
2673 | && data->promoted_mode != DECL_MODE (parm)) | |
997f78fb | 2674 | { |
f5541398 | 2675 | set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode)); |
527210c4 | 2676 | if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm)) |
08ab0acf | 2677 | { |
91914e56 RS |
2678 | poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm), |
2679 | data->promoted_mode); | |
2680 | if (maybe_ne (offset, 0)) | |
527210c4 | 2681 | set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset); |
08ab0acf | 2682 | } |
997f78fb JJ |
2683 | } |
2684 | } | |
6071dc7f | 2685 | |
bfc45551 AM |
2686 | boundary = data->locate.boundary; |
2687 | align = BITS_PER_UNIT; | |
6071dc7f RH |
2688 | |
2689 | /* If we're padding upward, we know that the alignment of the slot | |
c2ed6cf8 | 2690 | is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're |
6071dc7f RH |
2691 | intentionally forcing upward padding. Otherwise we have to come |
2692 | up with a guess at the alignment based on OFFSET_RTX. */ | |
a20c5714 | 2693 | poly_int64 offset; |
76b0cbf8 | 2694 | if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm) |
6071dc7f | 2695 | align = boundary; |
a20c5714 | 2696 | else if (poly_int_rtx_p (offset_rtx, &offset)) |
6071dc7f | 2697 | { |
a20c5714 RS |
2698 | align = least_bit_hwi (boundary); |
2699 | unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT; | |
2700 | if (offset_align != 0) | |
2701 | align = MIN (align, offset_align); | |
6071dc7f | 2702 | } |
bfc45551 | 2703 | set_mem_align (stack_parm, align); |
6071dc7f RH |
2704 | |
2705 | if (data->entry_parm) | |
2706 | set_reg_attrs_for_parm (data->entry_parm, stack_parm); | |
2707 | ||
2708 | data->stack_parm = stack_parm; | |
2709 | } | |
2710 | ||
2711 | /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's | |
2712 | always valid and contiguous. */ | |
2713 | ||
2714 | static void | |
2715 | assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data) | |
2716 | { | |
2717 | rtx entry_parm = data->entry_parm; | |
2718 | rtx stack_parm = data->stack_parm; | |
2719 | ||
2720 | /* If this parm was passed part in regs and part in memory, pretend it | |
2721 | arrived entirely in memory by pushing the register-part onto the stack. | |
2722 | In the special case of a DImode or DFmode that is split, we could put | |
2723 | it together in a pseudoreg directly, but for now that's not worth | |
2724 | bothering with. */ | |
2725 | if (data->partial != 0) | |
2726 | { | |
2727 | /* Handle calls that pass values in multiple non-contiguous | |
2728 | locations. The Irix 6 ABI has examples of this. */ | |
2729 | if (GET_CODE (entry_parm) == PARALLEL) | |
1a8cb155 | 2730 | emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm, |
b8698a0f | 2731 | data->passed_type, |
6071dc7f | 2732 | int_size_in_bytes (data->passed_type)); |
6f086dfc | 2733 | else |
78a52f11 RH |
2734 | { |
2735 | gcc_assert (data->partial % UNITS_PER_WORD == 0); | |
1a8cb155 RS |
2736 | move_block_from_reg (REGNO (entry_parm), |
2737 | validize_mem (copy_rtx (stack_parm)), | |
78a52f11 RH |
2738 | data->partial / UNITS_PER_WORD); |
2739 | } | |
6f086dfc | 2740 | |
6071dc7f RH |
2741 | entry_parm = stack_parm; |
2742 | } | |
6f086dfc | 2743 | |
6071dc7f RH |
2744 | /* If we didn't decide this parm came in a register, by default it came |
2745 | on the stack. */ | |
2746 | else if (entry_parm == NULL) | |
2747 | entry_parm = stack_parm; | |
2748 | ||
2749 | /* When an argument is passed in multiple locations, we can't make use | |
2750 | of this information, but we can save some copying if the whole argument | |
2751 | is passed in a single register. */ | |
2752 | else if (GET_CODE (entry_parm) == PARALLEL | |
2753 | && data->nominal_mode != BLKmode | |
2754 | && data->passed_mode != BLKmode) | |
2755 | { | |
2756 | size_t i, len = XVECLEN (entry_parm, 0); | |
2757 | ||
2758 | for (i = 0; i < len; i++) | |
2759 | if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX | |
2760 | && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0)) | |
2761 | && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) | |
2762 | == data->passed_mode) | |
2763 | && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0) | |
2764 | { | |
2765 | entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0); | |
2766 | break; | |
2767 | } | |
2768 | } | |
e68a6ce1 | 2769 | |
6071dc7f RH |
2770 | data->entry_parm = entry_parm; |
2771 | } | |
6f086dfc | 2772 | |
4d2a9850 DJ |
2773 | /* A subroutine of assign_parms. Reconstitute any values which were |
2774 | passed in multiple registers and would fit in a single register. */ | |
2775 | ||
2776 | static void | |
2777 | assign_parm_remove_parallels (struct assign_parm_data_one *data) | |
2778 | { | |
2779 | rtx entry_parm = data->entry_parm; | |
2780 | ||
2781 | /* Convert the PARALLEL to a REG of the same mode as the parallel. | |
2782 | This can be done with register operations rather than on the | |
2783 | stack, even if we will store the reconstituted parameter on the | |
2784 | stack later. */ | |
85776d60 | 2785 | if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode) |
4d2a9850 DJ |
2786 | { |
2787 | rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm)); | |
bbd46fd5 | 2788 | emit_group_store (parmreg, entry_parm, data->passed_type, |
4d2a9850 DJ |
2789 | GET_MODE_SIZE (GET_MODE (entry_parm))); |
2790 | entry_parm = parmreg; | |
2791 | } | |
2792 | ||
2793 | data->entry_parm = entry_parm; | |
2794 | } | |
2795 | ||
6071dc7f RH |
2796 | /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's |
2797 | always valid and properly aligned. */ | |
6f086dfc | 2798 | |
6071dc7f | 2799 | static void |
f11a7b6d | 2800 | assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data) |
6071dc7f RH |
2801 | { |
2802 | rtx stack_parm = data->stack_parm; | |
2803 | ||
2804 | /* If we can't trust the parm stack slot to be aligned enough for its | |
2805 | ultimate type, don't use that slot after entry. We'll make another | |
2806 | stack slot, if we need one. */ | |
f11a7b6d AO |
2807 | if (stack_parm |
2808 | && ((STRICT_ALIGNMENT | |
2809 | && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)) | |
2810 | || (data->nominal_type | |
2811 | && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm) | |
2812 | && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY))) | |
6071dc7f RH |
2813 | stack_parm = NULL; |
2814 | ||
2815 | /* If parm was passed in memory, and we need to convert it on entry, | |
2816 | don't store it back in that same slot. */ | |
2817 | else if (data->entry_parm == stack_parm | |
2818 | && data->nominal_mode != BLKmode | |
2819 | && data->nominal_mode != data->passed_mode) | |
2820 | stack_parm = NULL; | |
2821 | ||
7d69de61 RH |
2822 | /* If stack protection is in effect for this function, don't leave any |
2823 | pointers in their passed stack slots. */ | |
cb91fab0 | 2824 | else if (crtl->stack_protect_guard |
7d69de61 RH |
2825 | && (flag_stack_protect == 2 |
2826 | || data->passed_pointer | |
2827 | || POINTER_TYPE_P (data->nominal_type))) | |
2828 | stack_parm = NULL; | |
2829 | ||
6071dc7f RH |
2830 | data->stack_parm = stack_parm; |
2831 | } | |
a0506b54 | 2832 | |
6071dc7f RH |
2833 | /* A subroutine of assign_parms. Return true if the current parameter |
2834 | should be stored as a BLKmode in the current frame. */ | |
2835 | ||
2836 | static bool | |
2837 | assign_parm_setup_block_p (struct assign_parm_data_one *data) | |
2838 | { | |
2839 | if (data->nominal_mode == BLKmode) | |
2840 | return true; | |
85776d60 DJ |
2841 | if (GET_MODE (data->entry_parm) == BLKmode) |
2842 | return true; | |
531547e9 | 2843 | |
6e985040 | 2844 | #ifdef BLOCK_REG_PADDING |
ae8c9754 RS |
2845 | /* Only assign_parm_setup_block knows how to deal with register arguments |
2846 | that are padded at the least significant end. */ | |
2847 | if (REG_P (data->entry_parm) | |
cf098191 | 2848 | && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD) |
ae8c9754 | 2849 | && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1) |
76b0cbf8 | 2850 | == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) |
6071dc7f | 2851 | return true; |
6e985040 | 2852 | #endif |
6071dc7f RH |
2853 | |
2854 | return false; | |
2855 | } | |
2856 | ||
b8698a0f | 2857 | /* A subroutine of assign_parms. Arrange for the parameter to be |
6071dc7f RH |
2858 | present and valid in DATA->STACK_RTL. */ |
2859 | ||
2860 | static void | |
27e29549 RH |
2861 | assign_parm_setup_block (struct assign_parm_data_all *all, |
2862 | tree parm, struct assign_parm_data_one *data) | |
6071dc7f RH |
2863 | { |
2864 | rtx entry_parm = data->entry_parm; | |
2865 | rtx stack_parm = data->stack_parm; | |
f11a7b6d | 2866 | rtx target_reg = NULL_RTX; |
a029addd | 2867 | bool in_conversion_seq = false; |
bfc45551 AM |
2868 | HOST_WIDE_INT size; |
2869 | HOST_WIDE_INT size_stored; | |
6071dc7f | 2870 | |
27e29549 RH |
2871 | if (GET_CODE (entry_parm) == PARALLEL) |
2872 | entry_parm = emit_group_move_into_temps (entry_parm); | |
2873 | ||
f11a7b6d AO |
2874 | /* If we want the parameter in a pseudo, don't use a stack slot. */ |
2875 | if (is_gimple_reg (parm) && use_register_for_decl (parm)) | |
2876 | { | |
2877 | tree def = ssa_default_def (cfun, parm); | |
2878 | gcc_assert (def); | |
2879 | machine_mode mode = promote_ssa_mode (def, NULL); | |
2880 | rtx reg = gen_reg_rtx (mode); | |
2881 | if (GET_CODE (reg) != CONCAT) | |
2882 | stack_parm = reg; | |
2883 | else | |
a029addd AO |
2884 | { |
2885 | target_reg = reg; | |
2886 | /* Avoid allocating a stack slot, if there isn't one | |
2887 | preallocated by the ABI. It might seem like we should | |
2888 | always prefer a pseudo, but converting between | |
2889 | floating-point and integer modes goes through the stack | |
2890 | on various machines, so it's better to use the reserved | |
2891 | stack slot than to risk wasting it and allocating more | |
2892 | for the conversion. */ | |
2893 | if (stack_parm == NULL_RTX) | |
2894 | { | |
2895 | int save = generating_concat_p; | |
2896 | generating_concat_p = 0; | |
2897 | stack_parm = gen_reg_rtx (mode); | |
2898 | generating_concat_p = save; | |
2899 | } | |
2900 | } | |
f11a7b6d AO |
2901 | data->stack_parm = NULL; |
2902 | } | |
2903 | ||
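  /* SIZE is the exact size in bytes of the argument's type; SIZE_STORED
     rounds that up to a whole number of words, since the register contents
     are stored into the slot a full word at a time.  */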
bfc45551 AM |
2904 | size = int_size_in_bytes (data->passed_type); |
2905 | size_stored = CEIL_ROUND (size, UNITS_PER_WORD); | |
2906 | if (stack_parm == 0) | |
2907 | { | |
fe37c7af | 2908 | SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD)); |
f11a7b6d AO |
2909 | stack_parm = assign_stack_local (BLKmode, size_stored, |
2910 | DECL_ALIGN (parm)); | |
cf098191 | 2911 | if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size)) |
f11a7b6d AO |
2912 | PUT_MODE (stack_parm, GET_MODE (entry_parm)); |
2913 | set_mem_attributes (stack_parm, parm, 1); | |
bfc45551 AM |
2914 | } |
2915 | ||
6071dc7f RH |
2916 | /* If a BLKmode arrives in registers, copy it to a stack slot. Handle |
2917 | calls that pass values in multiple non-contiguous locations. */ | |
2918 | if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL) | |
2919 | { | |
6071dc7f RH |
2920 | rtx mem; |
2921 | ||
2922 | /* Note that we will be storing an integral number of words. | |
2923 | So we have to be careful to ensure that we allocate an | |
bfc45551 | 2924 | integral number of words. We do this above when we call |
6071dc7f RH |
2925 | assign_stack_local if space was not allocated in the argument |
2926 | list. If it was, this will not work if PARM_BOUNDARY is not | |
2927 | a multiple of BITS_PER_WORD. It isn't clear how to fix this | |
2928 | if it becomes a problem. The exception is when BLKmode arrives |
2929 | with arguments not conforming to word_mode. */ | |
2930 | ||
bfc45551 AM |
2931 | if (data->stack_parm == 0) |
2932 | ; | |
6071dc7f RH |
2933 | else if (GET_CODE (entry_parm) == PARALLEL) |
2934 | ; | |
0bccc606 NS |
2935 | else |
2936 | gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD)); | |
6f086dfc | 2937 | |
1a8cb155 | 2938 | mem = validize_mem (copy_rtx (stack_parm)); |
c6b97fac | 2939 | |
6071dc7f | 2940 | /* Handle values in multiple non-contiguous locations. */ |
a029addd AO |
2941 | if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem)) |
2942 | emit_group_store (mem, entry_parm, data->passed_type, size); | |
2943 | else if (GET_CODE (entry_parm) == PARALLEL) | |
27e29549 | 2944 | { |
bb27eeda SE |
2945 | push_to_sequence2 (all->first_conversion_insn, |
2946 | all->last_conversion_insn); | |
27e29549 | 2947 | emit_group_store (mem, entry_parm, data->passed_type, size); |
bb27eeda SE |
2948 | all->first_conversion_insn = get_insns (); |
2949 | all->last_conversion_insn = get_last_insn (); | |
27e29549 | 2950 | end_sequence (); |
a029addd | 2951 | in_conversion_seq = true; |
27e29549 | 2952 | } |
c6b97fac | 2953 | |
6071dc7f RH |
2954 | else if (size == 0) |
2955 | ; | |
5c07bd7a | 2956 | |
6071dc7f RH |
2957 | /* If SIZE is that of a mode no bigger than a word, just use |
2958 | that mode's store operation. */ | |
2959 | else if (size <= UNITS_PER_WORD) | |
2960 | { | |
f4b31647 RS |
2961 | unsigned int bits = size * BITS_PER_UNIT; |
2962 | machine_mode mode = int_mode_for_size (bits, 0).else_blk (); | |
c6b97fac | 2963 | |
6071dc7f | 2964 | if (mode != BLKmode |
6e985040 | 2965 | #ifdef BLOCK_REG_PADDING |
6071dc7f RH |
2966 | && (size == UNITS_PER_WORD |
2967 | || (BLOCK_REG_PADDING (mode, data->passed_type, 1) | |
76b0cbf8 | 2968 | != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))) |
6e985040 | 2969 | #endif |
6071dc7f RH |
2970 | ) |
2971 | { | |
208996c7 RS |
2972 | rtx reg; |
2973 | ||
2974 | /* We are really truncating a word_mode value containing | |
2975 | SIZE bytes into a value of mode MODE. If such an | |
2976 | operation requires no actual instructions, we can refer | |
2977 | to the value directly in mode MODE, otherwise we must | |
2978 | start with the register in word_mode and explicitly | |
2979 | convert it. */ | |
bb149ca2 RS |
2980 | if (targetm.truly_noop_truncation (size * BITS_PER_UNIT, |
2981 | BITS_PER_WORD)) | |
208996c7 RS |
2982 | reg = gen_rtx_REG (mode, REGNO (entry_parm)); |
2983 | else | |
2984 | { | |
2985 | reg = gen_rtx_REG (word_mode, REGNO (entry_parm)); | |
2986 | reg = convert_to_mode (mode, copy_to_reg (reg), 1); | |
2987 | } | |
6071dc7f RH |
2988 | emit_move_insn (change_address (mem, mode, 0), reg); |
2989 | } | |
c6b97fac | 2990 | |
1e5d7fd6 AO |
2991 | #ifdef BLOCK_REG_PADDING |
2992 | /* Storing the register in memory as a full word, as | |
2993 | move_block_from_reg below would do, and then using the | |
2994 | MEM in a smaller mode, has the effect of shifting right | |
2995 | if BYTES_BIG_ENDIAN. If we're bypassing memory, the | |
2996 | shifting must be explicit. */ | |
2997 | else if (!MEM_P (mem)) | |
2998 | { | |
2999 | rtx x; | |
3000 | ||
3001 | /* If the assert below fails, we should have taken the | |
3002 | mode != BLKmode path above, unless we have downward | |
3003 | padding of smaller-than-word arguments on a machine | |
3004 | with little-endian bytes, which would likely require | |
3005 | additional changes to work correctly. */ | |
3006 | gcc_checking_assert (BYTES_BIG_ENDIAN | |
3007 | && (BLOCK_REG_PADDING (mode, | |
3008 | data->passed_type, 1) | |
76b0cbf8 | 3009 | == PAD_UPWARD)); |
1e5d7fd6 AO |
3010 | |
3011 | int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT; | |
3012 | ||
3013 | x = gen_rtx_REG (word_mode, REGNO (entry_parm)); | |
3014 | x = expand_shift (RSHIFT_EXPR, word_mode, x, by, | |
3015 | NULL_RTX, 1); | |
3016 | x = force_reg (word_mode, x); | |
3017 | x = gen_lowpart_SUBREG (GET_MODE (mem), x); | |
3018 | ||
3019 | emit_move_insn (mem, x); | |
3020 | } | |
3021 | #endif | |
3022 | ||
6071dc7f RH |
3023 | /* Blocks smaller than a word on a BYTES_BIG_ENDIAN |
3024 | machine must be aligned to the left before storing | |
3025 | to memory. Note that the previous test doesn't | |
3026 | handle all cases (e.g. SIZE == 3). */ | |
3027 | else if (size != UNITS_PER_WORD | |
6e985040 | 3028 | #ifdef BLOCK_REG_PADDING |
6071dc7f | 3029 | && (BLOCK_REG_PADDING (mode, data->passed_type, 1) |
76b0cbf8 | 3030 | == PAD_DOWNWARD) |
6e985040 | 3031 | #else |
6071dc7f | 3032 | && BYTES_BIG_ENDIAN |
6e985040 | 3033 | #endif |
6071dc7f RH |
3034 | ) |
3035 | { | |
3036 | rtx tem, x; | |
3037 | int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT; | |
65c844e2 | 3038 | rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm)); |
6071dc7f | 3039 | |
eb6c3df1 | 3040 | x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1); |
6071dc7f RH |
3041 | tem = change_address (mem, word_mode, 0); |
3042 | emit_move_insn (tem, x); | |
6f086dfc | 3043 | } |
6071dc7f | 3044 | else |
27e29549 | 3045 | move_block_from_reg (REGNO (entry_parm), mem, |
6071dc7f | 3046 | size_stored / UNITS_PER_WORD); |
6f086dfc | 3047 | } |
f11a7b6d | 3048 | else if (!MEM_P (mem)) |
1e5d7fd6 AO |
3049 | { |
3050 | gcc_checking_assert (size > UNITS_PER_WORD); | |
3051 | #ifdef BLOCK_REG_PADDING | |
3052 | gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem), | |
3053 | data->passed_type, 0) | |
76b0cbf8 | 3054 | == PAD_UPWARD); |
1e5d7fd6 AO |
3055 | #endif |
3056 | emit_move_insn (mem, entry_parm); | |
3057 | } | |
6071dc7f | 3058 | else |
27e29549 | 3059 | move_block_from_reg (REGNO (entry_parm), mem, |
6071dc7f RH |
3060 | size_stored / UNITS_PER_WORD); |
3061 | } | |
bfc45551 AM |
3062 | else if (data->stack_parm == 0) |
3063 | { | |
bb27eeda | 3064 | push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); |
bfc45551 AM |
3065 | emit_block_move (stack_parm, data->entry_parm, GEN_INT (size), |
3066 | BLOCK_OP_NORMAL); | |
bb27eeda SE |
3067 | all->first_conversion_insn = get_insns (); |
3068 | all->last_conversion_insn = get_last_insn (); | |
bfc45551 | 3069 | end_sequence (); |
a029addd | 3070 | in_conversion_seq = true; |
bfc45551 | 3071 | } |
6071dc7f | 3072 | |
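  /* If the parameter is destined for a CONCAT pseudo (a complex value kept
     in two registers), copy the value stored above into that pseudo now,
     inside the conversion sequence if that is where the store was made.  */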
f11a7b6d AO |
3073 | if (target_reg) |
3074 | { | |
a029addd AO |
3075 | if (!in_conversion_seq) |
3076 | emit_move_insn (target_reg, stack_parm); | |
3077 | else | |
3078 | { | |
3079 | push_to_sequence2 (all->first_conversion_insn, | |
3080 | all->last_conversion_insn); | |
3081 | emit_move_insn (target_reg, stack_parm); | |
3082 | all->first_conversion_insn = get_insns (); | |
3083 | all->last_conversion_insn = get_last_insn (); | |
3084 | end_sequence (); | |
3085 | } | |
f11a7b6d AO |
3086 | stack_parm = target_reg; |
3087 | } | |
3088 | ||
bfc45551 | 3089 | data->stack_parm = stack_parm; |
f11a7b6d | 3090 | set_parm_rtl (parm, stack_parm); |
6071dc7f RH |
3091 | } |
3092 | ||
3093 | /* A subroutine of assign_parms. Allocate a pseudo to hold the current | |
3094 | parameter. Get it there. Perform all ABI specified conversions. */ | |
3095 | ||
3096 | static void | |
3097 | assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, | |
3098 | struct assign_parm_data_one *data) | |
3099 | { | |
71008de4 BS |
3100 | rtx parmreg, validated_mem; |
3101 | rtx equiv_stack_parm; | |
ef4bddc2 | 3102 | machine_mode promoted_nominal_mode; |
6071dc7f RH |
3103 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm)); |
3104 | bool did_conversion = false; | |
71008de4 | 3105 | bool need_conversion, moved; |
f11a7b6d | 3106 | rtx rtl; |
6071dc7f RH |
3107 | |
3108 | /* Store the parm in a pseudoregister during the function, but we may | |
666e3ceb PB |
3109 | need to do it in a wider mode. Using 2 here makes the result |
3110 | consistent with promote_decl_mode and thus expand_expr_real_1. */ | |
6071dc7f | 3111 | promoted_nominal_mode |
cde0f3fd | 3112 | = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp, |
666e3ceb | 3113 | TREE_TYPE (current_function_decl), 2); |
6071dc7f | 3114 | |
f11a7b6d AO |
3115 | parmreg = gen_reg_rtx (promoted_nominal_mode); |
3116 | if (!DECL_ARTIFICIAL (parm)) | |
3117 | mark_user_reg (parmreg); | |
6071dc7f RH |
3118 | |
3119 | /* If this was an item that we received a pointer to, | |
f11a7b6d AO |
3120 | set rtl appropriately. */ |
3121 | if (data->passed_pointer) | |
6071dc7f | 3122 | { |
f11a7b6d AO |
3123 | rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg); |
3124 | set_mem_attributes (rtl, parm, 1); | |
6071dc7f RH |
3125 | } |
3126 | else | |
f11a7b6d | 3127 | rtl = parmreg; |
6071dc7f | 3128 | |
4d2a9850 DJ |
3129 | assign_parm_remove_parallels (data); |
3130 | ||
666e3ceb PB |
3131 | /* Copy the value into the register, thus bridging between |
3132 | assign_parm_find_data_types and expand_expr_real_1. */ | |
6071dc7f | 3133 | |
71008de4 | 3134 | equiv_stack_parm = data->stack_parm; |
1a8cb155 | 3135 | validated_mem = validize_mem (copy_rtx (data->entry_parm)); |
71008de4 BS |
3136 | |
3137 | need_conversion = (data->nominal_mode != data->passed_mode | |
3138 | || promoted_nominal_mode != data->promoted_mode); | |
3139 | moved = false; | |
3140 | ||
dbb94435 BS |
3141 | if (need_conversion |
3142 | && GET_MODE_CLASS (data->nominal_mode) == MODE_INT | |
3143 | && data->nominal_mode == data->passed_mode | |
3144 | && data->nominal_mode == GET_MODE (data->entry_parm)) | |
71008de4 | 3145 | { |
6071dc7f RH |
3146 | /* ENTRY_PARM has been converted to PROMOTED_MODE, its |
3147 | mode, by the caller. We now have to convert it to | |
3148 | NOMINAL_MODE, if different. However, PARMREG may be in | |
3149 | a different mode than NOMINAL_MODE if it is being stored | |
3150 | promoted. | |
3151 | ||
3152 | If ENTRY_PARM is a hard register, it might be in a register | |
3153 | not valid for operating in its mode (e.g., an odd-numbered | |
3154 | register for a DFmode). In that case, moves are the only | |
3155 | thing valid, so we can't do a convert from there. This | |
3156 | occurs when the calling sequence allows such misaligned |
3157 | usages. | |
3158 | ||
3159 | In addition, the conversion may involve a call, which could | |
3160 | clobber parameters which haven't been copied to pseudo | |
71008de4 BS |
3161 | registers yet. |
3162 | ||
3163 | First, we try to emit an insn which performs the necessary | |
3164 | conversion. We verify that this insn does not clobber any | |
3165 | hard registers. */ | |
3166 | ||
3167 | enum insn_code icode; | |
3168 | rtx op0, op1; | |
3169 | ||
3170 | icode = can_extend_p (promoted_nominal_mode, data->passed_mode, | |
3171 | unsignedp); | |
3172 | ||
3173 | op0 = parmreg; | |
3174 | op1 = validated_mem; | |
3175 | if (icode != CODE_FOR_nothing | |
2ef6ce06 RS |
3176 | && insn_operand_matches (icode, 0, op0) |
3177 | && insn_operand_matches (icode, 1, op1)) | |
71008de4 BS |
3178 | { |
3179 | enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND; | |
b32d5189 DM |
3180 | rtx_insn *insn, *insns; |
3181 | rtx t = op1; | |
71008de4 BS |
3182 | HARD_REG_SET hardregs; |
3183 | ||
3184 | start_sequence (); | |
f9fef349 JJ |
3185 | /* If op1 is a hard register that is likely spilled, first |
3186 | force it into a pseudo, otherwise combiner might extend | |
3187 | its lifetime too much. */ | |
3188 | if (GET_CODE (t) == SUBREG) | |
3189 | t = SUBREG_REG (t); | |
3190 | if (REG_P (t) | |
3191 | && HARD_REGISTER_P (t) | |
3192 | && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t)) | |
3193 | && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t)))) | |
3194 | { | |
3195 | t = gen_reg_rtx (GET_MODE (op1)); | |
3196 | emit_move_insn (t, op1); | |
3197 | } | |
3198 | else | |
3199 | t = op1; | |
e67d1102 RS |
3200 | rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode, |
3201 | data->passed_mode, unsignedp); | |
a11899b2 | 3202 | emit_insn (pat); |
71008de4 BS |
3203 | insns = get_insns (); |
3204 | ||
3205 | moved = true; | |
3206 | CLEAR_HARD_REG_SET (hardregs); | |
3207 | for (insn = insns; insn && moved; insn = NEXT_INSN (insn)) | |
3208 | { | |
3209 | if (INSN_P (insn)) | |
3210 | note_stores (PATTERN (insn), record_hard_reg_sets, | |
3211 | &hardregs); | |
3212 | if (!hard_reg_set_empty_p (hardregs)) | |
3213 | moved = false; | |
3214 | } | |
3215 | ||
3216 | end_sequence (); | |
3217 | ||
3218 | if (moved) | |
3219 | { | |
3220 | emit_insn (insns); | |
dbb94435 BS |
3221 | if (equiv_stack_parm != NULL_RTX) |
3222 | equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg), | |
3223 | equiv_stack_parm); | |
71008de4 BS |
3224 | } |
3225 | } | |
3226 | } | |
3227 | ||
3228 | if (moved) | |
3229 | /* Nothing to do. */ | |
3230 | ; | |
3231 | else if (need_conversion) | |
3232 | { | |
3233 | /* We did not have an insn to convert directly, or the sequence | |
3234 | generated appeared unsafe. We must first copy the parm to a | |
3235 | pseudo reg, and save the conversion until after all | |
6071dc7f RH |
3236 | parameters have been moved. */ |
3237 | ||
71008de4 | 3238 | int save_tree_used; |
6071dc7f RH |
3239 | rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); |
3240 | ||
71008de4 | 3241 | emit_move_insn (tempreg, validated_mem); |
6071dc7f | 3242 | |
bb27eeda | 3243 | push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); |
6071dc7f RH |
3244 | tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp); |
3245 | ||
bd4288c0 | 3246 | if (partial_subreg_p (tempreg) |
6071dc7f RH |
3247 | && GET_MODE (tempreg) == data->nominal_mode |
3248 | && REG_P (SUBREG_REG (tempreg)) | |
3249 | && data->nominal_mode == data->passed_mode | |
bd4288c0 | 3250 | && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)) |
6f086dfc | 3251 | { |
6071dc7f RH |
3252 | /* The argument is already sign/zero extended, so note it |
3253 | into the subreg. */ | |
3254 | SUBREG_PROMOTED_VAR_P (tempreg) = 1; | |
362d42dc | 3255 | SUBREG_PROMOTED_SET (tempreg, unsignedp); |
6071dc7f | 3256 | } |
00d8a4c1 | 3257 | |
6071dc7f RH |
3258 | /* TREE_USED gets set erroneously during expand_assignment. */ |
3259 | save_tree_used = TREE_USED (parm); | |
f11a7b6d | 3260 | SET_DECL_RTL (parm, rtl); |
79f5e442 | 3261 | expand_assignment (parm, make_tree (data->nominal_type, tempreg), false); |
f11a7b6d | 3262 | SET_DECL_RTL (parm, NULL_RTX); |
6071dc7f | 3263 | TREE_USED (parm) = save_tree_used; |
bb27eeda SE |
3264 | all->first_conversion_insn = get_insns (); |
3265 | all->last_conversion_insn = get_last_insn (); | |
6071dc7f | 3266 | end_sequence (); |
00d8a4c1 | 3267 | |
6071dc7f RH |
3268 | did_conversion = true; |
3269 | } | |
f11a7b6d | 3270 | else |
71008de4 | 3271 | emit_move_insn (parmreg, validated_mem); |
6071dc7f RH |
3272 | |
3273 | /* If we were passed a pointer but the actual value can safely live | |
f7e088e7 | 3274 | in a register, retrieve it and use it directly. */ |
f11a7b6d | 3275 | if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode) |
6071dc7f RH |
3276 | { |
3277 | /* We can't use nominal_mode, because it will have been set to | |
3278 | Pmode above. We must use the actual mode of the parm. */ | |
f11a7b6d | 3279 | if (use_register_for_decl (parm)) |
f7e088e7 EB |
3280 | { |
3281 | parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm))); | |
3282 | mark_user_reg (parmreg); | |
3283 | } | |
3284 | else | |
3285 | { | |
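	  /* The pointed-to value cannot live in a pseudo, so give it a
	     stack slot of its actual type and mode; the value is copied
	     into this slot below.  */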
3286 | int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm), | |
3287 | TYPE_MODE (TREE_TYPE (parm)), | |
3288 | TYPE_ALIGN (TREE_TYPE (parm))); | |
3289 | parmreg | |
3290 | = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)), | |
3291 | GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))), | |
3292 | align); | |
3293 | set_mem_attributes (parmreg, parm, 1); | |
3294 | } | |
cd5b3469 | 3295 | |
951d8c8a EB |
3296 | /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for |
3297 | the debug info in case it is not legitimate. */ | |
f11a7b6d | 3298 | if (GET_MODE (parmreg) != GET_MODE (rtl)) |
6071dc7f | 3299 | { |
f11a7b6d | 3300 | rtx tempreg = gen_reg_rtx (GET_MODE (rtl)); |
6071dc7f RH |
3301 | int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm)); |
3302 | ||
bb27eeda SE |
3303 | push_to_sequence2 (all->first_conversion_insn, |
3304 | all->last_conversion_insn); | |
f11a7b6d | 3305 | emit_move_insn (tempreg, rtl); |
6071dc7f | 3306 | tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p); |
951d8c8a EB |
3307 | emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, |
3308 | tempreg); | |
bb27eeda SE |
3309 | all->first_conversion_insn = get_insns (); |
3310 | all->last_conversion_insn = get_last_insn (); | |
6071dc7f | 3311 | end_sequence (); |
6f086dfc | 3312 | |
6071dc7f RH |
3313 | did_conversion = true; |
3314 | } | |
3315 | else | |
951d8c8a | 3316 | emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl); |
6f086dfc | 3317 | |
f11a7b6d | 3318 | rtl = parmreg; |
797a6ac1 | 3319 | |
6071dc7f RH |
3320 | /* STACK_PARM is the pointer, not the parm, and PARMREG is |
3321 | now the parm. */ | |
f11a7b6d | 3322 | data->stack_parm = NULL; |
6071dc7f | 3323 | } |
ddef6bc7 | 3324 | |
f11a7b6d AO |
3325 | set_parm_rtl (parm, rtl); |
3326 | ||
6071dc7f RH |
3327 | /* Mark the register as eliminable if we did no conversion and it was |
3328 | copied from memory at a fixed offset, and the arg pointer was not | |
3329 | copied to a pseudo-reg. If the arg pointer is a pseudo reg or the | |
3330 | offset formed an invalid address, such memory-equivalences as we | |
3331 | make here would screw up life analysis for it. */ | |
3332 | if (data->nominal_mode == data->passed_mode | |
3333 | && !did_conversion | |
f11a7b6d AO |
3334 | && data->stack_parm != 0 |
3335 | && MEM_P (data->stack_parm) | |
6071dc7f RH |
3336 | && data->locate.offset.var == 0 |
3337 | && reg_mentioned_p (virtual_incoming_args_rtx, | |
f11a7b6d | 3338 | XEXP (data->stack_parm, 0))) |
6071dc7f | 3339 | { |
691fe203 DM |
3340 | rtx_insn *linsn = get_last_insn (); |
3341 | rtx_insn *sinsn; | |
3342 | rtx set; | |
a03caf76 | 3343 | |
6071dc7f RH |
3344 | /* Mark complex types separately. */ |
3345 | if (GET_CODE (parmreg) == CONCAT) | |
3346 | { | |
d21cefc2 | 3347 | scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg)); |
1466e387 RH |
3348 | int regnor = REGNO (XEXP (parmreg, 0)); |
3349 | int regnoi = REGNO (XEXP (parmreg, 1)); | |
f11a7b6d AO |
3350 | rtx stackr = adjust_address_nv (data->stack_parm, submode, 0); |
3351 | rtx stacki = adjust_address_nv (data->stack_parm, submode, | |
1466e387 | 3352 | GET_MODE_SIZE (submode)); |
6071dc7f RH |
3353 | |
3354 | /* Scan backwards for the set of the real and | |
3355 | imaginary parts. */ | |
3356 | for (sinsn = linsn; sinsn != 0; | |
3357 | sinsn = prev_nonnote_insn (sinsn)) | |
3358 | { | |
3359 | set = single_set (sinsn); | |
3360 | if (set == 0) | |
3361 | continue; | |
3362 | ||
3363 | if (SET_DEST (set) == regno_reg_rtx [regnoi]) | |
a31830a7 | 3364 | set_unique_reg_note (sinsn, REG_EQUIV, stacki); |
6071dc7f | 3365 | else if (SET_DEST (set) == regno_reg_rtx [regnor]) |
a31830a7 | 3366 | set_unique_reg_note (sinsn, REG_EQUIV, stackr); |
a03caf76 | 3367 | } |
6071dc7f | 3368 | } |
f11a7b6d | 3369 | else |
7543f918 | 3370 | set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg); |
6071dc7f RH |
3371 | } |
3372 | ||
3373 | /* For a pointer data type, suggest a pointer register. */ |
3374 | if (POINTER_TYPE_P (TREE_TYPE (parm))) | |
3375 | mark_reg_pointer (parmreg, | |
3376 | TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); | |
3377 | } | |
3378 | ||
3379 | /* A subroutine of assign_parms. Allocate stack space to hold the current | |
3380 | parameter. Get it there. Perform all ABI specified conversions. */ | |
3381 | ||
3382 | static void | |
3383 | assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, | |
3384 | struct assign_parm_data_one *data) | |
3385 | { | |
3386 | /* Value must be stored in the stack slot STACK_PARM during function | |
3387 | execution. */ | |
bfc45551 | 3388 | bool to_conversion = false; |
6071dc7f | 3389 | |
4d2a9850 DJ |
3390 | assign_parm_remove_parallels (data); |
3391 | ||
6071dc7f RH |
3392 | if (data->promoted_mode != data->nominal_mode) |
3393 | { | |
3394 | /* Conversion is required. */ | |
3395 | rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm)); | |
6f086dfc | 3396 | |
1a8cb155 | 3397 | emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm))); |
6071dc7f | 3398 | |
bb27eeda | 3399 | push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); |
bfc45551 AM |
3400 | to_conversion = true; |
3401 | ||
6071dc7f RH |
3402 | data->entry_parm = convert_to_mode (data->nominal_mode, tempreg, |
3403 | TYPE_UNSIGNED (TREE_TYPE (parm))); | |
3404 | ||
3405 | if (data->stack_parm) | |
dd67163f | 3406 | { |
91914e56 RS |
3407 | poly_int64 offset |
3408 | = subreg_lowpart_offset (data->nominal_mode, | |
3409 | GET_MODE (data->stack_parm)); | |
dd67163f JJ |
3410 | /* ??? This may need a big-endian conversion on sparc64. */ |
3411 | data->stack_parm | |
3412 | = adjust_address (data->stack_parm, data->nominal_mode, 0); | |
91914e56 | 3413 | if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm)) |
dd67163f | 3414 | set_mem_offset (data->stack_parm, |
527210c4 | 3415 | MEM_OFFSET (data->stack_parm) + offset); |
dd67163f | 3416 | } |
6071dc7f RH |
3417 | } |
3418 | ||
3419 | if (data->entry_parm != data->stack_parm) | |
3420 | { | |
bfc45551 | 3421 | rtx src, dest; |
1f9ceff1 | 3422 | |
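      /* If the ABI provided no stack slot for this parameter, carve one
	 out of the local frame now.  */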
6071dc7f RH |
3423 | if (data->stack_parm == 0) |
3424 | { | |
3a695389 UW |
3425 | int align = STACK_SLOT_ALIGNMENT (data->passed_type, |
3426 | GET_MODE (data->entry_parm), | |
3427 | TYPE_ALIGN (data->passed_type)); | |
6071dc7f RH |
3428 | data->stack_parm |
3429 | = assign_stack_local (GET_MODE (data->entry_parm), | |
3430 | GET_MODE_SIZE (GET_MODE (data->entry_parm)), | |
3a695389 | 3431 | align); |
f11a7b6d | 3432 | set_mem_attributes (data->stack_parm, parm, 1); |
6f086dfc | 3433 | } |
6071dc7f | 3434 | |
1a8cb155 RS |
3435 | dest = validize_mem (copy_rtx (data->stack_parm)); |
3436 | src = validize_mem (copy_rtx (data->entry_parm)); | |
bfc45551 AM |
3437 | |
3438 | if (MEM_P (src)) | |
6f086dfc | 3439 | { |
bfc45551 AM |
3440 | /* Use a block move to handle potentially misaligned entry_parm. */ |
3441 | if (!to_conversion) | |
bb27eeda SE |
3442 | push_to_sequence2 (all->first_conversion_insn, |
3443 | all->last_conversion_insn); | |
bfc45551 AM |
3444 | to_conversion = true; |
3445 | ||
3446 | emit_block_move (dest, src, | |
3447 | GEN_INT (int_size_in_bytes (data->passed_type)), | |
3448 | BLOCK_OP_NORMAL); | |
6071dc7f RH |
3449 | } |
3450 | else | |
4a235312 L |
3451 | { |
3452 | if (!REG_P (src)) | |
3453 | src = force_reg (GET_MODE (src), src); | |
3454 | emit_move_insn (dest, src); | |
3455 | } | |
bfc45551 AM |
3456 | } |
3457 | ||
3458 | if (to_conversion) | |
3459 | { | |
bb27eeda SE |
3460 | all->first_conversion_insn = get_insns (); |
3461 | all->last_conversion_insn = get_last_insn (); | |
bfc45551 | 3462 | end_sequence (); |
6071dc7f | 3463 | } |
6f086dfc | 3464 | |
f11a7b6d | 3465 | set_parm_rtl (parm, data->stack_parm); |
6071dc7f | 3466 | } |
3412b298 | 3467 | |
6071dc7f RH |
3468 | /* A subroutine of assign_parms. If the ABI splits complex arguments, then |
3469 | undo the frobbing that we did in assign_parms_augmented_arg_list. */ | |
86f8eff3 | 3470 | |
6071dc7f | 3471 | static void |
3b3f318a | 3472 | assign_parms_unsplit_complex (struct assign_parm_data_all *all, |
9771b263 | 3473 | vec<tree> fnargs) |
6071dc7f RH |
3474 | { |
3475 | tree parm; | |
6ccd356e | 3476 | tree orig_fnargs = all->orig_fnargs; |
3b3f318a | 3477 | unsigned i = 0; |
f4ef873c | 3478 | |
3b3f318a | 3479 | for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i) |
6071dc7f RH |
3480 | { |
3481 | if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE | |
3482 | && targetm.calls.split_complex_arg (TREE_TYPE (parm))) | |
3483 | { | |
3484 | rtx tmp, real, imag; | |
d21cefc2 | 3485 | scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm)); |
6f086dfc | 3486 | |
9771b263 DN |
3487 | real = DECL_RTL (fnargs[i]); |
3488 | imag = DECL_RTL (fnargs[i + 1]); | |
6071dc7f | 3489 | if (inner != GET_MODE (real)) |
6f086dfc | 3490 | { |
f11a7b6d AO |
3491 | real = gen_lowpart_SUBREG (inner, real); |
3492 | imag = gen_lowpart_SUBREG (inner, imag); | |
6071dc7f | 3493 | } |
6ccd356e | 3494 | |
f11a7b6d | 3495 | if (TREE_ADDRESSABLE (parm)) |
6ccd356e AM |
3496 | { |
3497 | rtx rmem, imem; | |
3498 | HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm)); | |
3a695389 UW |
3499 | int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm), |
3500 | DECL_MODE (parm), | |
3501 | TYPE_ALIGN (TREE_TYPE (parm))); | |
6ccd356e AM |
3502 | |
3503 | /* split_complex_arg put the real and imag parts in | |
3504 | pseudos. Move them to memory. */ | |
3a695389 | 3505 | tmp = assign_stack_local (DECL_MODE (parm), size, align); |
6ccd356e AM |
3506 | set_mem_attributes (tmp, parm, 1); |
3507 | rmem = adjust_address_nv (tmp, inner, 0); | |
3508 | imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner)); | |
bb27eeda SE |
3509 | push_to_sequence2 (all->first_conversion_insn, |
3510 | all->last_conversion_insn); | |
6ccd356e AM |
3511 | emit_move_insn (rmem, real); |
3512 | emit_move_insn (imem, imag); | |
bb27eeda SE |
3513 | all->first_conversion_insn = get_insns (); |
3514 | all->last_conversion_insn = get_last_insn (); | |
6ccd356e AM |
3515 | end_sequence (); |
3516 | } | |
3517 | else | |
3518 | tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); | |
f11a7b6d | 3519 | set_parm_rtl (parm, tmp); |
7e41ffa2 | 3520 | |
9771b263 DN |
3521 | real = DECL_INCOMING_RTL (fnargs[i]); |
3522 | imag = DECL_INCOMING_RTL (fnargs[i + 1]); | |
6071dc7f RH |
3523 | if (inner != GET_MODE (real)) |
3524 | { | |
3525 | real = gen_lowpart_SUBREG (inner, real); | |
3526 | imag = gen_lowpart_SUBREG (inner, imag); | |
6f086dfc | 3527 | } |
6071dc7f | 3528 | tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); |
5141868d | 3529 | set_decl_incoming_rtl (parm, tmp, false); |
3b3f318a | 3530 | i++; |
6f086dfc | 3531 | } |
6f086dfc | 3532 | } |
6071dc7f RH |
3533 | } |
3534 | ||
3535 | /* Assign RTL expressions to the function's parameters. This may involve | |
3536 | copying them into registers and using those registers as the DECL_RTL. */ | |
3537 | ||
6fe79279 | 3538 | static void |
6071dc7f RH |
3539 | assign_parms (tree fndecl) |
3540 | { | |
3541 | struct assign_parm_data_all all; | |
3b3f318a | 3542 | tree parm; |
9771b263 | 3543 | vec<tree> fnargs; |
31db0fe0 | 3544 | unsigned i; |
6f086dfc | 3545 | |
38173d38 | 3546 | crtl->args.internal_arg_pointer |
150cdc9e | 3547 | = targetm.calls.internal_arg_pointer (); |
6071dc7f RH |
3548 | |
3549 | assign_parms_initialize_all (&all); | |
3550 | fnargs = assign_parms_augmented_arg_list (&all); | |
3551 | ||
9771b263 | 3552 | FOR_EACH_VEC_ELT (fnargs, i, parm) |
ded9bf77 | 3553 | { |
6071dc7f RH |
3554 | struct assign_parm_data_one data; |
3555 | ||
3556 | /* Extract the type of PARM; adjust it according to ABI. */ | |
3557 | assign_parm_find_data_types (&all, parm, &data); | |
3558 | ||
3559 | /* Early out for errors and void parameters. */ | |
3560 | if (data.passed_mode == VOIDmode) | |
ded9bf77 | 3561 | { |
6071dc7f RH |
3562 | SET_DECL_RTL (parm, const0_rtx); |
3563 | DECL_INCOMING_RTL (parm) = DECL_RTL (parm); | |
3564 | continue; | |
3565 | } | |
196c42cd | 3566 | |
2e3f842f L |
3567 | /* Estimate stack alignment from parameter alignment. */ |
3568 | if (SUPPORTS_STACK_ALIGNMENT) | |
3569 | { | |
c2ed6cf8 NF |
3570 | unsigned int align |
3571 | = targetm.calls.function_arg_boundary (data.promoted_mode, | |
3572 | data.passed_type); | |
ae58e548 JJ |
3573 | align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode, |
3574 | align); | |
2e3f842f | 3575 | if (TYPE_ALIGN (data.nominal_type) > align) |
ae58e548 JJ |
3576 | align = MINIMUM_ALIGNMENT (data.nominal_type, |
3577 | TYPE_MODE (data.nominal_type), | |
3578 | TYPE_ALIGN (data.nominal_type)); | |
2e3f842f L |
3579 | if (crtl->stack_alignment_estimated < align) |
3580 | { | |
3581 | gcc_assert (!crtl->stack_realign_processed); | |
3582 | crtl->stack_alignment_estimated = align; | |
3583 | } | |
3584 | } | |
b8698a0f | 3585 | |
6071dc7f RH |
3586 | /* Find out where the parameter arrives in this function. */ |
3587 | assign_parm_find_entry_rtl (&all, &data); | |
3588 | ||
3589 | /* Find out where stack space for this parameter might be. */ | |
3590 | if (assign_parm_is_stack_parm (&all, &data)) | |
3591 | { | |
3592 | assign_parm_find_stack_rtl (parm, &data); | |
3593 | assign_parm_adjust_entry_rtl (&data); | |
ded9bf77 | 3594 | } |
6071dc7f | 3595 | /* Record permanently how this parm was passed. */ |
a82ff31f JJ |
3596 | if (data.passed_pointer) |
3597 | { | |
3598 | rtx incoming_rtl | |
3599 | = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)), | |
3600 | data.entry_parm); | |
3601 | set_decl_incoming_rtl (parm, incoming_rtl, true); | |
3602 | } | |
3603 | else | |
3604 | set_decl_incoming_rtl (parm, data.entry_parm, false); | |
6071dc7f | 3605 | |
f11a7b6d | 3606 | assign_parm_adjust_stack_rtl (&data); |
1f9ceff1 | 3607 | |
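      /* Commit to an RTL home for the parameter: a BLKmode block in the
	 frame, a pseudo register, or a stack slot, emitting any conversions
	 the ABI requires along the way.  */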
31db0fe0 ML |
3608 | if (assign_parm_setup_block_p (&data)) |
3609 | assign_parm_setup_block (&all, parm, &data); | |
3610 | else if (data.passed_pointer || use_register_for_decl (parm)) | |
3611 | assign_parm_setup_reg (&all, parm, &data); | |
d5e254e1 | 3612 | else |
31db0fe0 | 3613 | assign_parm_setup_stack (&all, parm, &data); |
d5e254e1 IE |
3614 | |
3615 | if (cfun->stdarg && !DECL_CHAIN (parm)) | |
31db0fe0 | 3616 | assign_parms_setup_varargs (&all, &data, false); |
d5e254e1 | 3617 | |
6071dc7f | 3618 | /* Update info on where next arg arrives in registers. */ |
d5cc9181 | 3619 | targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, |
3c07301f | 3620 | data.passed_type, data.named_arg); |
ded9bf77 AH |
3621 | } |
3622 | ||
3b3f318a | 3623 | if (targetm.calls.split_complex_arg) |
6ccd356e | 3624 | assign_parms_unsplit_complex (&all, fnargs); |
6071dc7f | 3625 | |
9771b263 | 3626 | fnargs.release (); |
3b3f318a | 3627 | |
3412b298 JW |
3628 | /* Output all parameter conversion instructions (possibly including calls) |
3629 | now that all parameters have been copied out of hard registers. */ | |
bb27eeda | 3630 | emit_insn (all.first_conversion_insn); |
3412b298 | 3631 | |
2e3f842f L |
3632 | /* Estimate reload stack alignment from scalar return mode. */ |
3633 | if (SUPPORTS_STACK_ALIGNMENT) | |
3634 | { | |
3635 | if (DECL_RESULT (fndecl)) | |
3636 | { | |
3637 | tree type = TREE_TYPE (DECL_RESULT (fndecl)); | |
ef4bddc2 | 3638 | machine_mode mode = TYPE_MODE (type); |
2e3f842f L |
3639 | |
3640 | if (mode != BLKmode | |
3641 | && mode != VOIDmode | |
3642 | && !AGGREGATE_TYPE_P (type)) | |
3643 | { | |
3644 | unsigned int align = GET_MODE_ALIGNMENT (mode); | |
3645 | if (crtl->stack_alignment_estimated < align) | |
3646 | { | |
3647 | gcc_assert (!crtl->stack_realign_processed); | |
3648 | crtl->stack_alignment_estimated = align; | |
3649 | } | |
3650 | } | |
b8698a0f | 3651 | } |
2e3f842f L |
3652 | } |
3653 | ||
b36a8cc2 OH |
3654 | /* If we are receiving a struct value address as the first argument, set up |
3655 | the RTL for the function result. As this might require code to convert | |
3656 | the transmitted address to Pmode, we do this here to ensure that possible | |
3657 | preliminary conversions of the address have been emitted already. */ | |
6071dc7f | 3658 | if (all.function_result_decl) |
b36a8cc2 | 3659 | { |
6071dc7f RH |
3660 | tree result = DECL_RESULT (current_function_decl); |
3661 | rtx addr = DECL_RTL (all.function_result_decl); | |
b36a8cc2 | 3662 | rtx x; |
fa8db1f7 | 3663 | |
cc77ae10 | 3664 | if (DECL_BY_REFERENCE (result)) |
8dcfef8f AO |
3665 | { |
3666 | SET_DECL_VALUE_EXPR (result, all.function_result_decl); | |
3667 | x = addr; | |
3668 | } | |
cc77ae10 JM |
3669 | else |
3670 | { | |
8dcfef8f AO |
3671 | SET_DECL_VALUE_EXPR (result, |
3672 | build1 (INDIRECT_REF, TREE_TYPE (result), | |
3673 | all.function_result_decl)); | |
cc77ae10 JM |
3674 | addr = convert_memory_address (Pmode, addr); |
3675 | x = gen_rtx_MEM (DECL_MODE (result), addr); | |
3676 | set_mem_attributes (x, result, 1); | |
3677 | } | |
8dcfef8f AO |
3678 | |
3679 | DECL_HAS_VALUE_EXPR_P (result) = 1; | |
3680 | ||
f11a7b6d | 3681 | set_parm_rtl (result, x); |
b36a8cc2 OH |
3682 | } |
3683 | ||
53c428d0 | 3684 | /* We have aligned all the args, so add space for the pretend args. */ |
38173d38 | 3685 | crtl->args.pretend_args_size = all.pretend_args_size; |
6071dc7f | 3686 | all.stack_args_size.constant += all.extra_pretend_bytes; |
38173d38 | 3687 | crtl->args.size = all.stack_args_size.constant; |
6f086dfc RS |
3688 | |
3689 | /* Adjust function incoming argument size for alignment and | |
3690 | minimum length. */ | |
3691 | ||
a20c5714 RS |
3692 | crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space); |
3693 | crtl->args.size = aligned_upper_bound (crtl->args.size, | |
3694 | PARM_BOUNDARY / BITS_PER_UNIT); | |
4433e339 | 3695 | |
6dad9361 TS |
3696 | if (ARGS_GROW_DOWNWARD) |
3697 | { | |
3698 | crtl->args.arg_offset_rtx | |
a20c5714 RS |
3699 | = (all.stack_args_size.var == 0 |
3700 | ? gen_int_mode (-all.stack_args_size.constant, Pmode) | |
6dad9361 TS |
3701 | : expand_expr (size_diffop (all.stack_args_size.var, |
3702 | size_int (-all.stack_args_size.constant)), | |
3703 | NULL_RTX, VOIDmode, EXPAND_NORMAL)); | |
3704 | } | |
3705 | else | |
3706 | crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size); | |
6f086dfc RS |
3707 | |
3708 | /* See how many bytes, if any, of its args a function should try to pop | |
3709 | on return. */ | |
3710 | ||
079e7538 NF |
3711 | crtl->args.pops_args = targetm.calls.return_pops_args (fndecl, |
3712 | TREE_TYPE (fndecl), | |
3713 | crtl->args.size); | |
6f086dfc | 3714 | |
3b69d50e RK |
3715 | /* For a stdarg.h function, save info about |
3716 | regs and stack space used by the named args. */ | |
6f086dfc | 3717 | |
d5cc9181 | 3718 | crtl->args.info = all.args_so_far_v; |
6f086dfc RS |
3719 | |
3720 | /* Set the rtx used for the function return value. Put this in its | |
3721 | own variable so any optimizers that need this information don't have | |
3722 | to include tree.h. Do this here so it gets done when an inlined | |
3723 | function gets output. */ | |
3724 | ||
38173d38 | 3725 | crtl->return_rtx |
19e7881c MM |
3726 | = (DECL_RTL_SET_P (DECL_RESULT (fndecl)) |
3727 | ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX); | |
ce5e43d0 JJ |
3728 | |
3729 | /* If scalar return value was computed in a pseudo-reg, or was a named | |
3730 | return value that got dumped to the stack, copy that to the hard | |
3731 | return register. */ | |
3732 | if (DECL_RTL_SET_P (DECL_RESULT (fndecl))) | |
3733 | { | |
3734 | tree decl_result = DECL_RESULT (fndecl); | |
3735 | rtx decl_rtl = DECL_RTL (decl_result); | |
3736 | ||
3737 | if (REG_P (decl_rtl) | |
3738 | ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER | |
3739 | : DECL_REGISTER (decl_result)) | |
3740 | { | |
3741 | rtx real_decl_rtl; | |
3742 | ||
1d636cc6 RG |
3743 | real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), |
3744 | fndecl, true); | |
ce5e43d0 | 3745 | REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; |
38173d38 | 3746 | /* The delay slot scheduler assumes that crtl->return_rtx |
ce5e43d0 JJ |
3747 | holds the hard register containing the return value, not a |
3748 | temporary pseudo. */ | |
38173d38 | 3749 | crtl->return_rtx = real_decl_rtl; |
ce5e43d0 JJ |
3750 | } |
3751 | } | |
6f086dfc | 3752 | } |
4744afba RH |
3753 | |
3754 | /* A subroutine of gimplify_parameters, invoked via walk_tree. | |
3755 | For all seen types, gimplify their sizes. */ | |
3756 | ||
3757 | static tree | |
3758 | gimplify_parm_type (tree *tp, int *walk_subtrees, void *data) | |
3759 | { | |
3760 | tree t = *tp; | |
3761 | ||
3762 | *walk_subtrees = 0; | |
3763 | if (TYPE_P (t)) | |
3764 | { | |
3765 | if (POINTER_TYPE_P (t)) | |
3766 | *walk_subtrees = 1; | |
ad50bc8d RH |
3767 | else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t)) |
3768 | && !TYPE_SIZES_GIMPLIFIED (t)) | |
4744afba | 3769 | { |
726a989a | 3770 | gimplify_type_sizes (t, (gimple_seq *) data); |
4744afba RH |
3771 | *walk_subtrees = 1; |
3772 | } | |
3773 | } | |
3774 | ||
3775 | return NULL; | |
3776 | } | |
3777 | ||
3778 | /* Gimplify the parameter list for current_function_decl. This involves | |
3779 | evaluating SAVE_EXPRs of variable sized parameters and generating code | |
726a989a RB |
3780 | to implement callee-copies reference parameters. Returns a sequence of |
3781 | statements to add to the beginning of the function. */ | |
4744afba | 3782 | |
726a989a | 3783 | gimple_seq |
6aee2fd0 | 3784 | gimplify_parameters (gimple_seq *cleanup) |
4744afba RH |
3785 | { |
3786 | struct assign_parm_data_all all; | |
3b3f318a | 3787 | tree parm; |
726a989a | 3788 | gimple_seq stmts = NULL; |
9771b263 | 3789 | vec<tree> fnargs; |
3b3f318a | 3790 | unsigned i; |
4744afba RH |
3791 | |
3792 | assign_parms_initialize_all (&all); | |
3793 | fnargs = assign_parms_augmented_arg_list (&all); | |
3794 | ||
9771b263 | 3795 | FOR_EACH_VEC_ELT (fnargs, i, parm) |
4744afba RH |
3796 | { |
3797 | struct assign_parm_data_one data; | |
3798 | ||
3799 | /* Extract the type of PARM; adjust it according to ABI. */ | |
3800 | assign_parm_find_data_types (&all, parm, &data); | |
3801 | ||
3802 | /* Early out for errors and void parameters. */ | |
3803 | if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL) | |
3804 | continue; | |
3805 | ||
3806 | /* Update info on where next arg arrives in registers. */ | |
d5cc9181 | 3807 | targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode, |
3c07301f | 3808 | data.passed_type, data.named_arg); |
4744afba RH |
3809 | |
3810 | /* ??? Once upon a time variable_size stuffed parameter list | |
3811 | SAVE_EXPRs (amongst others) onto a pending sizes list. This | |
3812 | turned out to be less than manageable in the gimple world. | |
3813 | Now we have to hunt them down ourselves. */ | |
3814 | walk_tree_without_duplicates (&data.passed_type, | |
3815 | gimplify_parm_type, &stmts); | |
3816 | ||
b38f3813 | 3817 | if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST) |
4744afba RH |
3818 | { |
3819 | gimplify_one_sizepos (&DECL_SIZE (parm), &stmts); | |
3820 | gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts); | |
3821 | } | |
3822 | ||
3823 | if (data.passed_pointer) | |
3824 | { | |
3825 | tree type = TREE_TYPE (data.passed_type); | |
d5cc9181 | 3826 | if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type), |
4744afba RH |
3827 | type, data.named_arg)) |
3828 | { | |
3829 | tree local, t; | |
3830 | ||
b38f3813 | 3831 | /* For constant-sized objects, this is trivial; for |
4744afba | 3832 | variable-sized objects, we have to play games. */ |
b38f3813 EB |
3833 | if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST |
3834 | && !(flag_stack_check == GENERIC_STACK_CHECK | |
3835 | && compare_tree_int (DECL_SIZE_UNIT (parm), | |
3836 | STACK_CHECK_MAX_VAR_SIZE) > 0)) | |
4744afba | 3837 | { |
5dac1dae | 3838 | local = create_tmp_var (type, get_name (parm)); |
4744afba | 3839 | DECL_IGNORED_P (local) = 0; |
04487a2f JJ |
3840 | /* If PARM was addressable, move that flag over |
3841 | to the local copy, as its address will be taken, | |
37609bf0 RG |
3842 | not the PARM's. Keep the PARM's address-taken flag set, |
3843 | as we'll query it during gimplification. */ |
04487a2f | 3844 | if (TREE_ADDRESSABLE (parm)) |
37609bf0 | 3845 | TREE_ADDRESSABLE (local) = 1; |
5dac1dae JJ |
3846 | else if (TREE_CODE (type) == COMPLEX_TYPE |
3847 | || TREE_CODE (type) == VECTOR_TYPE) | |
3848 | DECL_GIMPLE_REG_P (local) = 1; | |
6aee2fd0 JJ |
3849 | |
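		  /* If the local copy lives in memory, emit a clobber into
		     the cleanup sequence so its stack slot can be reused
		     once the copy is dead (unless slot sharing is off).  */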
3850 | if (!is_gimple_reg (local) | |
3851 | && flag_stack_reuse != SR_NONE) | |
3852 | { | |
3853 | tree clobber = build_constructor (type, NULL); | |
3854 | gimple *clobber_stmt; | |
3855 | TREE_THIS_VOLATILE (clobber) = 1; | |
3856 | clobber_stmt = gimple_build_assign (local, clobber); | |
3857 | gimple_seq_add_stmt (cleanup, clobber_stmt); | |
3858 | } | |
4744afba RH |
3859 | } |
3860 | else | |
3861 | { | |
5039610b | 3862 | tree ptr_type, addr; |
4744afba RH |
3863 | |
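		  /* Build ADDR as an alloca call for DECL_SIZE_UNIT (parm)
		     and make LOCAL the dereference *ADDR, so the callee copy
		     lives in freshly allocated stack space of the right
		     dynamic size.  */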
3864 | ptr_type = build_pointer_type (type); | |
c98b08ff | 3865 | addr = create_tmp_reg (ptr_type, get_name (parm)); |
4744afba RH |
3866 | DECL_IGNORED_P (addr) = 0; |
3867 | local = build_fold_indirect_ref (addr); | |
3868 | ||
9e878cf1 EB |
3869 | t = build_alloca_call_expr (DECL_SIZE_UNIT (parm), |
3870 | DECL_ALIGN (parm), | |
3871 | max_int_size_in_bytes (type)); | |
d3c12306 | 3872 | /* The call has been built for a variable-sized object. */ |
63d2a353 | 3873 | CALL_ALLOCA_FOR_VAR_P (t) = 1; |
4744afba | 3874 | t = fold_convert (ptr_type, t); |
726a989a | 3875 | t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); |
4744afba RH |
3876 | gimplify_and_add (t, &stmts); |
3877 | } | |
3878 | ||
726a989a | 3879 | gimplify_assign (local, parm, &stmts); |
4744afba | 3880 | |
833b3afe DB |
3881 | SET_DECL_VALUE_EXPR (parm, local); |
3882 | DECL_HAS_VALUE_EXPR_P (parm) = 1; | |
4744afba RH |
3883 | } |
3884 | } | |
3885 | } | |
3886 | ||
9771b263 | 3887 | fnargs.release (); |
3b3f318a | 3888 | |
4744afba RH |
3889 | return stmts; |
3890 | } | |
75dc3319 | 3891 | \f |
6f086dfc RS |
3892 | /* Compute the size and offset from the start of the stacked arguments for a |
3893 | parm passed in mode PASSED_MODE and with type TYPE. | |
3894 | ||
3895 | INITIAL_OFFSET_PTR points to the current offset into the stacked | |
3896 | arguments. | |
3897 | ||
e7949876 AM |
3898 | The starting offset and size for this parm are returned in |
3899 | LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is | |
3900 | nonzero, the offset is that of the stack slot, which is returned in |
3901 | LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of | |
3902 | padding required from the initial offset ptr to the stack slot. | |
6f086dfc | 3903 | |
cc2902df | 3904 | IN_REGS is nonzero if the argument will be passed in registers. It will |
6f086dfc RS |
3905 | never be set if REG_PARM_STACK_SPACE is not defined. |
3906 | ||
2e4ceca5 UW |
3907 | REG_PARM_STACK_SPACE is the number of bytes of stack space reserved |
3908 | for arguments which are passed in registers. | |
3909 | ||
6f086dfc RS |
3910 | FNDECL is the function in which the argument was defined. |
3911 | ||
3912 | There are two types of rounding that are done. The first, controlled by | |
c2ed6cf8 NF |
3913 | TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the |
3914 | argument list to be aligned to the specific boundary (in bits). This | |
3915 | rounding affects the initial and starting offsets, but not the argument | |
3916 | size. | |
6f086dfc | 3917 | |
76b0cbf8 | 3918 | The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY, |
6f086dfc RS |
3919 | optionally rounds the size of the parm to PARM_BOUNDARY. The |
3920 | initial offset is not affected by this rounding, while the size always | |
3921 | is and the starting offset may be. */ | |
3922 | ||
e7949876 AM |
3923 | /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case; |
3924 | INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's | |
6f086dfc | 3925 | callers pass in the total size of args so far as |
e7949876 | 3926 | INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */ |
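/* A purely illustrative example (the numbers are hypothetical, not taken
   from any particular target): with PARM_BOUNDARY == 32, a 1-byte argument
   may have its size rounded up to 4 bytes by the second rounding while its
   starting offset is left alone; if TARGET_FUNCTION_ARG_BOUNDARY returns 64
   for a following DImode argument, the first rounding pushes that argument's
   starting offset up to the next multiple of 8 bytes without changing its
   size.  */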
6f086dfc | 3927 | |
6f086dfc | 3928 | void |
ef4bddc2 | 3929 | locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs, |
2e4ceca5 UW |
3930 | int reg_parm_stack_space, int partial, |
3931 | tree fndecl ATTRIBUTE_UNUSED, | |
fa8db1f7 AJ |
3932 | struct args_size *initial_offset_ptr, |
3933 | struct locate_and_pad_arg_data *locate) | |
6f086dfc | 3934 | { |
e7949876 | 3935 | tree sizetree; |
76b0cbf8 | 3936 | pad_direction where_pad; |
123148b5 | 3937 | unsigned int boundary, round_boundary; |
e7949876 | 3938 | int part_size_in_regs; |
6f086dfc | 3939 | |
6f086dfc RS |
3940 | /* If we have found a stack parm before we reach the end of the |
3941 | area reserved for registers, skip that area. */ | |
3942 | if (! in_regs) | |
3943 | { | |
6f086dfc RS |
3944 | if (reg_parm_stack_space > 0) |
3945 | { | |
a20c5714 RS |
3946 | if (initial_offset_ptr->var |
3947 | || !ordered_p (initial_offset_ptr->constant, | |
3948 | reg_parm_stack_space)) | |
6f086dfc RS |
3949 | { |
3950 | initial_offset_ptr->var | |
3951 | = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr), | |
fed3cef0 | 3952 | ssize_int (reg_parm_stack_space)); |
6f086dfc RS |
3953 | initial_offset_ptr->constant = 0; |
3954 | } | |
a20c5714 RS |
3955 | else |
3956 | initial_offset_ptr->constant | |
3957 | = ordered_max (initial_offset_ptr->constant, | |
3958 | reg_parm_stack_space); | |
6f086dfc RS |
3959 | } |
3960 | } | |
6f086dfc | 3961 | |
78a52f11 | 3962 | part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0); |
e7949876 | 3963 | |
974aedcc MP |
3964 | sizetree = (type |
3965 | ? arg_size_in_bytes (type) | |
3966 | : size_int (GET_MODE_SIZE (passed_mode))); | |
76b0cbf8 | 3967 | where_pad = targetm.calls.function_arg_padding (passed_mode, type); |
c2ed6cf8 | 3968 | boundary = targetm.calls.function_arg_boundary (passed_mode, type); |
123148b5 BS |
3969 | round_boundary = targetm.calls.function_arg_round_boundary (passed_mode, |
3970 | type); | |
6e985040 | 3971 | locate->where_pad = where_pad; |
2e3f842f L |
3972 | |
3973 | /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */ | |
3974 | if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT) | |
3975 | boundary = MAX_SUPPORTED_STACK_ALIGNMENT; | |
3976 | ||
bfc45551 | 3977 | locate->boundary = boundary; |
6f086dfc | 3978 | |
2e3f842f L |
3979 | if (SUPPORTS_STACK_ALIGNMENT) |
3980 | { | |
3981 | /* stack_alignment_estimated can't change after stack has been | |
3982 | realigned. */ | |
3983 | if (crtl->stack_alignment_estimated < boundary) | |
3984 | { | |
3985 | if (!crtl->stack_realign_processed) | |
3986 | crtl->stack_alignment_estimated = boundary; | |
3987 | else | |
3988 | { | |
3989 | /* If stack is realigned and stack alignment value | |
3990 | hasn't been finalized, it is OK not to increase | |
3991 | stack_alignment_estimated. The bigger alignment | |
3992 | requirement is recorded in stack_alignment_needed | |
3993 | below. */ | |
3994 | gcc_assert (!crtl->stack_realign_finalized | |
3995 | && crtl->stack_realign_needed); | |
3996 | } | |
3997 | } | |
3998 | } | |
3999 | ||
c7e777b5 RH |
4000 | /* Remember if the outgoing parameter requires extra alignment on the |
4001 | calling function side. */ | |
cb91fab0 JH |
4002 | if (crtl->stack_alignment_needed < boundary) |
4003 | crtl->stack_alignment_needed = boundary; | |
2e3f842f L |
4004 | if (crtl->preferred_stack_boundary < boundary) |
4005 | crtl->preferred_stack_boundary = boundary; | |
c7e777b5 | 4006 | |
6dad9361 TS |
4007 | if (ARGS_GROW_DOWNWARD) |
4008 | { | |
4009 | locate->slot_offset.constant = -initial_offset_ptr->constant; | |
4010 | if (initial_offset_ptr->var) | |
4011 | locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0), | |
4012 | initial_offset_ptr->var); | |
4013 | ||
a589e68f DM |
4014 | { |
4015 | tree s2 = sizetree; | |
76b0cbf8 | 4016 | if (where_pad != PAD_NONE |
a589e68f DM |
4017 | && (!tree_fits_uhwi_p (sizetree) |
4018 | || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) | |
4019 | s2 = round_up (s2, round_boundary / BITS_PER_UNIT); | |
4020 | SUB_PARM_SIZE (locate->slot_offset, s2); | |
4021 | } | |
6dad9361 TS |
4022 | |
4023 | locate->slot_offset.constant += part_size_in_regs; | |
4024 | ||
4025 | if (!in_regs || reg_parm_stack_space > 0) | |
4026 | pad_to_arg_alignment (&locate->slot_offset, boundary, | |
4027 | &locate->alignment_pad); | |
4028 | ||
4029 | locate->size.constant = (-initial_offset_ptr->constant | |
4030 | - locate->slot_offset.constant); | |
4031 | if (initial_offset_ptr->var) | |
4032 | locate->size.var = size_binop (MINUS_EXPR, | |
4033 | size_binop (MINUS_EXPR, | |
4034 | ssize_int (0), | |
4035 | initial_offset_ptr->var), | |
4036 | locate->slot_offset.var); | |
4037 | ||
4038 | /* Pad_below needs the pre-rounded size to know how much to pad | |
4039 | below. */ | |
4040 | locate->offset = locate->slot_offset; | |
76b0cbf8 | 4041 | if (where_pad == PAD_DOWNWARD) |
6dad9361 TS |
4042 | pad_below (&locate->offset, passed_mode, sizetree); |
4043 | ||
4044 | } | |
4045 | else | |
4046 | { | |
4047 | if (!in_regs || reg_parm_stack_space > 0) | |
4048 | pad_to_arg_alignment (initial_offset_ptr, boundary, | |
4049 | &locate->alignment_pad); | |
4050 | locate->slot_offset = *initial_offset_ptr; | |
6f086dfc RS |
4051 | |
4052 | #ifdef PUSH_ROUNDING | |
6dad9361 TS |
4053 | if (passed_mode != BLKmode) |
4054 | sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree))); | |
6f086dfc RS |
4055 | #endif |
4056 | ||
6dad9361 TS |
4057 | /* Pad_below needs the pre-rounded size to know how much to pad below |
4058 | so this must be done before rounding up. */ | |
4059 | locate->offset = locate->slot_offset; | |
76b0cbf8 | 4060 | if (where_pad == PAD_DOWNWARD) |
6dad9361 | 4061 | pad_below (&locate->offset, passed_mode, sizetree); |
d4b0a7a0 | 4062 | |
76b0cbf8 | 4063 | if (where_pad != PAD_NONE |
6dad9361 TS |
4064 | && (!tree_fits_uhwi_p (sizetree) |
4065 | || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary)) | |
4066 | sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT); | |
6f086dfc | 4067 | |
6dad9361 | 4068 | ADD_PARM_SIZE (locate->size, sizetree); |
e7949876 | 4069 | |
6dad9361 TS |
4070 | locate->size.constant -= part_size_in_regs; |
4071 | } | |
099590dc | 4072 | |
870118b7 RS |
4073 | locate->offset.constant |
4074 | += targetm.calls.function_arg_offset (passed_mode, type); | |
6f086dfc RS |
4075 | } |
4076 | ||
e16c591a RS |
4077 | /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY. |
4078 | BOUNDARY is measured in bits, but must be a multiple of a storage unit. */ | |
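/* Editor's note: a concrete example with made-up numbers.  With
   BOUNDARY == 64 bits (8 bytes), STACK_POINTER_OFFSET == 0 and
   OFFSET_PTR->CONSTANT == 20, the misalignment is 4; when arguments grow
   upward 4 bytes of padding are added and the offset becomes 24, and with
   ARGS_GROW_DOWNWARD the offset is instead rounded down to 16.  */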
4079 | ||
6f086dfc | 4080 | static void |
fa8db1f7 AJ |
4081 | pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, |
4082 | struct args_size *alignment_pad) | |
6f086dfc | 4083 | { |
a544cfd2 | 4084 | tree save_var = NULL_TREE; |
a20c5714 | 4085 | poly_int64 save_constant = 0; |
a751cd5b | 4086 | int boundary_in_bytes = boundary / BITS_PER_UNIT; |
a20c5714 | 4087 | poly_int64 sp_offset = STACK_POINTER_OFFSET; |
a594a19c GK |
4088 | |
4089 | #ifdef SPARC_STACK_BOUNDARY_HACK | |
2358ff91 EB |
4090 | /* ??? The SPARC port may claim a STACK_BOUNDARY higher than |
4091 | the real alignment of %sp. However, when it does this, the | |
4092 | alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ | |
a594a19c GK |
4093 | if (SPARC_STACK_BOUNDARY_HACK) |
4094 | sp_offset = 0; | |
4095 | #endif | |
4fc026cd | 4096 | |
6f6b8f81 | 4097 | if (boundary > PARM_BOUNDARY) |
4fc026cd CM |
4098 | { |
4099 | save_var = offset_ptr->var; | |
4100 | save_constant = offset_ptr->constant; | |
4101 | } | |
4102 | ||
4103 | alignment_pad->var = NULL_TREE; | |
4104 | alignment_pad->constant = 0; | |
4fc026cd | 4105 | |
6f086dfc RS |
4106 | if (boundary > BITS_PER_UNIT) |
4107 | { | |
a20c5714 RS |
4108 | int misalign; |
4109 | if (offset_ptr->var | |
4110 | || !known_misalignment (offset_ptr->constant + sp_offset, | |
4111 | boundary_in_bytes, &misalign)) | |
6f086dfc | 4112 | { |
a594a19c GK |
4113 | tree sp_offset_tree = ssize_int (sp_offset); |
4114 | tree offset = size_binop (PLUS_EXPR, | |
4115 | ARGS_SIZE_TREE (*offset_ptr), | |
4116 | sp_offset_tree); | |
6dad9361 TS |
4117 | tree rounded; |
4118 | if (ARGS_GROW_DOWNWARD) | |
4119 | rounded = round_down (offset, boundary / BITS_PER_UNIT); | |
4120 | else | |
4121 | rounded = round_up (offset, boundary / BITS_PER_UNIT); | |
a594a19c GK |
4122 | |
4123 | offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree); | |
e7949876 AM |
4124 | /* ARGS_SIZE_TREE includes constant term. */ |
4125 | offset_ptr->constant = 0; | |
6f6b8f81 | 4126 | if (boundary > PARM_BOUNDARY) |
dd3f0101 | 4127 | alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var, |
fed3cef0 | 4128 | save_var); |
6f086dfc RS |
4129 | } |
4130 | else | |
718fe406 | 4131 | { |
a20c5714 RS |
4132 | if (ARGS_GROW_DOWNWARD) |
4133 | offset_ptr->constant -= misalign; | |
4134 | else | |
4135 | offset_ptr->constant += -misalign & (boundary_in_bytes - 1); | |
6dad9361 | 4136 | |
a20c5714 RS |
4137 | if (boundary > PARM_BOUNDARY) |
4138 | alignment_pad->constant = offset_ptr->constant - save_constant; | |
718fe406 | 4139 | } |
6f086dfc RS |
4140 | } |
4141 | } | |
4142 | ||
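/* Editor's note: the helper below carries no comment in the original; this
   description is inferred from its body.  Add to *OFFSET_PTR the padding
   that goes below a downward-padded argument, i.e. the difference between
   the argument's size (GET_MODE_SIZE of PASSED_MODE, or SIZETREE for
   BLKmode) and that size rounded up to PARM_BOUNDARY.  For example, with
   PARM_BOUNDARY == 32 a 2-byte HImode argument gets 2 bytes added, so the
   value ends up at the far end of its 4-byte slot.  */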
4143 | static void | |
ef4bddc2 | 4144 | pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree) |
6f086dfc | 4145 | { |
b66fd4fc | 4146 | unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT; |
cf098191 RS |
4147 | int misalign; |
4148 | if (passed_mode != BLKmode | |
4149 | && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign)) | |
4150 | offset_ptr->constant += -misalign & (align - 1); | |
6f086dfc RS |
4151 | else |
4152 | { | |
4153 | if (TREE_CODE (sizetree) != INTEGER_CST | |
b66fd4fc | 4154 | || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0) |
6f086dfc RS |
4155 | { |
4156 | /* Round the size up to multiple of PARM_BOUNDARY bits. */ | |
b66fd4fc | 4157 | tree s2 = round_up (sizetree, align); |
6f086dfc RS |
4158 | /* Add it in. */ |
4159 | ADD_PARM_SIZE (*offset_ptr, s2); | |
4160 | SUB_PARM_SIZE (*offset_ptr, sizetree); | |
4161 | } | |
4162 | } | |
4163 | } | |
6f086dfc | 4164 | \f |
6f086dfc | 4165 | |
6fb5fa3c DB |
4166 | /* True if register REGNO was alive at a place where `setjmp' was |
4167 | called and was set more than once or is an argument. Such regs may | |
4168 | be clobbered by `longjmp'. */ | |
4169 | ||
4170 | static bool | |
4171 | regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno) | |
4172 | { | |
4173 | /* There appear to be cases where some local vars never reach the | |
4174 | backend but have bogus regnos. */ | |
4175 | if (regno >= max_reg_num ()) | |
4176 | return false; | |
4177 | ||
4178 | return ((REG_N_SETS (regno) > 1 | |
fefa31b5 DM |
4179 | || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)), |
4180 | regno)) | |
6fb5fa3c DB |
4181 | && REGNO_REG_SET_P (setjmp_crosses, regno)); |
4182 | } | |
4183 | ||
4184 | /* Walk the tree of blocks describing the binding levels within a | |
4185 | function and warn about variables that might be killed by setjmp or |
4186 | vfork. This is done after flow analysis and before register |
4187 | allocation, since register allocation replaces the pseudo-regs with hard |
4188 | regs. */ | |
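/* Editor's note: a user-level illustration (hypothetical code, not part of
   GCC) of the situation these warnings are meant to flag:

	jmp_buf env;
	int f (void)
	{
	  int i = 0;
	  if (setjmp (env) == 0)
	    {
	      i = 1;
	      longjmp (env, 1);
	    }
	  return i;
	}

   I is a non-volatile automatic modified between the setjmp and the longjmp;
   if it is allocated to a register, its value after the second return from
   setjmp is indeterminate, which is what -Wclobbered reports.  */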
4189 | ||
4190 | static void | |
4191 | setjmp_vars_warning (bitmap setjmp_crosses, tree block) | |
6f086dfc | 4192 | { |
b3694847 | 4193 | tree decl, sub; |
6de9cd9a | 4194 | |
910ad8de | 4195 | for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl)) |
6f086dfc | 4196 | { |
8813a647 | 4197 | if (VAR_P (decl) |
bc41842b | 4198 | && DECL_RTL_SET_P (decl) |
f8cfc6aa | 4199 | && REG_P (DECL_RTL (decl)) |
6fb5fa3c | 4200 | && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) |
b8698a0f | 4201 | warning (OPT_Wclobbered, "variable %q+D might be clobbered by" |
2b001724 | 4202 | " %<longjmp%> or %<vfork%>", decl); |
6f086dfc | 4203 | } |
6de9cd9a | 4204 | |
87caf699 | 4205 | for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub)) |
6fb5fa3c | 4206 | setjmp_vars_warning (setjmp_crosses, sub); |
6f086dfc RS |
4207 | } |
4208 | ||
6de9cd9a | 4209 | /* Do the appropriate part of setjmp_vars_warning |
6f086dfc RS |
4210 | but for arguments instead of local variables. */ |
4211 | ||
6fb5fa3c DB |
4212 | static void |
4213 | setjmp_args_warning (bitmap setjmp_crosses) | |
6f086dfc | 4214 | { |
b3694847 | 4215 | tree decl; |
6f086dfc | 4216 | for (decl = DECL_ARGUMENTS (current_function_decl); |
910ad8de | 4217 | decl; decl = DECL_CHAIN (decl)) |
6f086dfc | 4218 | if (DECL_RTL (decl) != 0 |
f8cfc6aa | 4219 | && REG_P (DECL_RTL (decl)) |
6fb5fa3c | 4220 | && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) |
b8698a0f | 4221 | warning (OPT_Wclobbered, |
2b001724 | 4222 | "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>", |
dee15844 | 4223 | decl); |
6f086dfc RS |
4224 | } |
4225 | ||
6fb5fa3c DB |
4226 | /* Generate warning messages for variables live across setjmp. */ |
4227 | ||
b8698a0f | 4228 | void |
6fb5fa3c DB |
4229 | generate_setjmp_warnings (void) |
4230 | { | |
4231 | bitmap setjmp_crosses = regstat_get_setjmp_crosses (); | |
4232 | ||
0cae8d31 | 4233 | if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS |
6fb5fa3c DB |
4234 | || bitmap_empty_p (setjmp_crosses)) |
4235 | return; | |
4236 | ||
4237 | setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl)); | |
4238 | setjmp_args_warning (setjmp_crosses); | |
4239 | } | |
4240 | ||
6f086dfc | 4241 | \f |
3373692b | 4242 | /* Reverse the order of elements in the fragment chain T of blocks, |
1e3c1d95 JJ |
4243 | and return the new head of the chain (old last element). |
4244 | In addition, clear BLOCK_SAME_RANGE flags when needed |
4245 | and adjust BLOCK_SUPERCONTEXT from the super fragment to | |
4246 | its super fragment origin. */ | |
3373692b JJ |
4247 | |
4248 | static tree | |
4249 | block_fragments_nreverse (tree t) | |
4250 | { | |
1e3c1d95 JJ |
4251 | tree prev = 0, block, next, prev_super = 0; |
4252 | tree super = BLOCK_SUPERCONTEXT (t); | |
4253 | if (BLOCK_FRAGMENT_ORIGIN (super)) | |
4254 | super = BLOCK_FRAGMENT_ORIGIN (super); | |
3373692b JJ |
4255 | for (block = t; block; block = next) |
4256 | { | |
4257 | next = BLOCK_FRAGMENT_CHAIN (block); | |
4258 | BLOCK_FRAGMENT_CHAIN (block) = prev; | |
1e3c1d95 JJ |
4259 | if ((prev && !BLOCK_SAME_RANGE (prev)) |
4260 | || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block)) | |
4261 | != prev_super)) | |
4262 | BLOCK_SAME_RANGE (block) = 0; | |
4263 | prev_super = BLOCK_SUPERCONTEXT (block); | |
4264 | BLOCK_SUPERCONTEXT (block) = super; | |
3373692b JJ |
4265 | prev = block; |
4266 | } | |
1e3c1d95 JJ |
4267 | t = BLOCK_FRAGMENT_ORIGIN (t); |
4268 | if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t)) | |
4269 | != prev_super) | |
4270 | BLOCK_SAME_RANGE (t) = 0; | |
4271 | BLOCK_SUPERCONTEXT (t) = super; | |
3373692b JJ |
4272 | return prev; |
4273 | } | |
4274 | ||
4275 | /* Reverse the order of elements in the chain T of blocks, | |
4276 | and return the new head of the chain (old last element). | |
4277 | Also do the same on subblocks and reverse the order of elements | |
4278 | in BLOCK_FRAGMENT_CHAIN as well. */ | |
4279 | ||
4280 | static tree | |
4281 | blocks_nreverse_all (tree t) | |
4282 | { | |
4283 | tree prev = 0, block, next; | |
4284 | for (block = t; block; block = next) | |
4285 | { | |
4286 | next = BLOCK_CHAIN (block); | |
4287 | BLOCK_CHAIN (block) = prev; | |
3373692b JJ |
4288 | if (BLOCK_FRAGMENT_CHAIN (block) |
4289 | && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE) | |
1e3c1d95 JJ |
4290 | { |
4291 | BLOCK_FRAGMENT_CHAIN (block) | |
4292 | = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block)); | |
4293 | if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block))) | |
4294 | BLOCK_SAME_RANGE (block) = 0; | |
4295 | } | |
4296 | BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block)); | |
3373692b JJ |
4297 | prev = block; |
4298 | } | |
4299 | return prev; | |
4300 | } | |
4301 | ||
4302 | ||
a20612aa RH |
4303 | /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END}, |
4304 | and create duplicate blocks. */ | |
4305 | /* ??? Need an option to either create block fragments or to create | |
4306 | abstract origin duplicates of a source block. It really depends | |
4307 | on what optimization has been performed. */ | |
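/* Editor's note: as a simplified picture of the fragment case handled below,
   if the insns of one lexical scope end up in two disjoint address ranges
   (for instance after hot/cold partitioning), its BLOCK is seen at a second
   NOTE_INSN_BLOCK_BEG; that occurrence gets a copy whose
   BLOCK_FRAGMENT_ORIGIN points back at the original and which is linked into
   the original's BLOCK_FRAGMENT_CHAIN.  */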
467456d0 | 4308 | |
116eebd6 | 4309 | void |
fa8db1f7 | 4310 | reorder_blocks (void) |
467456d0 | 4311 | { |
116eebd6 | 4312 | tree block = DECL_INITIAL (current_function_decl); |
467456d0 | 4313 | |
1a4450c7 | 4314 | if (block == NULL_TREE) |
116eebd6 | 4315 | return; |
fc289cd1 | 4316 | |
00f96dc9 | 4317 | auto_vec<tree, 10> block_stack; |
18c038b9 | 4318 | |
a20612aa | 4319 | /* Reset the TREE_ASM_WRITTEN bit for all blocks. */ |
6de9cd9a | 4320 | clear_block_marks (block); |
a20612aa | 4321 | |
116eebd6 MM |
4322 | /* Prune the old trees away, so that they don't get in the way. */ |
4323 | BLOCK_SUBBLOCKS (block) = NULL_TREE; | |
4324 | BLOCK_CHAIN (block) = NULL_TREE; | |
fc289cd1 | 4325 | |
a20612aa | 4326 | /* Recreate the block tree from the note nesting. */ |
116eebd6 | 4327 | reorder_blocks_1 (get_insns (), block, &block_stack); |
3373692b | 4328 | BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block)); |
467456d0 RS |
4329 | } |
4330 | ||
a20612aa | 4331 | /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */ |
0a1c58a2 | 4332 | |
6de9cd9a DN |
4333 | void |
4334 | clear_block_marks (tree block) | |
cc1fe44f | 4335 | { |
a20612aa | 4336 | while (block) |
cc1fe44f | 4337 | { |
a20612aa | 4338 | TREE_ASM_WRITTEN (block) = 0; |
6de9cd9a | 4339 | clear_block_marks (BLOCK_SUBBLOCKS (block)); |
a20612aa | 4340 | block = BLOCK_CHAIN (block); |
cc1fe44f DD |
4341 | } |
4342 | } | |
4343 | ||
0a1c58a2 | 4344 | static void |
691fe203 DM |
4345 | reorder_blocks_1 (rtx_insn *insns, tree current_block, |
4346 | vec<tree> *p_block_stack) | |
0a1c58a2 | 4347 | { |
691fe203 | 4348 | rtx_insn *insn; |
1e3c1d95 | 4349 | tree prev_beg = NULL_TREE, prev_end = NULL_TREE; |
0a1c58a2 JL |
4350 | |
4351 | for (insn = insns; insn; insn = NEXT_INSN (insn)) | |
4352 | { | |
4b4bf941 | 4353 | if (NOTE_P (insn)) |
0a1c58a2 | 4354 | { |
a38e7aa5 | 4355 | if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG) |
0a1c58a2 JL |
4356 | { |
4357 | tree block = NOTE_BLOCK (insn); | |
51b7d006 DJ |
4358 | tree origin; |
4359 | ||
3373692b JJ |
4360 | gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE); |
4361 | origin = block; | |
a20612aa | 4362 | |
1e3c1d95 JJ |
4363 | if (prev_end) |
4364 | BLOCK_SAME_RANGE (prev_end) = 0; | |
4365 | prev_end = NULL_TREE; | |
4366 | ||
a20612aa RH |
4367 | /* If we have seen this block before, that means it now |
4368 | spans multiple address regions. Create a new fragment. */ | |
0a1c58a2 JL |
4369 | if (TREE_ASM_WRITTEN (block)) |
4370 | { | |
a20612aa | 4371 | tree new_block = copy_node (block); |
a20612aa | 4372 | |
1e3c1d95 | 4373 | BLOCK_SAME_RANGE (new_block) = 0; |
a20612aa RH |
4374 | BLOCK_FRAGMENT_ORIGIN (new_block) = origin; |
4375 | BLOCK_FRAGMENT_CHAIN (new_block) | |
4376 | = BLOCK_FRAGMENT_CHAIN (origin); | |
4377 | BLOCK_FRAGMENT_CHAIN (origin) = new_block; | |
4378 | ||
4379 | NOTE_BLOCK (insn) = new_block; | |
4380 | block = new_block; | |
0a1c58a2 | 4381 | } |
a20612aa | 4382 | |
1e3c1d95 JJ |
4383 | if (prev_beg == current_block && prev_beg) |
4384 | BLOCK_SAME_RANGE (block) = 1; | |
4385 | ||
4386 | prev_beg = origin; | |
4387 | ||
0a1c58a2 JL |
4388 | BLOCK_SUBBLOCKS (block) = 0; |
4389 | TREE_ASM_WRITTEN (block) = 1; | |
339a28b9 ZW |
4390 | /* When there's only one block for the entire function, |
4391 | current_block == block and we mustn't do this; it |
4392 | will cause infinite recursion. */ | |
4393 | if (block != current_block) | |
4394 | { | |
1e3c1d95 | 4395 | tree super; |
51b7d006 | 4396 | if (block != origin) |
1e3c1d95 JJ |
4397 | gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block |
4398 | || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT | |
4399 | (origin)) | |
4400 | == current_block); | |
9771b263 | 4401 | if (p_block_stack->is_empty ()) |
1e3c1d95 JJ |
4402 | super = current_block; |
4403 | else | |
4404 | { | |
9771b263 | 4405 | super = p_block_stack->last (); |
1e3c1d95 JJ |
4406 | gcc_assert (super == current_block |
4407 | || BLOCK_FRAGMENT_ORIGIN (super) | |
4408 | == current_block); | |
4409 | } | |
4410 | BLOCK_SUPERCONTEXT (block) = super; | |
339a28b9 ZW |
4411 | BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); |
4412 | BLOCK_SUBBLOCKS (current_block) = block; | |
51b7d006 | 4413 | current_block = origin; |
339a28b9 | 4414 | } |
9771b263 | 4415 | p_block_stack->safe_push (block); |
0a1c58a2 | 4416 | } |
a38e7aa5 | 4417 | else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END) |
0a1c58a2 | 4418 | { |
9771b263 | 4419 | NOTE_BLOCK (insn) = p_block_stack->pop (); |
0a1c58a2 | 4420 | current_block = BLOCK_SUPERCONTEXT (current_block); |
1e3c1d95 JJ |
4421 | if (BLOCK_FRAGMENT_ORIGIN (current_block)) |
4422 | current_block = BLOCK_FRAGMENT_ORIGIN (current_block); | |
4423 | prev_beg = NULL_TREE; | |
4424 | prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn)) | |
4425 | ? NOTE_BLOCK (insn) : NULL_TREE; | |
0a1c58a2 JL |
4426 | } |
4427 | } | |
1e3c1d95 JJ |
4428 | else |
4429 | { | |
4430 | prev_beg = NULL_TREE; | |
4431 | if (prev_end) | |
4432 | BLOCK_SAME_RANGE (prev_end) = 0; | |
4433 | prev_end = NULL_TREE; | |
4434 | } | |
0a1c58a2 JL |
4435 | } |
4436 | } | |
4437 | ||
467456d0 RS |
4438 | /* Reverse the order of elements in the chain T of blocks, |
4439 | and return the new head of the chain (old last element). */ | |
4440 | ||
6de9cd9a | 4441 | tree |
fa8db1f7 | 4442 | blocks_nreverse (tree t) |
467456d0 | 4443 | { |
3373692b JJ |
4444 | tree prev = 0, block, next; |
4445 | for (block = t; block; block = next) | |
467456d0 | 4446 | { |
3373692b JJ |
4447 | next = BLOCK_CHAIN (block); |
4448 | BLOCK_CHAIN (block) = prev; | |
4449 | prev = block; | |
467456d0 RS |
4450 | } |
4451 | return prev; | |
4452 | } | |
4453 | ||
61e46a7d NF |
4454 | /* Concatenate two chains of blocks (chained through BLOCK_CHAIN) |
4455 | by modifying the last node in chain 1 to point to chain 2. */ | |
4456 | ||
4457 | tree | |
4458 | block_chainon (tree op1, tree op2) | |
4459 | { | |
4460 | tree t1; | |
4461 | ||
4462 | if (!op1) | |
4463 | return op2; | |
4464 | if (!op2) | |
4465 | return op1; | |
4466 | ||
4467 | for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1)) | |
4468 | continue; | |
4469 | BLOCK_CHAIN (t1) = op2; | |
4470 | ||
4471 | #ifdef ENABLE_TREE_CHECKING | |
4472 | { | |
4473 | tree t2; | |
4474 | for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2)) | |
4475 | gcc_assert (t2 != t1); | |
4476 | } | |
4477 | #endif | |
4478 | ||
4479 | return op1; | |
4480 | } | |
4481 | ||
18c038b9 MM |
4482 | /* Count the subblocks of the list starting with BLOCK. If VECTOR is |
4483 | non-NULL, list them all into VECTOR, in a depth-first preorder | |
4484 | traversal of the block tree. Also clear TREE_ASM_WRITTEN in all | |
b2a59b15 | 4485 | blocks. */ |
467456d0 RS |
4486 | |
4487 | static int | |
fa8db1f7 | 4488 | all_blocks (tree block, tree *vector) |
467456d0 | 4489 | { |
b2a59b15 MS |
4490 | int n_blocks = 0; |
4491 | ||
a84efb51 JO |
4492 | while (block) |
4493 | { | |
4494 | TREE_ASM_WRITTEN (block) = 0; | |
b2a59b15 | 4495 | |
a84efb51 JO |
4496 | /* Record this block. */ |
4497 | if (vector) | |
4498 | vector[n_blocks] = block; | |
b2a59b15 | 4499 | |
a84efb51 | 4500 | ++n_blocks; |
718fe406 | 4501 | |
a84efb51 JO |
4502 | /* Record the subblocks, and their subblocks... */ |
4503 | n_blocks += all_blocks (BLOCK_SUBBLOCKS (block), | |
4504 | vector ? vector + n_blocks : 0); | |
4505 | block = BLOCK_CHAIN (block); | |
4506 | } | |
467456d0 RS |
4507 | |
4508 | return n_blocks; | |
4509 | } | |
18c038b9 MM |
4510 | |
4511 | /* Return a vector containing all the blocks rooted at BLOCK. The | |
4512 | number of elements in the vector is stored in N_BLOCKS_P. The | |
4513 | vector is dynamically allocated; it is the caller's responsibility | |
4514 | to call `free' on the pointer returned. */ | |
718fe406 | 4515 | |
18c038b9 | 4516 | static tree * |
fa8db1f7 | 4517 | get_block_vector (tree block, int *n_blocks_p) |
18c038b9 MM |
4518 | { |
4519 | tree *block_vector; | |
4520 | ||
4521 | *n_blocks_p = all_blocks (block, NULL); | |
5ed6ace5 | 4522 | block_vector = XNEWVEC (tree, *n_blocks_p); |
18c038b9 MM |
4523 | all_blocks (block, block_vector); |
4524 | ||
4525 | return block_vector; | |
4526 | } | |
4527 | ||
f83b236e | 4528 | static GTY(()) int next_block_index = 2; |
18c038b9 MM |
4529 | |
4530 | /* Set BLOCK_NUMBER for all the blocks in FN. */ | |
4531 | ||
4532 | void | |
fa8db1f7 | 4533 | number_blocks (tree fn) |
18c038b9 MM |
4534 | { |
4535 | int i; | |
4536 | int n_blocks; | |
4537 | tree *block_vector; | |
4538 | ||
180295ed | 4539 | /* For XCOFF debugging output, we start numbering the blocks |
18c038b9 MM |
4540 | from 1 within each function, rather than keeping a running |
4541 | count. */ | |
180295ed JW |
4542 | #if defined (XCOFF_DEBUGGING_INFO) |
4543 | if (write_symbols == XCOFF_DEBUG) | |
b0e3a658 | 4544 | next_block_index = 1; |
18c038b9 MM |
4545 | #endif |
4546 | ||
4547 | block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks); | |
4548 | ||
4549 | /* The top-level BLOCK isn't numbered at all. */ | |
4550 | for (i = 1; i < n_blocks; ++i) | |
4551 | /* We number the blocks from two. */ | |
4552 | BLOCK_NUMBER (block_vector[i]) = next_block_index++; | |
4553 | ||
4554 | free (block_vector); | |
4555 | ||
4556 | return; | |
4557 | } | |
df8992f8 RH |
4558 | |
4559 | /* If VAR is present in a subblock of BLOCK, return the subblock. */ | |
4560 | ||
24e47c76 | 4561 | DEBUG_FUNCTION tree |
fa8db1f7 | 4562 | debug_find_var_in_block_tree (tree var, tree block) |
df8992f8 RH |
4563 | { |
4564 | tree t; | |
4565 | ||
4566 | for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t)) | |
4567 | if (t == var) | |
4568 | return block; | |
4569 | ||
4570 | for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t)) | |
4571 | { | |
4572 | tree ret = debug_find_var_in_block_tree (var, t); | |
4573 | if (ret) | |
4574 | return ret; | |
4575 | } | |
4576 | ||
4577 | return NULL_TREE; | |
4578 | } | |
467456d0 | 4579 | \f |
db2960f4 SL |
4580 | /* Keep track of whether we're in a dummy function context. If we are, |
4581 | we don't want to invoke the set_current_function hook, because we'll | |
4582 | get into trouble if the hook calls target_reinit () recursively or | |
4583 | when the initial initialization is not yet complete. */ | |
4584 | ||
4585 | static bool in_dummy_function; | |
4586 | ||
ab442df7 MM |
4587 | /* Invoke the target hook when setting cfun. Update the optimization options |
4588 | if the function uses different options than the default. */ | |
db2960f4 SL |
4589 | |
4590 | static void | |
4591 | invoke_set_current_function_hook (tree fndecl) | |
4592 | { | |
4593 | if (!in_dummy_function) | |
ab442df7 MM |
4594 | { |
4595 | tree opts = ((fndecl) | |
4596 | ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) | |
4597 | : optimization_default_node); | |
4598 | ||
4599 | if (!opts) | |
4600 | opts = optimization_default_node; | |
4601 | ||
4602 | /* Change optimization options if needed. */ | |
4603 | if (optimization_current_node != opts) | |
4604 | { | |
4605 | optimization_current_node = opts; | |
46625112 | 4606 | cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts)); |
ab442df7 MM |
4607 | } |
4608 | ||
892c4745 | 4609 | targetm.set_current_function (fndecl); |
4b1baac8 | 4610 | this_fn_optabs = this_target_optabs; |
135204dd | 4611 | |
4b1baac8 | 4612 | if (opts != optimization_default_node) |
135204dd | 4613 | { |
4b1baac8 RS |
4614 | init_tree_optimization_optabs (opts); |
4615 | if (TREE_OPTIMIZATION_OPTABS (opts)) | |
4616 | this_fn_optabs = (struct target_optabs *) | |
4617 | TREE_OPTIMIZATION_OPTABS (opts); | |
135204dd | 4618 | } |
ab442df7 | 4619 | } |
db2960f4 SL |
4620 | } |
4621 | ||
4622 | /* cfun should never be set directly; use this function. */ | |
4623 | ||
4624 | void | |
77719b06 | 4625 | set_cfun (struct function *new_cfun, bool force) |
db2960f4 | 4626 | { |
77719b06 | 4627 | if (cfun != new_cfun || force) |
db2960f4 SL |
4628 | { |
4629 | cfun = new_cfun; | |
4630 | invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE); | |
b3e46655 | 4631 | redirect_edge_var_map_empty (); |
db2960f4 SL |
4632 | } |
4633 | } | |
4634 | ||
db2960f4 SL |
4635 | /* Initialized with NOGC, making this poisonous to the garbage collector. */ |
4636 | ||
526ceb68 | 4637 | static vec<function *> cfun_stack; |
db2960f4 | 4638 | |
af16bc76 MJ |
4639 | /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set |
4640 | current_function_decl accordingly. */ | |
db2960f4 SL |
4641 | |
4642 | void | |
4643 | push_cfun (struct function *new_cfun) | |
4644 | { | |
af16bc76 MJ |
4645 | gcc_assert ((!cfun && !current_function_decl) |
4646 | || (cfun && current_function_decl == cfun->decl)); | |
9771b263 | 4647 | cfun_stack.safe_push (cfun); |
af16bc76 | 4648 | current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE; |
db2960f4 SL |
4649 | set_cfun (new_cfun); |
4650 | } | |
4651 | ||
af16bc76 | 4652 | /* Pop cfun from the stack. Also set current_function_decl accordingly. */ |
db2960f4 SL |
4653 | |
4654 | void | |
4655 | pop_cfun (void) | |
4656 | { | |
9771b263 | 4657 | struct function *new_cfun = cfun_stack.pop (); |
af16bc76 MJ |
4658 | /* When in_dummy_function, we do have a cfun but current_function_decl is |
4659 | NULL. We also allow pushing NULL cfun and subsequently changing | |
4660 | current_function_decl to something else and have both restored by | |
4661 | pop_cfun. */ | |
4662 | gcc_checking_assert (in_dummy_function | |
4663 | || !cfun | |
4664 | || current_function_decl == cfun->decl); | |
38d34676 | 4665 | set_cfun (new_cfun); |
af16bc76 | 4666 | current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE; |
db2960f4 | 4667 | } |
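/* Editor's note: a minimal sketch (hypothetical caller) of the usual
   discipline around push_cfun/pop_cfun: push, do per-function work with
   cfun and current_function_decl set, then pop.  FNDECL stands for the
   function being processed.  */
#if 0
  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  /* ... work that relies on cfun and current_function_decl ... */
  pop_cfun ();
#endif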
3e87758a RL |
4668 | |
4669 | /* Return value of funcdef and increase it. */ | |
4670 | int | |
b8698a0f | 4671 | get_next_funcdef_no (void) |
3e87758a RL |
4672 | { |
4673 | return funcdef_no++; | |
4674 | } | |
4675 | ||
903d1e67 XDL |
4676 | /* Return value of funcdef. */ |
4677 | int | |
4678 | get_last_funcdef_no (void) | |
4679 | { | |
4680 | return funcdef_no; | |
4681 | } | |
4682 | ||
3a70d621 | 4683 | /* Allocate a function structure for FNDECL and set its contents |
db2960f4 SL |
4684 | to the defaults. Set cfun to the newly-allocated object. |
4685 | Some of the helper functions invoked during initialization assume | |
4686 | that cfun has already been set. Therefore, assign the new object | |
4687 | directly into cfun and invoke the back end hook explicitly at the | |
4688 | very end, rather than initializing a temporary and calling set_cfun | |
4689 | on it. | |
182e0d71 AK |
4690 | |
4691 | ABSTRACT_P is true if this is a function that will never be seen by | |
4692 | the middle-end. Such functions are front-end concepts (like C++ | |
4693 | function templates) that do not correspond directly to functions | |
4694 | placed in object files. */ | |
7a80cf9a | 4695 | |
3a70d621 | 4696 | void |
182e0d71 | 4697 | allocate_struct_function (tree fndecl, bool abstract_p) |
6f086dfc | 4698 | { |
6de9cd9a | 4699 | tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE; |
6f086dfc | 4700 | |
766090c2 | 4701 | cfun = ggc_cleared_alloc<function> (); |
b384405b | 4702 | |
3a70d621 | 4703 | init_eh_for_function (); |
6f086dfc | 4704 | |
3a70d621 RH |
4705 | if (init_machine_status) |
4706 | cfun->machine = (*init_machine_status) (); | |
e2ecd91c | 4707 | |
7c800926 KT |
4708 | #ifdef OVERRIDE_ABI_FORMAT |
4709 | OVERRIDE_ABI_FORMAT (fndecl); | |
4710 | #endif | |
4711 | ||
81464b2c | 4712 | if (fndecl != NULL_TREE) |
3a70d621 | 4713 | { |
db2960f4 SL |
4714 | DECL_STRUCT_FUNCTION (fndecl) = cfun; |
4715 | cfun->decl = fndecl; | |
70cf5bc1 | 4716 | current_function_funcdef_no = get_next_funcdef_no (); |
5b9db1bc MJ |
4717 | } |
4718 | ||
4719 | invoke_set_current_function_hook (fndecl); | |
db2960f4 | 4720 | |
5b9db1bc MJ |
4721 | if (fndecl != NULL_TREE) |
4722 | { | |
4723 | tree result = DECL_RESULT (fndecl); | |
f11a7b6d AO |
4724 | |
4725 | if (!abstract_p) | |
4726 | { | |
4727 | /* Now that we have activated any function-specific attributes | |
4728 | that might affect layout, particularly vector modes, relayout | |
4729 | each of the parameters and the result. */ | |
4730 | relayout_decl (result); | |
4731 | for (tree parm = DECL_ARGUMENTS (fndecl); parm; | |
4732 | parm = DECL_CHAIN (parm)) | |
4733 | relayout_decl (parm); | |
63b0cb04 CB |
4734 | |
4735 | /* Similarly relayout the function decl. */ | |
4736 | targetm.target_option.relayout_function (fndecl); | |
f11a7b6d AO |
4737 | } |
4738 | ||
182e0d71 | 4739 | if (!abstract_p && aggregate_value_p (result, fndecl)) |
db2960f4 | 4740 | { |
3a70d621 | 4741 | #ifdef PCC_STATIC_STRUCT_RETURN |
e3b5732b | 4742 | cfun->returns_pcc_struct = 1; |
3a70d621 | 4743 | #endif |
e3b5732b | 4744 | cfun->returns_struct = 1; |
db2960f4 SL |
4745 | } |
4746 | ||
f38958e8 | 4747 | cfun->stdarg = stdarg_p (fntype); |
b8698a0f | 4748 | |
db2960f4 SL |
4749 | /* Assume all registers in stdarg functions need to be saved. */ |
4750 | cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; | |
4751 | cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; | |
8f4f502f EB |
4752 | |
4753 | /* ??? This could be set on a per-function basis by the front-end | |
4754 | but is this worth the hassle? */ | |
4755 | cfun->can_throw_non_call_exceptions = flag_non_call_exceptions; | |
d764963b | 4756 | cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions; |
0b37ba8a AK |
4757 | |
4758 | if (!profile_flag && !flag_instrument_function_entry_exit) | |
4759 | DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1; | |
3a70d621 | 4760 | } |
96a95ac1 AO |
4761 | |
4762 | /* Don't enable begin stmt markers if var-tracking at assignments is | |
4763 | disabled. The markers make little sense without the variable | |
4764 | binding annotations among them. */ | |
4765 | cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt | |
4766 | && MAY_HAVE_DEBUG_MARKER_STMTS; | |
db2960f4 SL |
4767 | } |
4768 | ||
4769 | /* This is like allocate_struct_function, but pushes a new cfun for FNDECL | |
4770 | instead of just setting it. */ | |
9d30f3c1 | 4771 | |
db2960f4 SL |
4772 | void |
4773 | push_struct_function (tree fndecl) | |
4774 | { | |
af16bc76 MJ |
4775 | /* When in_dummy_function we might be in the middle of a pop_cfun and |
4776 | current_function_decl and cfun may not match. */ | |
4777 | gcc_assert (in_dummy_function | |
4778 | || (!cfun && !current_function_decl) | |
4779 | || (cfun && current_function_decl == cfun->decl)); | |
9771b263 | 4780 | cfun_stack.safe_push (cfun); |
af16bc76 | 4781 | current_function_decl = fndecl; |
182e0d71 | 4782 | allocate_struct_function (fndecl, false); |
3a70d621 | 4783 | } |
6f086dfc | 4784 | |
8f4f502f | 4785 | /* Reset crtl and other non-struct-function variables to defaults as |
2067c116 | 4786 | appropriate for emitting rtl at the start of a function. */ |
6f086dfc | 4787 | |
3a70d621 | 4788 | static void |
db2960f4 | 4789 | prepare_function_start (void) |
3a70d621 | 4790 | { |
614d5bd8 | 4791 | gcc_assert (!get_last_insn ()); |
fb0703f7 | 4792 | init_temp_slots (); |
0de456a5 | 4793 | init_emit (); |
bd60bab2 | 4794 | init_varasm_status (); |
0de456a5 | 4795 | init_expr (); |
bf08ebeb | 4796 | default_rtl_profile (); |
6f086dfc | 4797 | |
a11e0df4 | 4798 | if (flag_stack_usage_info) |
d3c12306 | 4799 | { |
766090c2 | 4800 | cfun->su = ggc_cleared_alloc<stack_usage> (); |
d3c12306 EB |
4801 | cfun->su->static_stack_size = -1; |
4802 | } | |
4803 | ||
3a70d621 | 4804 | cse_not_expected = ! optimize; |
6f086dfc | 4805 | |
3a70d621 RH |
4806 | /* Caller save not needed yet. */ |
4807 | caller_save_needed = 0; | |
6f086dfc | 4808 | |
3a70d621 RH |
4809 | /* We haven't done register allocation yet. */ |
4810 | reg_renumber = 0; | |
6f086dfc | 4811 | |
b384405b BS |
4812 | /* Indicate that we have not instantiated virtual registers yet. */ |
4813 | virtuals_instantiated = 0; | |
4814 | ||
1b3d8f8a GK |
4815 | /* Indicate that we want CONCATs now. */ |
4816 | generating_concat_p = 1; | |
4817 | ||
b384405b BS |
4818 | /* Indicate we have no need of a frame pointer yet. */ |
4819 | frame_pointer_needed = 0; | |
b384405b BS |
4820 | } |
4821 | ||
5283d1ec TV |
4822 | void |
4823 | push_dummy_function (bool with_decl) | |
4824 | { | |
4825 | tree fn_decl, fn_type, fn_result_decl; | |
4826 | ||
4827 | gcc_assert (!in_dummy_function); | |
4828 | in_dummy_function = true; | |
4829 | ||
4830 | if (with_decl) | |
4831 | { | |
4832 | fn_type = build_function_type_list (void_type_node, NULL_TREE); | |
4833 | fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE, | |
4834 | fn_type); | |
4835 | fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL, | |
4836 | NULL_TREE, void_type_node); | |
4837 | DECL_RESULT (fn_decl) = fn_result_decl; | |
4838 | } | |
4839 | else | |
4840 | fn_decl = NULL_TREE; | |
4841 | ||
4842 | push_struct_function (fn_decl); | |
4843 | } | |
4844 | ||
b384405b BS |
4845 | /* Initialize the rtl expansion mechanism so that we can do simple things |
4846 | like generate sequences. This is used to provide a context during global | |
db2960f4 SL |
4847 | initialization of some passes. You must call expand_dummy_function_end |
4848 | to exit this context. */ | |
4849 | ||
b384405b | 4850 | void |
fa8db1f7 | 4851 | init_dummy_function_start (void) |
b384405b | 4852 | { |
5283d1ec | 4853 | push_dummy_function (false); |
db2960f4 | 4854 | prepare_function_start (); |
b384405b BS |
4855 | } |
4856 | ||
4857 | /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) | |
4858 | and initialize static variables for generating RTL for the statements | |
4859 | of the function. */ | |
4860 | ||
4861 | void | |
fa8db1f7 | 4862 | init_function_start (tree subr) |
b384405b | 4863 | { |
b9b5f433 JH |
4864 | /* Initialize backend, if needed. */ |
4865 | initialize_rtl (); | |
4866 | ||
db2960f4 | 4867 | prepare_function_start (); |
2c7eebae | 4868 | decide_function_section (subr); |
b384405b | 4869 | |
6f086dfc RS |
4870 | /* Warn if this value is an aggregate type, |
4871 | regardless of which calling convention we are using for it. */ | |
ccf08a6e DD |
4872 | if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)))) |
4873 | warning (OPT_Waggregate_return, "function returns an aggregate"); | |
49ad7cfa | 4874 | } |
5c7675e9 | 4875 | |
7d69de61 RH |
4876 | /* Expand code to verify the stack_protect_guard. This is invoked at |
4877 | the end of a function to be protected. */ | |
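/* Editor's note: conceptually, the RTL emitted below behaves like the
   following C, where FRAME_CANARY stands for the guard copy stored in this
   frame (crtl->stack_protect_guard) and GUARD for the reference value
   obtained via targetm.stack_protect_guard; the names and the
   __stack_chk_fail call (the usual default for targetm.stack_protect_fail)
   are illustrative only:

	if (FRAME_CANARY != GUARD)
	  __stack_chk_fail ();

   The failure path does not return, hence the hand-added branch prediction
   further down.  */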
4878 | ||
b755446c | 4879 | void |
7d69de61 RH |
4880 | stack_protect_epilogue (void) |
4881 | { | |
4882 | tree guard_decl = targetm.stack_protect_guard (); | |
19f8b229 | 4883 | rtx_code_label *label = gen_label_rtx (); |
9a24a3cc | 4884 | rtx x, y; |
ebd765d4 | 4885 | rtx_insn *seq; |
7d69de61 | 4886 | |
08d4cc33 | 4887 | x = expand_normal (crtl->stack_protect_guard); |
1202f33e JJ |
4888 | if (guard_decl) |
4889 | y = expand_normal (guard_decl); | |
4890 | else | |
4891 | y = const0_rtx; | |
7d69de61 RH |
4892 | |
4893 | /* Allow the target to compare Y with X without leaking either into | |
4894 | a register. */ | |
ebd765d4 KC |
4895 | if (targetm.have_stack_protect_test () |
4896 | && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX)) | |
4897 | emit_insn (seq); | |
4898 | else | |
4899 | emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label); | |
7d69de61 RH |
4900 | |
4901 | /* The noreturn predictor has been moved to the tree level. The rtl-level | |
4902 | predictors estimate this branch about 20%, which isn't enough to get | |
4903 | things moved out of line. Since this is the only extant case of adding | |
4904 | a noreturn function at the rtl level, it doesn't seem worth doing anything |
4905 | except adding the prediction by hand. */ | |
9a24a3cc | 4906 | rtx_insn *tmp = get_last_insn (); |
7d69de61 | 4907 | if (JUMP_P (tmp)) |
9a24a3cc | 4908 | predict_insn_def (tmp, PRED_NORETURN, TAKEN); |
7d69de61 | 4909 | |
b3c144a3 SB |
4910 | expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true); |
4911 | free_temp_slots (); | |
7d69de61 RH |
4912 | emit_label (label); |
4913 | } | |
4914 | \f | |
6f086dfc RS |
4915 | /* Start the RTL for a new function, and set variables used for |
4916 | emitting RTL. | |
4917 | SUBR is the FUNCTION_DECL node. */ |
4920 | ||
4921 | void | |
b79c5284 | 4922 | expand_function_start (tree subr) |
6f086dfc | 4923 | { |
6f086dfc RS |
4924 | /* Make sure volatile mem refs aren't considered |
4925 | valid operands of arithmetic insns. */ | |
4926 | init_recog_no_volatile (); | |
4927 | ||
e3b5732b | 4928 | crtl->profile |
70f4f91c WC |
4929 | = (profile_flag |
4930 | && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr)); | |
4931 | ||
e3b5732b | 4932 | crtl->limit_stack |
a157febd GK |
4933 | = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr)); |
4934 | ||
52a11cbf RH |
4935 | /* Make the label for return statements to jump to. Do not special |
4936 | case machines with special return instructions -- they will be | |
4937 | handled later during jump, ifcvt, or epilogue creation. */ | |
6f086dfc | 4938 | return_label = gen_label_rtx (); |
6f086dfc RS |
4939 | |
4940 | /* Initialize rtx used to return the value. */ | |
4941 | /* Do this before assign_parms so that we copy the struct value address | |
4942 | before any library calls that assign parms might generate. */ | |
4943 | ||
4944 | /* Decide whether to return the value in memory or in a register. */ | |
1f9ceff1 | 4945 | tree res = DECL_RESULT (subr); |
1f9ceff1 | 4946 | if (aggregate_value_p (res, subr)) |
6f086dfc RS |
4947 | { |
4948 | /* Returning something that won't go in a register. */ | |
b3694847 | 4949 | rtx value_address = 0; |
6f086dfc RS |
4950 | |
4951 | #ifdef PCC_STATIC_STRUCT_RETURN | |
e3b5732b | 4952 | if (cfun->returns_pcc_struct) |
6f086dfc | 4953 | { |
1f9ceff1 | 4954 | int size = int_size_in_bytes (TREE_TYPE (res)); |
6f086dfc RS |
4955 | value_address = assemble_static_space (size); |
4956 | } | |
4957 | else | |
4958 | #endif | |
4959 | { | |
2225b57c | 4960 | rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2); |
6f086dfc RS |
4961 | /* Expect to be passed the address of a place to store the value. |
4962 | If it is passed as an argument, assign_parms will take care of | |
4963 | it. */ | |
61f71b34 | 4964 | if (sv) |
6f086dfc | 4965 | { |
f11a7b6d | 4966 | value_address = gen_reg_rtx (Pmode); |
61f71b34 | 4967 | emit_move_insn (value_address, sv); |
6f086dfc RS |
4968 | } |
4969 | } | |
4970 | if (value_address) | |
ccdecf58 | 4971 | { |
01c98570 | 4972 | rtx x = value_address; |
1f9ceff1 | 4973 | if (!DECL_BY_REFERENCE (res)) |
01c98570 | 4974 | { |
f11a7b6d AO |
4975 | x = gen_rtx_MEM (DECL_MODE (res), x); |
4976 | set_mem_attributes (x, res, 1); | |
01c98570 | 4977 | } |
f11a7b6d | 4978 | set_parm_rtl (res, x); |
ccdecf58 | 4979 | } |
6f086dfc | 4980 | } |
1f9ceff1 | 4981 | else if (DECL_MODE (res) == VOIDmode) |
6f086dfc | 4982 | /* If return mode is void, this decl rtl should not be used. */ |
f11a7b6d AO |
4983 | set_parm_rtl (res, NULL_RTX); |
4984 | else | |
a53e14c0 | 4985 | { |
d5bf1143 RH |
4986 | /* Compute the return values into a pseudo reg, which we will copy |
4987 | into the true return register after the cleanups are done. */ | |
1f9ceff1 | 4988 | tree return_type = TREE_TYPE (res); |
058c6384 EB |
4989 | |
4990 | /* If we may coalesce this result, make sure it has the expected mode | |
4991 | in case it was promoted. But we need not bother about BLKmode. */ | |
4992 | machine_mode promoted_mode | |
4993 | = flag_tree_coalesce_vars && is_gimple_reg (res) | |
4994 | ? promote_ssa_mode (ssa_default_def (cfun, res), NULL) | |
4995 | : BLKmode; | |
4996 | ||
4997 | if (promoted_mode != BLKmode) | |
4998 | set_parm_rtl (res, gen_reg_rtx (promoted_mode)); | |
1f9ceff1 AO |
4999 | else if (TYPE_MODE (return_type) != BLKmode |
5000 | && targetm.calls.return_in_msb (return_type)) | |
bef5d8b6 RS |
5001 | /* expand_function_end will insert the appropriate padding in |
5002 | this case. Use the return value's natural (unpadded) mode | |
5003 | within the function proper. */ | |
f11a7b6d | 5004 | set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type))); |
80a480ca | 5005 | else |
0bccc606 | 5006 | { |
bef5d8b6 RS |
5007 | /* In order to figure out what mode to use for the pseudo, we |
5008 | figure out what the mode of the eventual return register will | |
5009 | actually be, and use that. */ | |
1d636cc6 | 5010 | rtx hard_reg = hard_function_value (return_type, subr, 0, 1); |
bef5d8b6 RS |
5011 | |
5012 | /* Structures that are returned in registers are not | |
5013 | aggregate_value_p, so we may see a PARALLEL or a REG. */ | |
5014 | if (REG_P (hard_reg)) | |
f11a7b6d | 5015 | set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg))); |
bef5d8b6 RS |
5016 | else |
5017 | { | |
5018 | gcc_assert (GET_CODE (hard_reg) == PARALLEL); | |
f11a7b6d | 5019 | set_parm_rtl (res, gen_group_rtx (hard_reg)); |
bef5d8b6 | 5020 | } |
0bccc606 | 5021 | } |
a53e14c0 | 5022 | |
084a1106 JDA |
5023 | /* Set DECL_REGISTER flag so that expand_function_end will copy the |
5024 | result to the real return register(s). */ | |
1f9ceff1 | 5025 | DECL_REGISTER (res) = 1; |
a53e14c0 | 5026 | } |
6f086dfc RS |
5027 | |
5028 | /* Initialize rtx for parameters and local variables. | |
5029 | In some cases this requires emitting insns. */ | |
0d1416c6 | 5030 | assign_parms (subr); |
6f086dfc | 5031 | |
6de9cd9a DN |
5032 | /* If function gets a static chain arg, store it. */ |
5033 | if (cfun->static_chain_decl) | |
5034 | { | |
7e140280 | 5035 | tree parm = cfun->static_chain_decl; |
21afc57d | 5036 | rtx local, chain; |
f11a7b6d AO |
5037 | rtx_insn *insn; |
5038 | int unsignedp; | |
7e140280 | 5039 | |
f11a7b6d | 5040 | local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp)); |
531ca746 RH |
5041 | chain = targetm.calls.static_chain (current_function_decl, true); |
5042 | ||
5043 | set_decl_incoming_rtl (parm, chain, false); | |
f11a7b6d | 5044 | set_parm_rtl (parm, local); |
7e140280 | 5045 | mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); |
6de9cd9a | 5046 | |
f11a7b6d AO |
5047 | if (GET_MODE (local) != GET_MODE (chain)) |
5048 | { | |
5049 | convert_move (local, chain, unsignedp); | |
5050 | insn = get_last_insn (); | |
5051 | } | |
5052 | else | |
5053 | insn = emit_move_insn (local, chain); | |
531ca746 RH |
5054 | |
5055 | /* Mark the register as eliminable, similar to parameters. */ | |
5056 | if (MEM_P (chain) | |
5057 | && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0))) | |
7543f918 | 5058 | set_dst_reg_note (insn, REG_EQUIV, chain, local); |
3fd48b12 EB |
5059 | |
5060 | /* If we aren't optimizing, save the static chain onto the stack. */ | |
5061 | if (!optimize) | |
5062 | { | |
5063 | tree saved_static_chain_decl | |
5064 | = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL, | |
5065 | DECL_NAME (parm), TREE_TYPE (parm)); | |
5066 | rtx saved_static_chain_rtx | |
5067 | = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); | |
5068 | SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx); | |
5069 | emit_move_insn (saved_static_chain_rtx, chain); | |
5070 | SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl); | |
5071 | DECL_HAS_VALUE_EXPR_P (parm) = 1; | |
5072 | } | |
6de9cd9a DN |
5073 | } |
5074 | ||
108c3c88 | 5075 | /* The following was moved from init_function_start. |
180295ed | 5076 | The move was supposed to make sdb output more accurate. */ |
108c3c88 ML |
5077 | /* Indicate the beginning of the function body, |
5078 | as opposed to parm setup. */ | |
5079 | emit_note (NOTE_INSN_FUNCTION_BEG); | |
5080 | ||
5081 | gcc_assert (NOTE_P (get_last_insn ())); | |
5082 | ||
5083 | parm_birth_insn = get_last_insn (); | |
5084 | ||
6de9cd9a DN |
5085 | /* If the function receives a non-local goto, then store the |
5086 | bits we need to restore the frame pointer. */ | |
5087 | if (cfun->nonlocal_goto_save_area) | |
5088 | { | |
5089 | tree t_save; | |
5090 | rtx r_save; | |
5091 | ||
4846b435 | 5092 | tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0); |
ca5f4331 | 5093 | gcc_assert (DECL_RTL_SET_P (var)); |
6de9cd9a | 5094 | |
6bbec3e1 L |
5095 | t_save = build4 (ARRAY_REF, |
5096 | TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)), | |
3244e67d RS |
5097 | cfun->nonlocal_goto_save_area, |
5098 | integer_zero_node, NULL_TREE, NULL_TREE); | |
6de9cd9a | 5099 | r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE); |
6bbec3e1 | 5100 | gcc_assert (GET_MODE (r_save) == Pmode); |
f0c51a1e | 5101 | |
88280cf9 | 5102 | emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ()); |
6de9cd9a DN |
5103 | update_nonlocal_goto_save_area (); |
5104 | } | |
f0c51a1e | 5105 | |
e3b5732b | 5106 | if (crtl->profile) |
f6f315fe | 5107 | { |
f6f315fe | 5108 | #ifdef PROFILE_HOOK |
df696a75 | 5109 | PROFILE_HOOK (current_function_funcdef_no); |
411707f4 | 5110 | #endif |
f6f315fe | 5111 | } |
411707f4 | 5112 | |
6d3cc8f0 EB |
5113 | /* If we are doing generic stack checking, the probe should go here. */ |
5114 | if (flag_stack_check == GENERIC_STACK_CHECK) | |
ede497cf | 5115 | stack_check_probe_note = emit_note (NOTE_INSN_DELETED); |
6f086dfc RS |
5116 | } |
5117 | \f | |
5283d1ec TV |
5118 | void |
5119 | pop_dummy_function (void) | |
5120 | { | |
5121 | pop_cfun (); | |
5122 | in_dummy_function = false; | |
5123 | } | |
5124 | ||
49ad7cfa BS |
5125 | /* Undo the effects of init_dummy_function_start. */ |
5126 | void | |
fa8db1f7 | 5127 | expand_dummy_function_end (void) |
49ad7cfa | 5128 | { |
db2960f4 SL |
5129 | gcc_assert (in_dummy_function); |
5130 | ||
49ad7cfa BS |
5131 | /* End any sequences that failed to be closed due to syntax errors. */ |
5132 | while (in_sequence_p ()) | |
5133 | end_sequence (); | |
5134 | ||
5135 | /* Outside function body, can't compute type's actual size | |
5136 | until next function's body starts. */ | |
fa51b01b | 5137 | |
01d939e8 BS |
5138 | free_after_parsing (cfun); |
5139 | free_after_compilation (cfun); | |
5283d1ec | 5140 | pop_dummy_function (); |
49ad7cfa BS |
5141 | } |
5142 | ||
d5e254e1 | 5143 | /* Helper for diddle_return_value. */ |
bd695e1e RH |
5144 | |
5145 | void | |
d5e254e1 | 5146 | diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing) |
bd695e1e | 5147 | { |
c13fde05 RH |
5148 | if (! outgoing) |
5149 | return; | |
bd695e1e | 5150 | |
f8cfc6aa | 5151 | if (REG_P (outgoing)) |
c13fde05 RH |
5152 | (*doit) (outgoing, arg); |
5153 | else if (GET_CODE (outgoing) == PARALLEL) | |
5154 | { | |
5155 | int i; | |
bd695e1e | 5156 | |
c13fde05 RH |
5157 | for (i = 0; i < XVECLEN (outgoing, 0); i++) |
5158 | { | |
5159 | rtx x = XEXP (XVECEXP (outgoing, 0, i), 0); | |
5160 | ||
f8cfc6aa | 5161 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
c13fde05 | 5162 | (*doit) (x, arg); |
bd695e1e RH |
5163 | } |
5164 | } | |
5165 | } | |
5166 | ||
d5e254e1 IE |
5167 | /* Call DOIT for each hard register used as a return value from |
5168 | the current function. */ | |
5169 | ||
5170 | void | |
5171 | diddle_return_value (void (*doit) (rtx, void *), void *arg) | |
5172 | { | |
d5e254e1 | 5173 | diddle_return_value_1 (doit, arg, crtl->return_bnd); |
e9ae68af | 5174 | diddle_return_value_1 (doit, arg, crtl->return_rtx); |
d5e254e1 IE |
5175 | } |
5176 | ||
c13fde05 | 5177 | static void |
fa8db1f7 | 5178 | do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) |
c13fde05 | 5179 | { |
c41c1387 | 5180 | emit_clobber (reg); |
c13fde05 RH |
5181 | } |
5182 | ||
5183 | void | |
fa8db1f7 | 5184 | clobber_return_register (void) |
c13fde05 RH |
5185 | { |
5186 | diddle_return_value (do_clobber_return_reg, NULL); | |
9c65bbf4 JH |
5187 | |
5188 | /* In case we do use pseudo to return value, clobber it too. */ | |
5189 | if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) | |
5190 | { | |
5191 | tree decl_result = DECL_RESULT (current_function_decl); | |
5192 | rtx decl_rtl = DECL_RTL (decl_result); | |
5193 | if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER) | |
5194 | { | |
5195 | do_clobber_return_reg (decl_rtl, NULL); | |
5196 | } | |
5197 | } | |
c13fde05 RH |
5198 | } |
5199 | ||
5200 | static void | |
fa8db1f7 | 5201 | do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) |
c13fde05 | 5202 | { |
c41c1387 | 5203 | emit_use (reg); |
c13fde05 RH |
5204 | } |
5205 | ||
0bf8477d | 5206 | static void |
fa8db1f7 | 5207 | use_return_register (void) |
c13fde05 RH |
5208 | { |
5209 | diddle_return_value (do_use_return_reg, NULL); | |
5210 | } | |
5211 | ||
862d0b35 DN |
5212 | /* Set the location of the insn chain starting at INSN to LOC. */ |
5213 | ||
5214 | static void | |
dc01c3d1 | 5215 | set_insn_locations (rtx_insn *insn, int loc) |
862d0b35 | 5216 | { |
dc01c3d1 | 5217 | while (insn != NULL) |
862d0b35 DN |
5218 | { |
5219 | if (INSN_P (insn)) | |
5220 | INSN_LOCATION (insn) = loc; | |
5221 | insn = NEXT_INSN (insn); | |
5222 | } | |
5223 | } | |
5224 | ||
71c0e7fc | 5225 | /* Generate RTL for the end of the current function. */ |
6f086dfc RS |
5226 | |
5227 | void | |
fa8db1f7 | 5228 | expand_function_end (void) |
6f086dfc | 5229 | { |
964be02f RH |
5230 | /* If arg_pointer_save_area was referenced only from a nested |
5231 | function, we will not have initialized it yet. Do that now. */ | |
e3b5732b | 5232 | if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init) |
bd60bab2 | 5233 | get_arg_pointer_save_area (); |
964be02f | 5234 | |
b38f3813 | 5235 | /* If we are doing generic stack checking and this function makes calls, |
11044f66 RK |
5236 | do a stack probe at the start of the function to ensure we have enough |
5237 | space for another stack frame. */ | |
b38f3813 | 5238 | if (flag_stack_check == GENERIC_STACK_CHECK) |
11044f66 | 5239 | { |
691fe203 | 5240 | rtx_insn *insn, *seq; |
11044f66 RK |
5241 | |
5242 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
4b4bf941 | 5243 | if (CALL_P (insn)) |
11044f66 | 5244 | { |
c35af30f | 5245 | rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE); |
11044f66 | 5246 | start_sequence (); |
c35af30f EB |
5247 | if (STACK_CHECK_MOVING_SP) |
5248 | anti_adjust_stack_and_probe (max_frame_size, true); | |
5249 | else | |
5250 | probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size); | |
11044f66 RK |
5251 | seq = get_insns (); |
5252 | end_sequence (); | |
5368224f | 5253 | set_insn_locations (seq, prologue_location); |
ede497cf | 5254 | emit_insn_before (seq, stack_check_probe_note); |
11044f66 RK |
5255 | break; |
5256 | } | |
5257 | } | |
5258 | ||
6f086dfc RS |
5259 | /* End any sequences that failed to be closed due to syntax errors. */ |
5260 | while (in_sequence_p ()) | |
5f4f0e22 | 5261 | end_sequence (); |
6f086dfc | 5262 | |
6f086dfc RS |
5263 | clear_pending_stack_adjust (); |
5264 | do_pending_stack_adjust (); | |
5265 | ||
6f086dfc | 5266 | /* Output a line number for the end of the function. |
180295ed | 5267 | SDB depended on this. */ |
5368224f | 5268 | set_curr_insn_location (input_location); |
6f086dfc | 5269 | |
fbffc70a | 5270 | /* Before the return label (if any), clobber the return |
a1f300c0 | 5271 | registers so that they are not propagated live to the rest of |
fbffc70a GK |
5272 | the function. This can only happen with functions that drop |
5273 | through; if there had been a return statement, there would | |
932f0847 JH |
5274 | have either been a return rtx, or a jump to the return label. |
5275 | ||
5276 | We delay actual code generation after the current_function_value_rtx | |
5277 | is computed. */ | |
e67d1102 | 5278 | rtx_insn *clobber_after = get_last_insn (); |
fbffc70a | 5279 | |
526c334b KH |
5280 | /* Output the label for the actual return from the function. */ |
5281 | emit_label (return_label); | |
6f086dfc | 5282 | |
677f3fa8 | 5283 | if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ) |
815eb8f0 AM |
5284 | { |
5285 | /* Let except.c know where it should emit the call to unregister | |
5286 | the function context for sjlj exceptions. */ | |
5287 | if (flag_exceptions) | |
5288 | sjlj_emit_function_exit_after (get_last_insn ()); | |
5289 | } | |
6fb5fa3c DB |
5290 | else |
5291 | { | |
5292 | /* We want to ensure that instructions that may trap are not | |
5293 | moved into the epilogue by scheduling, because we don't | |
5294 | always emit unwind information for the epilogue. */ | |
8f4f502f | 5295 | if (cfun->can_throw_non_call_exceptions) |
6fb5fa3c DB |
5296 | emit_insn (gen_blockage ()); |
5297 | } | |
0b59e81e | 5298 | |
652b0932 RH |
5299 | /* If this is an implementation of throw, do what's necessary to |
5300 | communicate between __builtin_eh_return and the epilogue. */ | |
5301 | expand_eh_return (); | |
5302 | ||
3e4eac3f RH |
5303 | /* If scalar return value was computed in a pseudo-reg, or was a named |
5304 | return value that got dumped to the stack, copy that to the hard | |
5305 | return register. */ | |
19e7881c | 5306 | if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) |
6f086dfc | 5307 | { |
3e4eac3f RH |
5308 | tree decl_result = DECL_RESULT (current_function_decl); |
5309 | rtx decl_rtl = DECL_RTL (decl_result); | |
5310 | ||
5311 | if (REG_P (decl_rtl) | |
5312 | ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER | |
5313 | : DECL_REGISTER (decl_result)) | |
5314 | { | |
38173d38 | 5315 | rtx real_decl_rtl = crtl->return_rtx; |
a97390bf | 5316 | complex_mode cmode; |
6f086dfc | 5317 | |
ce5e43d0 | 5318 | /* This should be set in assign_parms. */ |
0bccc606 | 5319 | gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl)); |
3e4eac3f RH |
5320 | |
5321 | /* If this is a BLKmode structure being returned in registers, | |
5322 | then use the mode computed in expand_return. Note that if | |
797a6ac1 | 5323 | decl_rtl is memory, then its mode may have been changed, |
38173d38 | 5324 | but that of crtl->return_rtx has not. */ |
3e4eac3f | 5325 | if (GET_MODE (real_decl_rtl) == BLKmode) |
ce5e43d0 | 5326 | PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl)); |
3e4eac3f | 5327 | |
bef5d8b6 RS |
5328 | /* If a non-BLKmode return value should be padded at the least |
5329 | significant end of the register, shift it left by the appropriate | |
5330 | amount. BLKmode results are handled using the group load/store | |
5331 | machinery. */ | |
5332 | if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode | |
66de4d7c | 5333 | && REG_P (real_decl_rtl) |
bef5d8b6 RS |
5334 | && targetm.calls.return_in_msb (TREE_TYPE (decl_result))) |
5335 | { | |
5336 | emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl), | |
5337 | REGNO (real_decl_rtl)), | |
5338 | decl_rtl); | |
5339 | shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl); | |
5340 | } | |
aa570f54 | 5341 | else if (GET_CODE (real_decl_rtl) == PARALLEL) |
084a1106 JDA |
5342 | { |
5343 | /* If expand_function_start has created a PARALLEL for decl_rtl, | |
5344 | move the result to the real return registers. Otherwise, do | |
5345 | a group load from decl_rtl for a named return. */ | |
5346 | if (GET_CODE (decl_rtl) == PARALLEL) | |
5347 | emit_group_move (real_decl_rtl, decl_rtl); | |
5348 | else | |
5349 | emit_group_load (real_decl_rtl, decl_rtl, | |
6e985040 | 5350 | TREE_TYPE (decl_result), |
084a1106 JDA |
5351 | int_size_in_bytes (TREE_TYPE (decl_result))); |
5352 | } | |
652b0932 RH |
5353 | /* In the case of complex integer modes smaller than a word, we'll |
5354 | need to generate some non-trivial bitfield insertions. Do that | |
5355 | on a pseudo and not the hard register. */ | |
5356 | else if (GET_CODE (decl_rtl) == CONCAT | |
a97390bf RS |
5357 | && is_complex_int_mode (GET_MODE (decl_rtl), &cmode) |
5358 | && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD) | |
652b0932 RH |
5359 | { |
5360 | int old_generating_concat_p; | |
5361 | rtx tmp; | |
5362 | ||
5363 | old_generating_concat_p = generating_concat_p; | |
5364 | generating_concat_p = 0; | |
5365 | tmp = gen_reg_rtx (GET_MODE (decl_rtl)); | |
5366 | generating_concat_p = old_generating_concat_p; | |
5367 | ||
5368 | emit_move_insn (tmp, decl_rtl); | |
5369 | emit_move_insn (real_decl_rtl, tmp); | |
5370 | } | |
fc5851fe AO |
5371 | /* If a named return value dumped decl_return to memory, then |
5372 | we may need to re-do the PROMOTE_MODE signed/unsigned | |
5373 | extension. */ | |
5374 | else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl)) | |
5375 | { | |
5376 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result)); | |
5377 | promote_function_mode (TREE_TYPE (decl_result), | |
5378 | GET_MODE (decl_rtl), &unsignedp, | |
5379 | TREE_TYPE (current_function_decl), 1); | |
5380 | ||
5381 | convert_move (real_decl_rtl, decl_rtl, unsignedp); | |
5382 | } | |
3e4eac3f RH |
5383 | else |
5384 | emit_move_insn (real_decl_rtl, decl_rtl); | |
3e4eac3f | 5385 | } |
6f086dfc RS |
5386 | } |
5387 | ||
5388 | /* If returning a structure, arrange to return the address of the value | |
5389 | in a place where debuggers expect to find it. | |
5390 | ||
5391 | If returning a structure PCC style, | |
5392 | the caller also depends on this value. | |
e3b5732b | 5393 | And cfun->returns_pcc_struct is not necessarily set. */ |
e0d14c39 BS |
5394 | if ((cfun->returns_struct || cfun->returns_pcc_struct) |
5395 | && !targetm.calls.omit_struct_return_reg) | |
6f086dfc | 5396 | { |
cc77ae10 | 5397 | rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl)); |
6f086dfc | 5398 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); |
cc77ae10 JM |
5399 | rtx outgoing; |
5400 | ||
5401 | if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))) | |
5402 | type = TREE_TYPE (type); | |
5403 | else | |
5404 | value_address = XEXP (value_address, 0); | |
5405 | ||
1d636cc6 RG |
5406 | outgoing = targetm.calls.function_value (build_pointer_type (type), |
5407 | current_function_decl, true); | |
6f086dfc RS |
5408 | |
5409 | /* Mark this as a function return value so integrate will delete the | |
5410 | assignment and USE below when inlining this function. */ | |
5411 | REG_FUNCTION_VALUE_P (outgoing) = 1; | |
5412 | ||
d1608933 | 5413 | /* The address may be ptr_mode and OUTGOING may be Pmode. */ |
c7ad039d RS |
5414 | scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing)); |
5415 | value_address = convert_memory_address (mode, value_address); | |
d1608933 | 5416 | |
6f086dfc | 5417 | emit_move_insn (outgoing, value_address); |
d1608933 RK |
5418 | |
5419 | /* Show return register used to hold result (in this case the address | |
5420 | of the result). */ |
38173d38 | 5421 | crtl->return_rtx = outgoing; |
6f086dfc RS |
5422 | } |
5423 | ||
79c7fda6 JJ |
5424 | /* Emit the actual code to clobber the return register. Don't emit |
5425 | it if clobber_after is a barrier; in that case the previous basic block |
5426 | certainly doesn't fall through into the exit block. */ |
5427 | if (!BARRIER_P (clobber_after)) | |
5428 | { | |
79c7fda6 JJ |
5429 | start_sequence (); |
5430 | clobber_return_register (); | |
e67d1102 | 5431 | rtx_insn *seq = get_insns (); |
79c7fda6 | 5432 | end_sequence (); |
932f0847 | 5433 | |
79c7fda6 JJ |
5434 | emit_insn_after (seq, clobber_after); |
5435 | } | |
932f0847 | 5436 | |
609c3937 | 5437 | /* Output the label for the naked return from the function. */ |
4c33221c UW |
5438 | if (naked_return_label) |
5439 | emit_label (naked_return_label); | |
6e3077c6 | 5440 | |
25108646 AH |
5441 | /* @@@ This is a kludge. We want to ensure that instructions that |
5442 | may trap are not moved into the epilogue by scheduling, because | |
56d17681 | 5443 | we don't always emit unwind information for the epilogue. */ |
f0a0390e | 5444 | if (cfun->can_throw_non_call_exceptions |
677f3fa8 | 5445 | && targetm_common.except_unwind_info (&global_options) != UI_SJLJ) |
56d17681 | 5446 | emit_insn (gen_blockage ()); |
25108646 | 5447 | |
7d69de61 | 5448 | /* If stack protection is enabled for this function, check the guard. */ |
87a5dc2d | 5449 | if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ()) |
7d69de61 RH |
5450 | stack_protect_epilogue (); |
5451 | ||
40184445 BS |
5452 | /* If we had calls to alloca, and this machine needs |
5453 | an accurate stack pointer to exit the function, | |
5454 | insert some code to save and restore the stack pointer. */ | |
5455 | if (! EXIT_IGNORE_STACK | |
e3b5732b | 5456 | && cfun->calls_alloca) |
40184445 | 5457 | { |
e67d1102 | 5458 | rtx tem = 0; |
40184445 | 5459 | |
9eac0f2a RH |
5460 | start_sequence (); |
5461 | emit_stack_save (SAVE_FUNCTION, &tem); | |
e67d1102 | 5462 | rtx_insn *seq = get_insns (); |
9eac0f2a RH |
5463 | end_sequence (); |
5464 | emit_insn_before (seq, parm_birth_insn); | |
5465 | ||
5466 | emit_stack_restore (SAVE_FUNCTION, tem); | |
40184445 BS |
5467 | } |
5468 | ||
c13fde05 RH |
5469 | /* ??? This should no longer be necessary since stupid is no longer with |
5470 | us, but there are some parts of the compiler (e.g. reload_combine, and |
5471 | sh mach_dep_reorg) that still try to compute their own lifetime info |
5472 | instead of using the general framework. */ | |
5473 | use_return_register (); | |
6f086dfc | 5474 | } |
278ed218 RH |
5475 | |
5476 | rtx | |
bd60bab2 | 5477 | get_arg_pointer_save_area (void) |
278ed218 | 5478 | { |
bd60bab2 | 5479 | rtx ret = arg_pointer_save_area; |
278ed218 RH |
5480 | |
5481 | if (! ret) | |
5482 | { | |
bd60bab2 JH |
5483 | ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); |
5484 | arg_pointer_save_area = ret; | |
964be02f RH |
5485 | } |
5486 | ||
e3b5732b | 5487 | if (! crtl->arg_pointer_save_area_init) |
964be02f | 5488 | { |
797a6ac1 | 5489 | /* Save the arg pointer at the beginning of the function. The |
964be02f | 5490 | generated stack slot may not be a valid memory address, so we |
278ed218 RH |
5491 | have to check it and fix it if necessary. */ |
5492 | start_sequence (); | |
1a8cb155 | 5493 | emit_move_insn (validize_mem (copy_rtx (ret)), |
2e3f842f | 5494 | crtl->args.internal_arg_pointer); |
e67d1102 | 5495 | rtx_insn *seq = get_insns (); |
278ed218 RH |
5496 | end_sequence (); |
5497 | ||
964be02f | 5498 | push_topmost_sequence (); |
1cb2fc7b | 5499 | emit_insn_after (seq, entry_of_function ()); |
964be02f | 5500 | pop_topmost_sequence (); |
c1d9a70a ILT |
5501 | |
5502 | crtl->arg_pointer_save_area_init = true; | |
278ed218 RH |
5503 | } |
5504 | ||
5505 | return ret; | |
5506 | } | |
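/* Illustrative sketch, not part of GCC: a hedged example of how a consumer
   (for instance a nonlocal-goto receiver expander) might reload the argument
   pointer from the save slot returned above.  The function name and the
   restore site are assumptions; the calls simply mirror the save side in
   get_arg_pointer_save_area.  */

static void
sketch_restore_arg_pointer (void)
{
  rtx ap_save = get_arg_pointer_save_area ();

  /* The slot may not be a valid memory address as-is, so validize it,
     just as the save side does before storing into it.  */
  emit_move_insn (crtl->args.internal_arg_pointer,
                  validize_mem (copy_rtx (ap_save)));
}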
bdac5f58 | 5507 | \f |
8a502a80 JL |
5508 | |
5509 | /* If debugging dumps are requested, dump information about how the | |
5510 | target handled stack clash protection (-fstack-clash-protection) for the prologue. |
5511 | ||
5512 | PROBES describes what, if any, probes were emitted. |
5513 | ||
5514 | RESIDUALS indicates if the prologue had any residual allocation | |
5515 | (i.e. total allocation was not a multiple of PROBE_INTERVAL). */ | |
5516 | ||
5517 | void | |
5518 | dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals) | |
5519 | { | |
5520 | if (!dump_file) | |
5521 | return; | |
5522 | ||
5523 | switch (probes) | |
5524 | { | |
5525 | case NO_PROBE_NO_FRAME: | |
5526 | fprintf (dump_file, | |
5527 | "Stack clash no probe no stack adjustment in prologue.\n"); | |
5528 | break; | |
5529 | case NO_PROBE_SMALL_FRAME: | |
5530 | fprintf (dump_file, | |
5531 | "Stack clash no probe small stack adjustment in prologue.\n"); | |
5532 | break; | |
5533 | case PROBE_INLINE: | |
5534 | fprintf (dump_file, "Stack clash inline probes in prologue.\n"); | |
5535 | break; | |
5536 | case PROBE_LOOP: | |
5537 | fprintf (dump_file, "Stack clash probe loop in prologue.\n"); | |
5538 | break; | |
5539 | } | |
5540 | ||
5541 | if (residuals) | |
5542 | fprintf (dump_file, "Stack clash residual allocation in prologue.\n"); | |
5543 | else | |
5544 | fprintf (dump_file, "Stack clash no residual allocation in prologue.\n"); | |
5545 | ||
5546 | if (frame_pointer_needed) | |
5547 | fprintf (dump_file, "Stack clash frame pointer needed.\n"); | |
5548 | else | |
5549 | fprintf (dump_file, "Stack clash no frame pointer needed.\n"); | |
5550 | ||
5551 | if (TREE_THIS_VOLATILE (cfun->decl)) | |
5552 | fprintf (dump_file, | |
5553 | "Stack clash noreturn prologue, assuming no implicit" | |
5554 | " probes in caller.\n"); | |
5555 | else | |
5556 | fprintf (dump_file, | |
5557 | "Stack clash not noreturn prologue.\n"); | |
5558 | } | |
5559 | ||
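/* Illustrative sketch, not part of GCC: how a target's prologue expander
   might report its stack-clash probing decision through the function above.
   The function name, the frame_size parameter and the 4096-byte probe
   interval are placeholders, not real target hooks or macros; only the
   dump_stack_clash_frame_info calls and the enum values come from the code
   above.  */

static void
sketch_report_stack_clash_probes (HOST_WIDE_INT frame_size)
{
  const HOST_WIDE_INT probe_interval = 4096;  /* assumed probe interval */

  if (frame_size == 0)
    /* Nothing was allocated, so nothing needed probing.  */
    dump_stack_clash_frame_info (NO_PROBE_NO_FRAME, false);
  else if (frame_size < probe_interval)
    /* A small frame: the whole allocation is residual, no probe emitted.  */
    dump_stack_clash_frame_info (NO_PROBE_SMALL_FRAME, true);
  else
    /* A large frame probed in a loop; note whether a residual remained.  */
    dump_stack_clash_frame_info (PROBE_LOOP,
                                 (frame_size % probe_interval) != 0);
}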
cd9c1ca8 RH |
5560 | /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP |
5561 | for the first time. */ | |
bdac5f58 | 5562 | |
0a1c58a2 | 5563 | static void |
d242408f | 5564 | record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp) |
bdac5f58 | 5565 | { |
dc01c3d1 | 5566 | rtx_insn *tmp; |
d242408f | 5567 | hash_table<insn_cache_hasher> *hash = *hashp; |
0a1c58a2 | 5568 | |
cd9c1ca8 | 5569 | if (hash == NULL) |
d242408f | 5570 | *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17); |
cd9c1ca8 RH |
5571 | |
5572 | for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp)) | |
5573 | { | |
d242408f | 5574 | rtx *slot = hash->find_slot (tmp, INSERT); |
cd9c1ca8 RH |
5575 | gcc_assert (*slot == NULL); |
5576 | *slot = tmp; | |
5577 | } | |
5578 | } | |
5579 | ||
cd400280 RH |
5580 | /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a |
5581 | basic block, splitting or peepholes. If INSN is a prologue or epilogue | |
5582 | insn, then record COPY as well. */ | |
cd9c1ca8 RH |
5583 | |
5584 | void | |
cd400280 | 5585 | maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy) |
cd9c1ca8 | 5586 | { |
d242408f TS |
5587 | hash_table<insn_cache_hasher> *hash; |
5588 | rtx *slot; | |
cd9c1ca8 | 5589 | |
cd400280 | 5590 | hash = epilogue_insn_hash; |
d242408f | 5591 | if (!hash || !hash->find (insn)) |
cd400280 RH |
5592 | { |
5593 | hash = prologue_insn_hash; | |
d242408f | 5594 | if (!hash || !hash->find (insn)) |
cd400280 RH |
5595 | return; |
5596 | } | |
cd9c1ca8 | 5597 | |
d242408f | 5598 | slot = hash->find_slot (copy, INSERT); |
cd9c1ca8 RH |
5599 | gcc_assert (*slot == NULL); |
5600 | *slot = copy; | |
bdac5f58 TW |
5601 | } |
5602 | ||
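/* Illustrative sketch, not part of GCC: an RTL transformation that
   duplicates an existing insn should keep the prologue/epilogue hash tables
   in sync by calling the function above on the copy.  The helper name is
   hypothetical; emit_copy_of_insn_after is an existing emit-rtl routine,
   used here only as one example way to obtain a copy.  */

static rtx_insn *
sketch_duplicate_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *copy = emit_copy_of_insn_after (insn, after);

  /* If INSN was recorded as a prologue or epilogue insn, record the copy
     as well so later passes still recognize it.  */
  maybe_copy_prologue_epilogue_insn (insn, copy);
  return copy;
}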
cd9c1ca8 RH |
5603 | /* Determine if any INSNs in HASH are, or are part of, INSN. Because |
5604 | we can be running after reorg, SEQUENCE rtl is possible. */ | |
bdac5f58 | 5605 | |
cd9c1ca8 | 5606 | static bool |
87ac59a0 | 5607 | contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash) |
bdac5f58 | 5608 | { |
cd9c1ca8 RH |
5609 | if (hash == NULL) |
5610 | return false; | |
bdac5f58 | 5611 | |
cd9c1ca8 | 5612 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
bdac5f58 | 5613 | { |
e0944870 | 5614 | rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn)); |
cd9c1ca8 | 5615 | int i; |
e0944870 | 5616 | for (i = seq->len () - 1; i >= 0; i--) |
d242408f | 5617 | if (hash->find (seq->element (i))) |
cd9c1ca8 RH |
5618 | return true; |
5619 | return false; | |
bdac5f58 | 5620 | } |
cd9c1ca8 | 5621 | |
87ac59a0 | 5622 | return hash->find (const_cast<rtx_insn *> (insn)) != NULL; |
bdac5f58 | 5623 | } |
5c7675e9 | 5624 | |
64f6e1e1 | 5625 | int |
87ac59a0 | 5626 | prologue_contains (const rtx_insn *insn) |
64f6e1e1 SB |
5627 | { |
5628 | return contains (insn, prologue_insn_hash); | |
5629 | } | |
5630 | ||
5631 | int | |
87ac59a0 | 5632 | epilogue_contains (const rtx_insn *insn) |
64f6e1e1 SB |
5633 | { |
5634 | return contains (insn, epilogue_insn_hash); | |
5635 | } | |
5636 | ||
5c7675e9 | 5637 | int |
87ac59a0 | 5638 | prologue_epilogue_contains (const rtx_insn *insn) |
5c7675e9 | 5639 | { |
cd9c1ca8 | 5640 | if (contains (insn, prologue_insn_hash)) |
5c7675e9 | 5641 | return 1; |
cd9c1ca8 | 5642 | if (contains (insn, epilogue_insn_hash)) |
5c7675e9 RH |
5643 | return 1; |
5644 | return 0; | |
5645 | } | |
bdac5f58 | 5646 | |
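/* Illustrative sketch, not part of GCC: a later RTL pass that must leave
   prologue and epilogue instructions alone can use the predicates above as
   a filter.  The function name is hypothetical.  */

static bool
sketch_insn_is_transformable (const rtx_insn *insn)
{
  return NONDEBUG_INSN_P (insn) && !prologue_epilogue_contains (insn);
}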
64f6e1e1 SB |
5647 | void |
5648 | record_prologue_seq (rtx_insn *seq) | |
5649 | { | |
5650 | record_insns (seq, NULL, &prologue_insn_hash); | |
5651 | } | |
5652 | ||
5653 | void | |
5654 | record_epilogue_seq (rtx_insn *seq) | |
5655 | { | |
5656 | record_insns (seq, NULL, &epilogue_insn_hash); | |
5657 | } | |
69732dcb | 5658 | |
387748de AM |
5659 | /* Set JUMP_LABEL for a return insn. */ |
5660 | ||
5661 | void | |
d38ff8dd | 5662 | set_return_jump_label (rtx_insn *returnjump) |
387748de AM |
5663 | { |
5664 | rtx pat = PATTERN (returnjump); | |
5665 | if (GET_CODE (pat) == PARALLEL) | |
5666 | pat = XVECEXP (pat, 0, 0); | |
5667 | if (ANY_RETURN_P (pat)) | |
5668 | JUMP_LABEL (returnjump) = pat; | |
5669 | else | |
5670 | JUMP_LABEL (returnjump) = ret_rtx; | |
5671 | } | |
5672 | ||
fb42ed99 SB |
5673 | /* Return a sequence to be used as the split prologue for the current |
5674 | function, or NULL. */ | |
5675 | ||
5676 | static rtx_insn * | |
5677 | make_split_prologue_seq (void) | |
5678 | { | |
5679 | if (!flag_split_stack | |
5680 | || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))) | |
5681 | return NULL; | |
5682 | ||
5683 | start_sequence (); | |
5684 | emit_insn (targetm.gen_split_stack_prologue ()); | |
5685 | rtx_insn *seq = get_insns (); | |
5686 | end_sequence (); | |
5687 | ||
5688 | record_insns (seq, NULL, &prologue_insn_hash); | |
5689 | set_insn_locations (seq, prologue_location); | |
5690 | ||
5691 | return seq; | |
5692 | } | |
5693 | ||
5694 | /* Return a sequence to be used as the prologue for the current function, | |
5695 | or NULL. */ | |
5696 | ||
5697 | static rtx_insn * | |
5698 | make_prologue_seq (void) | |
5699 | { | |
5700 | if (!targetm.have_prologue ()) | |
5701 | return NULL; | |
5702 | ||
5703 | start_sequence (); | |
5704 | rtx_insn *seq = targetm.gen_prologue (); | |
5705 | emit_insn (seq); | |
5706 | ||
5707 | /* Insert an explicit USE for the frame pointer | |
5708 | if the profiling is on and the frame pointer is required. */ | |
5709 | if (crtl->profile && frame_pointer_needed) | |
5710 | emit_use (hard_frame_pointer_rtx); | |
5711 | ||
5712 | /* Retain a map of the prologue insns. */ | |
5713 | record_insns (seq, NULL, &prologue_insn_hash); | |
5714 | emit_note (NOTE_INSN_PROLOGUE_END); | |
5715 | ||
5716 | /* Ensure that instructions are not moved into the prologue when | |
5717 | profiling is on. The call to the profiling routine can be | |
5718 | emitted within the live range of a call-clobbered register. */ | |
5719 | if (!targetm.profile_before_prologue () && crtl->profile) | |
5720 | emit_insn (gen_blockage ()); | |
5721 | ||
5722 | seq = get_insns (); | |
5723 | end_sequence (); | |
5724 | set_insn_locations (seq, prologue_location); | |
5725 | ||
5726 | return seq; | |
5727 | } | |
5728 | ||
5729 | /* Return a sequence to be used as the epilogue for the current function, | |
5730 | or NULL. */ | |
5731 | ||
5732 | static rtx_insn * | |
33fec8d5 | 5733 | make_epilogue_seq (void) |
fb42ed99 SB |
5734 | { |
5735 | if (!targetm.have_epilogue ()) | |
5736 | return NULL; | |
5737 | ||
5738 | start_sequence (); | |
33fec8d5 | 5739 | emit_note (NOTE_INSN_EPILOGUE_BEG); |
fb42ed99 SB |
5740 | rtx_insn *seq = targetm.gen_epilogue (); |
5741 | if (seq) | |
5742 | emit_jump_insn (seq); | |
5743 | ||
5744 | /* Retain a map of the epilogue insns. */ | |
5745 | record_insns (seq, NULL, &epilogue_insn_hash); | |
5746 | set_insn_locations (seq, epilogue_location); | |
5747 | ||
5748 | seq = get_insns (); | |
5749 | rtx_insn *returnjump = get_last_insn (); | |
5750 | end_sequence (); | |
5751 | ||
5752 | if (JUMP_P (returnjump)) | |
5753 | set_return_jump_label (returnjump); | |
5754 | ||
5755 | return seq; | |
5756 | } | |
5757 | ||
ffe14686 | 5758 | |
9faa82d8 | 5759 | /* Generate the prologue and epilogue RTL if the machine supports it. Thread |
bdac5f58 | 5760 | this into place with notes indicating where the prologue ends and where |
484db665 BS |
5761 | the epilogue begins. Update the basic block information when possible. |
5762 | ||
5763 | Notes on epilogue placement: | |
5764 | There are several kinds of edges to the exit block: | |
5765 | * a single fallthru edge from LAST_BB | |
5766 | * possibly, edges from blocks containing sibcalls | |
5767 | * possibly, fake edges from infinite loops | |
5768 | ||
5769 | The epilogue is always emitted on the fallthru edge from the last basic | |
5770 | block in the function, LAST_BB, into the exit block. | |
5771 | ||
5772 | If LAST_BB is empty except for a label, it is the target of every | |
5773 | other basic block in the function that ends in a return. If a | |
5774 | target has a return or simple_return pattern (possibly with | |
5775 | conditional variants), these basic blocks can be changed so that a | |
5776 | return insn is emitted into them, and their target is adjusted to | |
5777 | the real exit block. | |
5778 | ||
5779 | Notes on shrink wrapping: We implement a fairly conservative | |
5780 | version of shrink-wrapping rather than the textbook one. We only | |
5781 | generate a single prologue and a single epilogue. This is | |
5782 | sufficient to catch a number of interesting cases involving early | |
5783 | exits. | |
5784 | ||
5785 | First, we identify the blocks that require the prologue to occur before | |
5786 | them. These are the ones that modify a call-saved register, or reference | |
5787 | any of the stack or frame pointer registers. To simplify things, we then | |
5788 | mark everything reachable from these blocks as also requiring a prologue. | |
5789 | This takes care of loops automatically, and avoids the need to examine | |
5790 | whether MEMs reference the frame, since it is sufficient to check for | |
5791 | occurrences of the stack or frame pointer. | |
5792 | ||
5793 | We then compute the set of blocks for which the need for a prologue | |
5794 | is anticipatable (borrowing terminology from the shrink-wrapping | |
5795 | description in Muchnick's book). These are the blocks which either | |
5796 | require a prologue themselves, or those that have only successors | |
5797 | where the prologue is anticipatable. The prologue needs to be | |
5798 | inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1 | |
5799 | is not. For the moment, we ensure that only one such edge exists. | |
5800 | ||
5801 | The epilogue is placed as described above, but we make a | |
5802 | distinction between inserting return and simple_return patterns | |
5803 | when modifying other blocks that end in a return. Blocks that end | |
5804 | in a sibcall omit the sibcall_epilogue if the block is not in | |
5805 | ANTIC. */ | |
bdac5f58 | 5806 | |
c81b4a0e | 5807 | void |
6fb5fa3c | 5808 | thread_prologue_and_epilogue_insns (void) |
bdac5f58 | 5809 | { |
484db665 | 5810 | df_analyze (); |
e881bb1b | 5811 | |
7458026b ILT |
5812 | /* Can't deal with multiple successors of the entry block at the |
5813 | moment. Function should always have at least one entry | |
5814 | point. */ | |
fefa31b5 | 5815 | gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun))); |
33fec8d5 SB |
5816 | |
5817 | edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)); | |
5818 | edge orig_entry_edge = entry_edge; | |
484db665 | 5819 | |
7dca85bf | 5820 | rtx_insn *split_prologue_seq = make_split_prologue_seq (); |
fb42ed99 | 5821 | rtx_insn *prologue_seq = make_prologue_seq (); |
7dca85bf | 5822 | rtx_insn *epilogue_seq = make_epilogue_seq (); |
484db665 | 5823 | |
484db665 BS |
5824 | /* Try to perform a kind of shrink-wrapping, making sure the |
5825 | prologue/epilogue is emitted only around those parts of the | |
5826 | function that require it. */ | |
33fec8d5 | 5827 | try_shrink_wrapping (&entry_edge, prologue_seq); |
484db665 | 5828 | |
c997869f SB |
5829 | /* If the target can handle splitting the prologue/epilogue into separate |
5830 | components, try to shrink-wrap these components separately. */ | |
5831 | try_shrink_wrapping_separate (entry_edge->dest); | |
5832 | ||
5833 | /* If that did anything for any component, we now need to generate the |
7dca85bf SB |
5834 | "main" prologue again. Because some targets require some of these |
5835 | to be called in a specific order (i386 requires the split prologue | |
5836 | to be first, for example), we create all three sequences again here. | |
5837 | If this does not work for some target, that target should not enable | |
5838 | separate shrink-wrapping. */ | |
c997869f | 5839 | if (crtl->shrink_wrapped_separate) |
7dca85bf SB |
5840 | { |
5841 | split_prologue_seq = make_split_prologue_seq (); | |
5842 | prologue_seq = make_prologue_seq (); | |
5843 | epilogue_seq = make_epilogue_seq (); | |
5844 | } | |
19d3c25c | 5845 | |
fefa31b5 | 5846 | rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)); |
484db665 | 5847 | |
cd9c1ca8 RH |
5848 | /* A small fib -- epilogue is not yet completed, but we wish to re-use |
5849 | this marker for the splits of EH_RETURN patterns, and nothing else | |
5850 | uses the flag in the meantime. */ | |
5851 | epilogue_completed = 1; | |
5852 | ||
cd9c1ca8 RH |
5853 | /* Find non-fallthru edges that end with EH_RETURN instructions. On |
5854 | some targets, these get split to a special version of the epilogue | |
5855 | code. In order to be able to properly annotate these with unwind | |
5856 | info, try to split them now. If we get a valid split, drop an | |
5857 | EPILOGUE_BEG note and mark the insns as epilogue insns. */ | |
33fec8d5 SB |
5858 | edge e; |
5859 | edge_iterator ei; | |
fefa31b5 | 5860 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
cd9c1ca8 | 5861 | { |
691fe203 | 5862 | rtx_insn *prev, *last, *trial; |
cd9c1ca8 RH |
5863 | |
5864 | if (e->flags & EDGE_FALLTHRU) | |
5865 | continue; | |
5866 | last = BB_END (e->src); | |
5867 | if (!eh_returnjump_p (last)) | |
5868 | continue; | |
5869 | ||
5870 | prev = PREV_INSN (last); | |
5871 | trial = try_split (PATTERN (last), last, 1); | |
5872 | if (trial == last) | |
5873 | continue; | |
5874 | ||
5875 | record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash); | |
5876 | emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev); | |
5877 | } | |
cd9c1ca8 | 5878 | |
33fec8d5 | 5879 | edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); |
cc1f86f3 | 5880 | |
33fec8d5 | 5881 | if (exit_fallthru_edge) |
623a66fa | 5882 | { |
33fec8d5 SB |
5883 | if (epilogue_seq) |
5884 | { | |
5885 | insert_insn_on_edge (epilogue_seq, exit_fallthru_edge); | |
e93044fc | 5886 | commit_edge_insertions (); |
33fec8d5 SB |
5887 | |
5888 | /* The epilogue insns we inserted may cause the exit edge to no longer | |
5889 | be fallthru. */ | |
5890 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) | |
5891 | { | |
5892 | if (((e->flags & EDGE_FALLTHRU) != 0) | |
5893 | && returnjump_p (BB_END (e->src))) | |
5894 | e->flags &= ~EDGE_FALLTHRU; | |
5895 | } | |
5896 | } | |
5897 | else if (next_active_insn (BB_END (exit_fallthru_edge->src))) | |
5898 | { | |
5899 | /* We have a fall-through edge to the exit block, the source is not | |
5900 | at the end of the function, and there will be an assembler epilogue | |
5901 | at the end of the function. | |
5902 | We can't use force_nonfallthru here, because that would try to | |
5903 | use return. Inserting a jump 'by hand' is extremely messy, so | |
5904 | we take advantage of cfg_layout_finalize using | |
5905 | fixup_fallthru_exit_predecessor. */ | |
5906 | cfg_layout_initialize (0); | |
5907 | basic_block cur_bb; | |
5908 | FOR_EACH_BB_FN (cur_bb, cfun) | |
5909 | if (cur_bb->index >= NUM_FIXED_BLOCKS | |
5910 | && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS) | |
5911 | cur_bb->aux = cur_bb->next_bb; | |
5912 | cfg_layout_finalize (); | |
5913 | } | |
623a66fa | 5914 | } |
cf103ca4 | 5915 | |
33fec8d5 | 5916 | /* Insert the prologue. */ |
484db665 | 5917 | |
33fec8d5 | 5918 | rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
e881bb1b | 5919 | |
33fec8d5 | 5920 | if (split_prologue_seq || prologue_seq) |
30a873c3 | 5921 | { |
349721b7 | 5922 | rtx_insn *split_prologue_insn = split_prologue_seq; |
33fec8d5 | 5923 | if (split_prologue_seq) |
349721b7 JJ |
5924 | { |
5925 | while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn)) | |
5926 | split_prologue_insn = NEXT_INSN (split_prologue_insn); | |
5927 | insert_insn_on_edge (split_prologue_seq, orig_entry_edge); | |
5928 | } | |
33fec8d5 | 5929 | |
349721b7 | 5930 | rtx_insn *prologue_insn = prologue_seq; |
33fec8d5 | 5931 | if (prologue_seq) |
349721b7 JJ |
5932 | { |
5933 | while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn)) | |
5934 | prologue_insn = NEXT_INSN (prologue_insn); | |
5935 | insert_insn_on_edge (prologue_seq, entry_edge); | |
5936 | } | |
cf103ca4 | 5937 | |
30a873c3 ZD |
5938 | commit_edge_insertions (); |
5939 | ||
cf103ca4 | 5940 | /* Look for basic blocks within the prologue insns. */ |
349721b7 JJ |
5941 | if (split_prologue_insn |
5942 | && BLOCK_FOR_INSN (split_prologue_insn) == NULL) | |
5943 | split_prologue_insn = NULL; | |
5944 | if (prologue_insn | |
5945 | && BLOCK_FOR_INSN (prologue_insn) == NULL) | |
5946 | prologue_insn = NULL; | |
5947 | if (split_prologue_insn || prologue_insn) | |
5948 | { | |
5949 | auto_sbitmap blocks (last_basic_block_for_fn (cfun)); | |
5950 | bitmap_clear (blocks); | |
5951 | if (split_prologue_insn) | |
5952 | bitmap_set_bit (blocks, | |
5953 | BLOCK_FOR_INSN (split_prologue_insn)->index); | |
5954 | if (prologue_insn) | |
5955 | bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index); | |
5956 | find_many_sub_basic_blocks (blocks); | |
5957 | } | |
30a873c3 | 5958 | } |
0a1c58a2 | 5959 | |
33fec8d5 SB |
5960 | default_rtl_profile (); |
5961 | ||
0a1c58a2 | 5962 | /* Emit sibling epilogues before any sibling call sites. */ |
33fec8d5 SB |
5963 | for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); |
5964 | (e = ei_safe_edge (ei)); | |
5965 | ei_next (&ei)) | |
0a1c58a2 | 5966 | { |
33fec8d5 SB |
5967 | /* Skip those already handled, the ones that run without prologue. */ |
5968 | if (e->flags & EDGE_IGNORE) | |
628f6a4e | 5969 | { |
33fec8d5 | 5970 | e->flags &= ~EDGE_IGNORE; |
628f6a4e BE |
5971 | continue; |
5972 | } | |
0a1c58a2 | 5973 | |
33fec8d5 SB |
5974 | rtx_insn *insn = BB_END (e->src); |
5975 | ||
5976 | if (!(CALL_P (insn) && SIBLING_CALL_P (insn))) | |
5977 | continue; | |
5978 | ||
e86a9946 | 5979 | if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ()) |
484db665 BS |
5980 | { |
5981 | start_sequence (); | |
5982 | emit_note (NOTE_INSN_EPILOGUE_BEG); | |
5983 | emit_insn (ep_seq); | |
dc01c3d1 | 5984 | rtx_insn *seq = get_insns (); |
484db665 | 5985 | end_sequence (); |
0a1c58a2 | 5986 | |
484db665 BS |
5987 | /* Retain a map of the epilogue insns. Used in life analysis to |
5988 | avoid getting rid of sibcall epilogue insns. Do this before we | |
5989 | actually emit the sequence. */ | |
5990 | record_insns (seq, NULL, &epilogue_insn_hash); | |
5368224f | 5991 | set_insn_locations (seq, epilogue_location); |
2f937369 | 5992 | |
484db665 BS |
5993 | emit_insn_before (seq, insn); |
5994 | } | |
0a1c58a2 | 5995 | } |
ca1117cc | 5996 | |
33fec8d5 | 5997 | if (epilogue_seq) |
86c82654 | 5998 | { |
9c8348cf | 5999 | rtx_insn *insn, *next; |
86c82654 RH |
6000 | |
6001 | /* Similarly, move any line notes that appear after the epilogue. | |
ff7cc307 | 6002 | There is no need, however, to be quite so strict about the existence |
071a42f9 | 6003 | of such a note. Also possibly move |
84c1fa24 UW |
6004 | NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug |
6005 | info generation. */ | |
33fec8d5 | 6006 | for (insn = epilogue_seq; insn; insn = next) |
86c82654 RH |
6007 | { |
6008 | next = NEXT_INSN (insn); | |
b8698a0f | 6009 | if (NOTE_P (insn) |
a38e7aa5 | 6010 | && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)) |
33fec8d5 | 6011 | reorder_insns (insn, insn, PREV_INSN (epilogue_seq)); |
86c82654 RH |
6012 | } |
6013 | } | |
6fb5fa3c DB |
6014 | |
6015 | /* Threading the prologue and epilogue changes the artificial refs | |
6016 | in the entry and exit blocks. */ | |
6017 | epilogue_completed = 1; | |
6018 | df_update_entry_exit_and_calls (); | |
bdac5f58 TW |
6019 | } |
6020 | ||
cd9c1ca8 RH |
6021 | /* Reposition the prologue-end and epilogue-begin notes after |
6022 | instruction scheduling. */ | |
bdac5f58 TW |
6023 | |
6024 | void | |
6fb5fa3c | 6025 | reposition_prologue_and_epilogue_notes (void) |
bdac5f58 | 6026 | { |
e86a9946 RS |
6027 | if (!targetm.have_prologue () |
6028 | && !targetm.have_epilogue () | |
6029 | && !targetm.have_sibcall_epilogue ()) | |
5251b8b3 | 6030 | return; |
5251b8b3 | 6031 | |
cd9c1ca8 RH |
6032 | /* Since the hash table is created on demand, the fact that it is |
6033 | non-null is a signal that it is non-empty. */ | |
6034 | if (prologue_insn_hash != NULL) | |
bdac5f58 | 6035 | { |
d242408f | 6036 | size_t len = prologue_insn_hash->elements (); |
691fe203 | 6037 | rtx_insn *insn, *last = NULL, *note = NULL; |
bdac5f58 | 6038 | |
cd9c1ca8 RH |
6039 | /* Scan from the beginning until we reach the last prologue insn. */ |
6040 | /* ??? While we do have the CFG intact, there are two problems: | |
6041 | (1) The prologue can contain loops (typically probing the stack), | |
6042 | which means that the end of the prologue isn't in the first bb. | |
6043 | (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */ | |
6fb5fa3c | 6044 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
bdac5f58 | 6045 | { |
4b4bf941 | 6046 | if (NOTE_P (insn)) |
9392c110 | 6047 | { |
a38e7aa5 | 6048 | if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END) |
0a1c58a2 JL |
6049 | note = insn; |
6050 | } | |
cd9c1ca8 | 6051 | else if (contains (insn, prologue_insn_hash)) |
0a1c58a2 | 6052 | { |
9f53e965 RH |
6053 | last = insn; |
6054 | if (--len == 0) | |
6055 | break; | |
6056 | } | |
6057 | } | |
797a6ac1 | 6058 | |
9f53e965 RH |
6059 | if (last) |
6060 | { | |
cd9c1ca8 | 6061 | if (note == NULL) |
9f53e965 | 6062 | { |
cd9c1ca8 RH |
6063 | /* Scan forward looking for the PROLOGUE_END note. It should |
6064 | be right at the beginning of the block, possibly with other | |
6065 | insn notes that got moved there. */ | |
6066 | for (note = NEXT_INSN (last); ; note = NEXT_INSN (note)) | |
6067 | { | |
6068 | if (NOTE_P (note) | |
6069 | && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END) | |
6070 | break; | |
6071 | } | |
9f53e965 | 6072 | } |
c93b03c2 | 6073 | |
9f53e965 | 6074 | /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */ |
4b4bf941 | 6075 | if (LABEL_P (last)) |
9f53e965 RH |
6076 | last = NEXT_INSN (last); |
6077 | reorder_insns (note, note, last); | |
bdac5f58 | 6078 | } |
0a1c58a2 JL |
6079 | } |
6080 | ||
cd9c1ca8 | 6081 | if (epilogue_insn_hash != NULL) |
0a1c58a2 | 6082 | { |
cd9c1ca8 RH |
6083 | edge_iterator ei; |
6084 | edge e; | |
bdac5f58 | 6085 | |
fefa31b5 | 6086 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
bdac5f58 | 6087 | { |
691fe203 | 6088 | rtx_insn *insn, *first = NULL, *note = NULL; |
997704f1 | 6089 | basic_block bb = e->src; |
c93b03c2 | 6090 | |
997704f1 | 6091 | /* Scan from the beginning until we reach the first epilogue insn. */ |
cd9c1ca8 | 6092 | FOR_BB_INSNS (bb, insn) |
9f53e965 | 6093 | { |
cd9c1ca8 RH |
6094 | if (NOTE_P (insn)) |
6095 | { | |
6096 | if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG) | |
6097 | { | |
6098 | note = insn; | |
997704f1 | 6099 | if (first != NULL) |
cd9c1ca8 RH |
6100 | break; |
6101 | } | |
6102 | } | |
997704f1 | 6103 | else if (first == NULL && contains (insn, epilogue_insn_hash)) |
cd9c1ca8 | 6104 | { |
997704f1 | 6105 | first = insn; |
cd9c1ca8 RH |
6106 | if (note != NULL) |
6107 | break; | |
6108 | } | |
9392c110 | 6109 | } |
997704f1 RH |
6110 | |
6111 | if (note) | |
6112 | { | |
6113 | /* If the function has a single basic block, and no real | |
b8698a0f | 6114 | epilogue insns (e.g. sibcall with no cleanup), the |
997704f1 RH |
6115 | epilogue note can get scheduled before the prologue |
6116 | note. If we have frame related prologue insns, having | |
6117 | them scanned during the epilogue will result in a crash. | |
6118 | In this case re-order the epilogue note to just before | |
6119 | the last insn in the block. */ | |
6120 | if (first == NULL) | |
6121 | first = BB_END (bb); | |
6122 | ||
6123 | if (PREV_INSN (first) != note) | |
6124 | reorder_insns (note, note, PREV_INSN (first)); | |
6125 | } | |
bdac5f58 TW |
6126 | } |
6127 | } | |
bdac5f58 | 6128 | } |
87ff9c8e | 6129 | |
df92c640 SB |
6130 | /* Returns the name of function declared by FNDECL. */ |
6131 | const char * | |
6132 | fndecl_name (tree fndecl) | |
6133 | { | |
6134 | if (fndecl == NULL) | |
6135 | return "(nofn)"; | |
60591d4e | 6136 | return lang_hooks.decl_printable_name (fndecl, 1); |
df92c640 SB |
6137 | } |
6138 | ||
532aafad SB |
6139 | /* Returns the name of function FN. */ |
6140 | const char * | |
6141 | function_name (struct function *fn) | |
6142 | { | |
df92c640 SB |
6143 | tree fndecl = (fn == NULL) ? NULL : fn->decl; |
6144 | return fndecl_name (fndecl); | |
532aafad SB |
6145 | } |
6146 | ||
faed5cc3 SB |
6147 | /* Returns the name of the current function. */ |
6148 | const char * | |
6149 | current_function_name (void) | |
6150 | { | |
532aafad | 6151 | return function_name (cfun); |
faed5cc3 | 6152 | } |
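/* Illustrative sketch, not part of GCC: the usual idiom for tagging pass
   dump output with the function currently being compiled.  dump_file is
   null when no dump was requested, hence the guard; the function name is
   hypothetical.  */

static void
sketch_dump_function_banner (void)
{
  if (dump_file)
    fprintf (dump_file, ";; Processing function %s\n",
             current_function_name ());
}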
ef330312 PB |
6153 | \f |
6154 | ||
c2924966 | 6155 | static unsigned int |
ef330312 PB |
6156 | rest_of_handle_check_leaf_regs (void) |
6157 | { | |
6158 | #ifdef LEAF_REGISTERS | |
416ff32e | 6159 | crtl->uses_only_leaf_regs |
ef330312 PB |
6160 | = optimize > 0 && only_leaf_regs_used () && leaf_function_p (); |
6161 | #endif | |
c2924966 | 6162 | return 0; |
ef330312 PB |
6163 | } |
6164 | ||
8d8d1a28 | 6165 | /* Insert a TYPE into the used types hash table of FUNC. */ |
b646ba3f | 6166 | |
8d8d1a28 AH |
6167 | static void |
6168 | used_types_insert_helper (tree type, struct function *func) | |
33c9159e | 6169 | { |
8d8d1a28 | 6170 | if (type != NULL && func != NULL) |
33c9159e | 6171 | { |
33c9159e | 6172 | if (func->used_types_hash == NULL) |
b086d530 TS |
6173 | func->used_types_hash = hash_set<tree>::create_ggc (37); |
6174 | ||
6175 | func->used_types_hash->add (type); | |
33c9159e AH |
6176 | } |
6177 | } | |
6178 | ||
8d8d1a28 AH |
6179 | /* Given a type, insert it into the used hash table in cfun. */ |
6180 | void | |
6181 | used_types_insert (tree t) | |
6182 | { | |
6183 | while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE) | |
095c7b3c JJ |
6184 | if (TYPE_NAME (t)) |
6185 | break; | |
6186 | else | |
6187 | t = TREE_TYPE (t); | |
29ce73cb PB |
6188 | if (TREE_CODE (t) == ERROR_MARK) |
6189 | return; | |
095c7b3c JJ |
6190 | if (TYPE_NAME (t) == NULL_TREE |
6191 | || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t))) | |
6192 | t = TYPE_MAIN_VARIANT (t); | |
8d8d1a28 | 6193 | if (debug_info_level > DINFO_LEVEL_NONE) |
b646ba3f DS |
6194 | { |
6195 | if (cfun) | |
6196 | used_types_insert_helper (t, cfun); | |
6197 | else | |
9771b263 DN |
6198 | { |
6199 | /* So this might be a type referenced by a global variable. | |
6200 | Record that type so that we can later decide to emit its | |
6201 | debug information. */ | |
6202 | vec_safe_push (types_used_by_cur_var_decl, t); | |
6203 | } | |
b646ba3f DS |
6204 | } |
6205 | } | |
6206 | ||
6207 | /* Helper to Hash a struct types_used_by_vars_entry. */ | |
6208 | ||
6209 | static hashval_t | |
6210 | hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry) | |
6211 | { | |
6212 | gcc_assert (entry && entry->var_decl && entry->type); | |
6213 | ||
6214 | return iterative_hash_object (entry->type, | |
6215 | iterative_hash_object (entry->var_decl, 0)); | |
6216 | } | |
6217 | ||
6218 | /* Hash function of the types_used_by_vars_entry hash table. */ | |
6219 | ||
6220 | hashval_t | |
2a22f99c | 6221 | used_type_hasher::hash (types_used_by_vars_entry *entry) |
b646ba3f | 6222 | { |
b646ba3f DS |
6223 | return hash_types_used_by_vars_entry (entry); |
6224 | } | |
6225 | ||
6226 | /* Equality function of the types_used_by_vars_entry hash table. */ |
6227 | ||
2a22f99c TS |
6228 | bool |
6229 | used_type_hasher::equal (types_used_by_vars_entry *e1, | |
6230 | types_used_by_vars_entry *e2) | |
b646ba3f | 6231 | { |
b646ba3f DS |
6232 | return (e1->var_decl == e2->var_decl && e1->type == e2->type); |
6233 | } | |
6234 | ||
6235 | /* Inserts an entry into the types_used_by_vars_hash hash table. */ | |
6236 | ||
6237 | void | |
6238 | types_used_by_var_decl_insert (tree type, tree var_decl) | |
6239 | { | |
6240 | if (type != NULL && var_decl != NULL) | |
6241 | { | |
2a22f99c | 6242 | types_used_by_vars_entry **slot; |
b646ba3f DS |
6243 | struct types_used_by_vars_entry e; |
6244 | e.var_decl = var_decl; | |
6245 | e.type = type; | |
6246 | if (types_used_by_vars_hash == NULL) | |
2a22f99c TS |
6247 | types_used_by_vars_hash |
6248 | = hash_table<used_type_hasher>::create_ggc (37); | |
6249 | ||
6250 | slot = types_used_by_vars_hash->find_slot (&e, INSERT); | |
b646ba3f DS |
6251 | if (*slot == NULL) |
6252 | { | |
6253 | struct types_used_by_vars_entry *entry; | |
766090c2 | 6254 | entry = ggc_alloc<types_used_by_vars_entry> (); |
b646ba3f DS |
6255 | entry->type = type; |
6256 | entry->var_decl = var_decl; | |
6257 | *slot = entry; | |
6258 | } | |
6259 | } | |
8d8d1a28 AH |
6260 | } |
6261 | ||
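/* Illustrative sketch, not part of GCC: how a front end might record the
   type of a declaration so its debug info is kept.  DECL is assumed to be a
   declaration tree node the caller already has; the function name is
   hypothetical, while used_types_insert, types_used_by_var_decl_insert,
   VAR_P and is_global_var are existing interfaces.  */

static void
sketch_note_decl_type (tree decl)
{
  /* Record the type for per-function debug-info purposes.  */
  used_types_insert (TREE_TYPE (decl));

  /* For file-scope variables, also record the (type, decl) pair.  */
  if (VAR_P (decl) && is_global_var (decl))
    types_used_by_var_decl_insert (TREE_TYPE (decl), decl);
}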
27a4cd48 DM |
6262 | namespace { |
6263 | ||
6264 | const pass_data pass_data_leaf_regs = | |
6265 | { | |
6266 | RTL_PASS, /* type */ | |
6267 | "*leaf_regs", /* name */ | |
6268 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
6269 | TV_NONE, /* tv_id */ |
6270 | 0, /* properties_required */ | |
6271 | 0, /* properties_provided */ | |
6272 | 0, /* properties_destroyed */ | |
6273 | 0, /* todo_flags_start */ | |
6274 | 0, /* todo_flags_finish */ | |
ef330312 PB |
6275 | }; |
6276 | ||
27a4cd48 DM |
6277 | class pass_leaf_regs : public rtl_opt_pass |
6278 | { | |
6279 | public: | |
c3284718 RS |
6280 | pass_leaf_regs (gcc::context *ctxt) |
6281 | : rtl_opt_pass (pass_data_leaf_regs, ctxt) | |
27a4cd48 DM |
6282 | {} |
6283 | ||
6284 | /* opt_pass methods: */ | |
be55bfe6 TS |
6285 | virtual unsigned int execute (function *) |
6286 | { | |
6287 | return rest_of_handle_check_leaf_regs (); | |
6288 | } | |
27a4cd48 DM |
6289 | |
6290 | }; // class pass_leaf_regs | |
6291 | ||
6292 | } // anon namespace | |
6293 | ||
6294 | rtl_opt_pass * | |
6295 | make_pass_leaf_regs (gcc::context *ctxt) | |
6296 | { | |
6297 | return new pass_leaf_regs (ctxt); | |
6298 | } | |
6299 | ||
6fb5fa3c DB |
6300 | static unsigned int |
6301 | rest_of_handle_thread_prologue_and_epilogue (void) | |
6302 | { | |
63d0f6ab SB |
6303 | /* prepare_shrink_wrap is sensitive to the block structure of the control |
6304 | flow graph, so clean it up first. */ | |
6fb5fa3c | 6305 | if (optimize) |
63d0f6ab | 6306 | cleanup_cfg (0); |
d3c12306 | 6307 | |
6fb5fa3c DB |
6308 | /* On some machines, the prologue and epilogue code, or parts thereof, |
6309 | can be represented as RTL. Doing so lets us schedule insns between | |
6310 | it and the rest of the code and also allows delayed branch | |
6311 | scheduling to operate in the epilogue. */ | |
6fb5fa3c | 6312 | thread_prologue_and_epilogue_insns (); |
d3c12306 | 6313 | |
86b107ae SB |
6314 | /* Some non-cold blocks may now be only reachable from cold blocks. |
6315 | Fix that up. */ | |
6316 | fixup_partitions (); | |
6317 | ||
bdc6e1ae SB |
6318 | /* Shrink-wrapping can result in unreachable edges in the epilogue, |
6319 | see PR57320. */ | |
63d0f6ab | 6320 | cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0); |
bdc6e1ae | 6321 | |
d3c12306 | 6322 | /* The stack usage info is finalized during prologue expansion. */ |
a11e0df4 | 6323 | if (flag_stack_usage_info) |
d3c12306 EB |
6324 | output_stack_usage (); |
6325 | ||
6fb5fa3c DB |
6326 | return 0; |
6327 | } | |
6328 | ||
27a4cd48 DM |
6329 | namespace { |
6330 | ||
6331 | const pass_data pass_data_thread_prologue_and_epilogue = | |
6332 | { | |
6333 | RTL_PASS, /* type */ | |
6334 | "pro_and_epilogue", /* name */ | |
6335 | OPTGROUP_NONE, /* optinfo_flags */ | |
27a4cd48 DM |
6336 | TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */ |
6337 | 0, /* properties_required */ | |
6338 | 0, /* properties_provided */ | |
6339 | 0, /* properties_destroyed */ | |
3bea341f RB |
6340 | 0, /* todo_flags_start */ |
6341 | ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */ | |
6fb5fa3c | 6342 | }; |
27a4cd48 DM |
6343 | |
6344 | class pass_thread_prologue_and_epilogue : public rtl_opt_pass | |
6345 | { | |
6346 | public: | |
c3284718 RS |
6347 | pass_thread_prologue_and_epilogue (gcc::context *ctxt) |
6348 | : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt) | |
27a4cd48 DM |
6349 | {} |
6350 | ||
6351 | /* opt_pass methods: */ | |
be55bfe6 TS |
6352 | virtual unsigned int execute (function *) |
6353 | { | |
6354 | return rest_of_handle_thread_prologue_and_epilogue (); | |
6355 | } | |
27a4cd48 DM |
6356 | |
6357 | }; // class pass_thread_prologue_and_epilogue | |
6358 | ||
6359 | } // anon namespace | |
6360 | ||
6361 | rtl_opt_pass * | |
6362 | make_pass_thread_prologue_and_epilogue (gcc::context *ctxt) | |
6363 | { | |
6364 | return new pass_thread_prologue_and_epilogue (ctxt); | |
6365 | } | |
d8d72314 PB |
6366 | \f |
6367 | ||
6368 | /* This mini-pass fixes fall-out from SSA in asm statements that have | |
b8698a0f | 6369 | in-out constraints. Say you start with |
d8d72314 PB |
6370 | |
6371 | orig = inout; | |
6372 | asm ("": "+mr" (inout)); | |
6373 | use (orig); | |
6374 | ||
6375 | which is transformed very early to use explicit output and match operands: | |
6376 | ||
6377 | orig = inout; | |
6378 | asm ("": "=mr" (inout) : "0" (inout)); | |
6379 | use (orig); | |
6380 | ||
6381 | Or, after SSA and copyprop, | |
6382 | ||
6383 | asm ("": "=mr" (inout_2) : "0" (inout_1)); | |
6384 | use (inout_1); | |
6385 | ||
6386 | Clearly inout_2 and inout_1 can't be coalesced easily anymore, as | |
6387 | they represent two separate values, so they will get different pseudo | |
6388 | registers during expansion. Then, since the two operands need to match | |
6389 | per the constraints, but use different pseudo registers, reload can | |
6390 | only register a reload for these operands. But reloads can only be | |
6391 | satisfied by hardregs, not by memory, so we need a register for this | |
6392 | reload, just because we are presented with non-matching operands. | |
6393 | So, even though we allow memory for this operand, no memory can be | |
6394 | used for it, just because the two operands don't match. This can | |
6395 | cause reload failures on register-starved targets. | |
6396 | ||
6397 | So it's a symptom of reload not being able to use memory for reloads | |
6398 | or, alternatively it's also a symptom of both operands not coming into | |
6399 | reload as matching (in which case the pseudo could go to memory just | |
6400 | fine, as the alternative allows it, and no reload would be necessary). | |
6401 | We fix the latter problem here, by transforming | |
6402 | ||
6403 | asm ("": "=mr" (inout_2) : "0" (inout_1)); | |
6404 | ||
6405 | back to | |
6406 | ||
6407 | inout_2 = inout_1; | |
6408 | asm ("": "=mr" (inout_2) : "0" (inout_2)); */ | |
6409 | ||
6410 | static void | |
691fe203 | 6411 | match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs) |
d8d72314 PB |
6412 | { |
6413 | int i; | |
6414 | bool changed = false; | |
6415 | rtx op = SET_SRC (p_sets[0]); | |
6416 | int ninputs = ASM_OPERANDS_INPUT_LENGTH (op); | |
6417 | rtvec inputs = ASM_OPERANDS_INPUT_VEC (op); | |
1b4572a8 | 6418 | bool *output_matched = XALLOCAVEC (bool, noutputs); |
d8d72314 | 6419 | |
d7b8033f | 6420 | memset (output_matched, 0, noutputs * sizeof (bool)); |
d8d72314 PB |
6421 | for (i = 0; i < ninputs; i++) |
6422 | { | |
691fe203 DM |
6423 | rtx input, output; |
6424 | rtx_insn *insns; | |
d8d72314 PB |
6425 | const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i); |
6426 | char *end; | |
53220215 | 6427 | int match, j; |
d8d72314 | 6428 | |
70f16287 JJ |
6429 | if (*constraint == '%') |
6430 | constraint++; | |
6431 | ||
d8d72314 PB |
6432 | match = strtoul (constraint, &end, 10); |
6433 | if (end == constraint) | |
6434 | continue; | |
6435 | ||
6436 | gcc_assert (match < noutputs); | |
6437 | output = SET_DEST (p_sets[match]); | |
6438 | input = RTVEC_ELT (inputs, i); | |
53220215 MM |
6439 | /* Only do the transformation for pseudos. */ |
6440 | if (! REG_P (output) | |
6441 | || rtx_equal_p (output, input) | |
3c896da0 | 6442 | || !(REG_P (input) || SUBREG_P (input) |
b3d89380 JJ |
6443 | || MEM_P (input) || CONSTANT_P (input)) |
6444 | || !general_operand (input, GET_MODE (output))) | |
d8d72314 PB |
6445 | continue; |
6446 | ||
53220215 MM |
6447 | /* We can't do anything if the output is also used as input, |
6448 | as we're going to overwrite it. */ | |
6449 | for (j = 0; j < ninputs; j++) | |
6450 | if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j))) | |
6451 | break; | |
6452 | if (j != ninputs) | |
6453 | continue; | |
6454 | ||
d7b8033f JJ |
6455 | /* Avoid changing the same input several times. For |
6456 | asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in)); | |
6457 | only change in once (to out1), rather than changing it | |
6458 | first to out1 and afterwards to out2. */ | |
6459 | if (i > 0) | |
6460 | { | |
6461 | for (j = 0; j < noutputs; j++) | |
6462 | if (output_matched[j] && input == SET_DEST (p_sets[j])) | |
6463 | break; | |
6464 | if (j != noutputs) | |
6465 | continue; | |
6466 | } | |
6467 | output_matched[match] = true; | |
6468 | ||
d8d72314 | 6469 | start_sequence (); |
53220215 | 6470 | emit_move_insn (output, input); |
d8d72314 PB |
6471 | insns = get_insns (); |
6472 | end_sequence (); | |
d8d72314 | 6473 | emit_insn_before (insns, insn); |
53220215 MM |
6474 | |
6475 | /* Now replace all mentions of the input with output. We can't | |
fa10beec | 6476 | just replace the occurrence in inputs[i], as the register might |
53220215 MM |
6477 | also be used in some other input (or even in an address of an |
6478 | output), which would mean possibly increasing the number of | |
6479 | inputs by one (namely 'output' in addition), which might pose | |
6480 | a too complicated problem for reload to solve. E.g. this situation: | |
6481 | ||
6482 | asm ("" : "=r" (output), "=m" (input) : "0" (input)) | |
6483 | ||
84fbffb2 | 6484 | Here 'input' is used in two occurrences as input (once for the |
53220215 | 6485 | input operand, once for the address in the second output operand). |
fa10beec | 6486 | If we would replace only the occurrence of the input operand (to |
53220215 MM |
6487 | make the matching) we would be left with this: |
6488 | ||
6489 | output = input | |
6490 | asm ("" : "=r" (output), "=m" (input) : "0" (output)) | |
6491 | ||
6492 | Now we suddenly have two different input values (containing the same | |
6493 | value, but different pseudos) where we formerly had only one. | |
6494 | With more complicated asms this might lead to reload failures | |
6495 | which wouldn't have happen without this pass. So, iterate over | |
84fbffb2 | 6496 | all operands and replace all occurrences of the register used. */ |
53220215 | 6497 | for (j = 0; j < noutputs; j++) |
1596d61e | 6498 | if (!rtx_equal_p (SET_DEST (p_sets[j]), input) |
53220215 MM |
6499 | && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j]))) |
6500 | SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]), | |
6501 | input, output); | |
6502 | for (j = 0; j < ninputs; j++) | |
6503 | if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j))) | |
6504 | RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j), | |
6505 | input, output); | |
6506 | ||
d8d72314 PB |
6507 | changed = true; |
6508 | } | |
6509 | ||
6510 | if (changed) | |
6511 | df_insn_rescan (insn); | |
6512 | } | |
6513 | ||
5cf18d25 ML |
6514 | /* Add the decl D to the local_decls list of FUN. */ |
6515 | ||
6516 | void | |
6517 | add_local_decl (struct function *fun, tree d) | |
6518 | { | |
8813a647 | 6519 | gcc_assert (VAR_P (d)); |
5cf18d25 ML |
6520 | vec_safe_push (fun->local_decls, d); |
6521 | } | |
6522 | ||
be55bfe6 TS |
6523 | namespace { |
6524 | ||
6525 | const pass_data pass_data_match_asm_constraints = | |
6526 | { | |
6527 | RTL_PASS, /* type */ | |
6528 | "asmcons", /* name */ | |
6529 | OPTGROUP_NONE, /* optinfo_flags */ | |
be55bfe6 TS |
6530 | TV_NONE, /* tv_id */ |
6531 | 0, /* properties_required */ | |
6532 | 0, /* properties_provided */ | |
6533 | 0, /* properties_destroyed */ | |
6534 | 0, /* todo_flags_start */ | |
6535 | 0, /* todo_flags_finish */ | |
6536 | }; | |
6537 | ||
6538 | class pass_match_asm_constraints : public rtl_opt_pass | |
6539 | { | |
6540 | public: | |
6541 | pass_match_asm_constraints (gcc::context *ctxt) | |
6542 | : rtl_opt_pass (pass_data_match_asm_constraints, ctxt) | |
6543 | {} | |
6544 | ||
6545 | /* opt_pass methods: */ | |
6546 | virtual unsigned int execute (function *); | |
6547 | ||
6548 | }; // class pass_match_asm_constraints | |
6549 | ||
6550 | unsigned | |
6551 | pass_match_asm_constraints::execute (function *fun) | |
d8d72314 PB |
6552 | { |
6553 | basic_block bb; | |
691fe203 DM |
6554 | rtx_insn *insn; |
6555 | rtx pat, *p_sets; | |
d8d72314 PB |
6556 | int noutputs; |
6557 | ||
e3b5732b | 6558 | if (!crtl->has_asm_statement) |
d8d72314 PB |
6559 | return 0; |
6560 | ||
6561 | df_set_flags (DF_DEFER_INSN_RESCAN); | |
be55bfe6 | 6562 | FOR_EACH_BB_FN (bb, fun) |
d8d72314 PB |
6563 | { |
6564 | FOR_BB_INSNS (bb, insn) | |
6565 | { | |
6566 | if (!INSN_P (insn)) | |
6567 | continue; | |
6568 | ||
6569 | pat = PATTERN (insn); | |
6570 | if (GET_CODE (pat) == PARALLEL) | |
6571 | p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0); | |
6572 | else if (GET_CODE (pat) == SET) | |
6573 | p_sets = &PATTERN (insn), noutputs = 1; | |
6574 | else | |
6575 | continue; | |
6576 | ||
6577 | if (GET_CODE (*p_sets) == SET | |
6578 | && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS) | |
6579 | match_asm_constraints_1 (insn, p_sets, noutputs); | |
6580 | } | |
6581 | } | |
6582 | ||
6583 | return TODO_df_finish; | |
6584 | } | |
6585 | ||
27a4cd48 DM |
6586 | } // anon namespace |
6587 | ||
6588 | rtl_opt_pass * | |
6589 | make_pass_match_asm_constraints (gcc::context *ctxt) | |
6590 | { | |
6591 | return new pass_match_asm_constraints (ctxt); | |
6592 | } | |
6593 | ||
faed5cc3 | 6594 | |
e2500fed | 6595 | #include "gt-function.h" |