]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/function.c
call.c (enforce_access): Emit error + inform.
[thirdparty/gcc.git] / gcc / function.c
CommitLineData
5e6908ea 1/* Expands front end tree to back end RTL for GCC.
5624e564 2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
6f086dfc 3
1322177d 4This file is part of GCC.
6f086dfc 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
6f086dfc 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
6f086dfc
RS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
6f086dfc 19
6f086dfc
RS
20/* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
8fff4fc1 32 not get a hard register. */
6f086dfc
RS
33
34#include "config.h"
670ee920 35#include "system.h"
4977bab6
ZW
36#include "coretypes.h"
37#include "tm.h"
0cbd9993 38#include "rtl-error.h"
40e23961
MC
39#include "hash-set.h"
40#include "machmode.h"
41#include "vec.h"
42#include "double-int.h"
43#include "input.h"
44#include "alias.h"
45#include "symtab.h"
46#include "wide-int.h"
47#include "inchash.h"
6f086dfc 48#include "tree.h"
40e23961 49#include "fold-const.h"
d8a2d370
DN
50#include "stor-layout.h"
51#include "varasm.h"
52#include "stringpool.h"
6f086dfc 53#include "flags.h"
1ef08c63 54#include "except.h"
83685514 55#include "hashtab.h"
83685514 56#include "hard-reg-set.h"
6f086dfc 57#include "function.h"
36566b39
PK
58#include "rtl.h"
59#include "statistics.h"
60#include "real.h"
61#include "fixed-value.h"
62#include "insn-config.h"
63#include "expmed.h"
64#include "dojump.h"
65#include "explow.h"
66#include "calls.h"
67#include "emit-rtl.h"
68#include "stmt.h"
6f086dfc 69#include "expr.h"
b0710fe1 70#include "insn-codes.h"
c6b97fac 71#include "optabs.h"
e78d8e51 72#include "libfuncs.h"
6f086dfc 73#include "regs.h"
6f086dfc
RS
74#include "recog.h"
75#include "output.h"
b1474bb7 76#include "tm_p.h"
7afff7cf 77#include "langhooks.h"
61f71b34 78#include "target.h"
677f3fa8 79#include "common/common-target.h"
2fb9a547 80#include "gimple-expr.h"
45b0be94 81#include "gimplify.h"
ef330312 82#include "tree-pass.h"
7d69de61 83#include "predict.h"
60393bbc
AM
84#include "dominance.h"
85#include "cfg.h"
86#include "cfgrtl.h"
87#include "cfganal.h"
88#include "cfgbuild.h"
89#include "cfgcleanup.h"
90#include "basic-block.h"
6fb5fa3c 91#include "df.h"
ffe14686
AM
92#include "params.h"
93#include "bb-reorder.h"
f30e25a3 94#include "shrink-wrap.h"
b9b5f433 95#include "toplev.h"
b8704801 96#include "rtl-iter.h"
d5e254e1
IE
97#include "tree-chkp.h"
98#include "rtl-chkp.h"
7d69de61 99
5576d6f2
TT
100/* So we can assign to cfun in this file. */
101#undef cfun
102
95f3f59e
JDA
103#ifndef STACK_ALIGNMENT_NEEDED
104#define STACK_ALIGNMENT_NEEDED 1
105#endif
106
975f3818
RS
107#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
108
6f086dfc
RS
109/* Round a value to the lowest integer less than it that is a multiple of
110 the required alignment. Avoid using division in case the value is
111 negative. Assume the alignment is a power of two. */
112#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
113
114/* Similar, but round to the next highest integer that meets the
115 alignment. */
116#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
117
6f086dfc 118/* Nonzero once virtual register instantiation has been done.
c39ada04
DD
119 assign_stack_local uses frame_pointer_rtx when this is nonzero.
120 calls.c:emit_library_call_value_1 uses it to set up
121 post-instantiation libcalls. */
122int virtuals_instantiated;
6f086dfc 123
df696a75 124/* Assign unique numbers to labels generated for profiling, debugging, etc. */
17211ab5 125static GTY(()) int funcdef_no;
f6f315fe 126
414c4dc4
NC
127/* These variables hold pointers to functions to create and destroy
128 target specific, per-function data structures. */
fa8db1f7 129struct machine_function * (*init_machine_status) (void);
46766466 130
b384405b 131/* The currently compiled function. */
01d939e8 132struct function *cfun = 0;
b384405b 133
cd9c1ca8 134/* These hashes record the prologue and epilogue insns. */
d242408f
TS
135
136struct insn_cache_hasher : ggc_cache_hasher<rtx>
137{
138 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
139 static bool equal (rtx a, rtx b) { return a == b; }
140};
141
142static GTY((cache))
143 hash_table<insn_cache_hasher> *prologue_insn_hash;
144static GTY((cache))
145 hash_table<insn_cache_hasher> *epilogue_insn_hash;
6f086dfc 146\f
b646ba3f 147
2a22f99c 148hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
9771b263 149vec<tree, va_gc> *types_used_by_cur_var_decl;
b646ba3f 150
e15679f8
RK
151/* Forward declarations. */
152
fa8db1f7 153static struct temp_slot *find_temp_slot_from_address (rtx);
fa8db1f7 154static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
ef4bddc2 155static void pad_below (struct args_size *, machine_mode, tree);
691fe203 156static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
fa8db1f7
AJ
157static int all_blocks (tree, tree *);
158static tree *get_block_vector (tree, int *);
159extern tree debug_find_var_in_block_tree (tree, tree);
1f52178b 160/* We always define `record_insns' even if it's not used so that we
ec97b83a 161 can always export `prologue_epilogue_contains'. */
d242408f
TS
162static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
163 ATTRIBUTE_UNUSED;
164static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
db2960f4 165static void prepare_function_start (void);
fa8db1f7
AJ
166static void do_clobber_return_reg (rtx, void *);
167static void do_use_return_reg (rtx, void *);
c20bf1f3 168\f
936fc9ba
JH
169/* Stack of nested functions. */
170/* Keep track of the cfun stack. */
e5e809f4 171
936fc9ba 172typedef struct function *function_p;
e5e809f4 173
9771b263 174static vec<function_p> function_context_stack;
6f086dfc
RS
175
176/* Save the current context for compilation of a nested function.
d2784db4 177 This is called from language-specific code. */
6f086dfc
RS
178
179void
d2784db4 180push_function_context (void)
6f086dfc 181{
01d939e8 182 if (cfun == 0)
182e0d71 183 allocate_struct_function (NULL, false);
b384405b 184
9771b263 185 function_context_stack.safe_push (cfun);
db2960f4 186 set_cfun (NULL);
6f086dfc
RS
187}
188
189/* Restore the last saved context, at the end of a nested function.
190 This function is called from language-specific code. */
191
192void
d2784db4 193pop_function_context (void)
6f086dfc 194{
9771b263 195 struct function *p = function_context_stack.pop ();
db2960f4 196 set_cfun (p);
6f086dfc 197 current_function_decl = p->decl;
6f086dfc 198
6f086dfc 199 /* Reset variables that have known state during rtx generation. */
6f086dfc 200 virtuals_instantiated = 0;
1b3d8f8a 201 generating_concat_p = 1;
6f086dfc 202}
e4a4639e 203
fa51b01b
RH
204/* Clear out all parts of the state in F that can safely be discarded
205 after the function has been parsed, but not compiled, to let
206 garbage collection reclaim the memory. */
207
208void
fa8db1f7 209free_after_parsing (struct function *f)
fa51b01b 210{
e8924938 211 f->language = 0;
fa51b01b
RH
212}
213
e2ecd91c
BS
214/* Clear out all parts of the state in F that can safely be discarded
215 after the function has been compiled, to let garbage collection
0a8a198c 216 reclaim the memory. */
21cd906e 217
e2ecd91c 218void
fa8db1f7 219free_after_compilation (struct function *f)
e2ecd91c 220{
cd9c1ca8
RH
221 prologue_insn_hash = NULL;
222 epilogue_insn_hash = NULL;
223
04695783 224 free (crtl->emit.regno_pointer_align);
f995dcfe 225
3e029763 226 memset (crtl, 0, sizeof (struct rtl_data));
e2500fed 227 f->eh = NULL;
e2500fed 228 f->machine = NULL;
997de8ed 229 f->cfg = NULL;
fa51b01b 230
57b9e367 231 regno_reg_rtx = NULL;
e2ecd91c 232}
6f086dfc 233\f
49ad7cfa
BS
234/* Return size needed for stack frame based on slots so far allocated.
235 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
236 the caller may have to do that. */
9fb798d7 237
49ad7cfa 238HOST_WIDE_INT
fa8db1f7 239get_frame_size (void)
49ad7cfa 240{
bd60bab2
JH
241 if (FRAME_GROWS_DOWNWARD)
242 return -frame_offset;
243 else
244 return frame_offset;
49ad7cfa
BS
245}
246
9fb798d7
EB
247/* Issue an error message and return TRUE if frame OFFSET overflows in
248 the signed target pointer arithmetics for function FUNC. Otherwise
249 return FALSE. */
250
251bool
252frame_offset_overflow (HOST_WIDE_INT offset, tree func)
b8698a0f 253{
9fb798d7
EB
254 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
255
256 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
257 /* Leave room for the fixed part of the frame. */
258 - 64 * UNITS_PER_WORD)
259 {
c5d75364
MLI
260 error_at (DECL_SOURCE_LOCATION (func),
261 "total size of local objects too large");
9fb798d7
EB
262 return TRUE;
263 }
264
265 return FALSE;
266}
267
76fe54f0
L
268/* Return stack slot alignment in bits for TYPE and MODE. */
269
270static unsigned int
ef4bddc2 271get_stack_local_alignment (tree type, machine_mode mode)
76fe54f0
L
272{
273 unsigned int alignment;
274
275 if (mode == BLKmode)
276 alignment = BIGGEST_ALIGNMENT;
277 else
278 alignment = GET_MODE_ALIGNMENT (mode);
279
280 /* Allow the frond-end to (possibly) increase the alignment of this
281 stack slot. */
282 if (! type)
283 type = lang_hooks.types.type_for_mode (mode, 0);
284
285 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
286}
287
56731d64
BS
288/* Determine whether it is possible to fit a stack slot of size SIZE and
289 alignment ALIGNMENT into an area in the stack frame that starts at
290 frame offset START and has a length of LENGTH. If so, store the frame
291 offset to be used for the stack slot in *POFFSET and return true;
292 return false otherwise. This function will extend the frame size when
293 given a start/length pair that lies at the end of the frame. */
294
295static bool
296try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
297 HOST_WIDE_INT size, unsigned int alignment,
298 HOST_WIDE_INT *poffset)
299{
300 HOST_WIDE_INT this_frame_offset;
301 int frame_off, frame_alignment, frame_phase;
302
303 /* Calculate how many bytes the start of local variables is off from
304 stack alignment. */
305 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
306 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
307 frame_phase = frame_off ? frame_alignment - frame_off : 0;
308
309 /* Round the frame offset to the specified alignment. */
310
311 /* We must be careful here, since FRAME_OFFSET might be negative and
312 division with a negative dividend isn't as well defined as we might
313 like. So we instead assume that ALIGNMENT is a power of two and
314 use logical operations which are unambiguous. */
315 if (FRAME_GROWS_DOWNWARD)
316 this_frame_offset
317 = (FLOOR_ROUND (start + length - size - frame_phase,
318 (unsigned HOST_WIDE_INT) alignment)
319 + frame_phase);
320 else
321 this_frame_offset
322 = (CEIL_ROUND (start - frame_phase,
323 (unsigned HOST_WIDE_INT) alignment)
324 + frame_phase);
325
326 /* See if it fits. If this space is at the edge of the frame,
327 consider extending the frame to make it fit. Our caller relies on
328 this when allocating a new slot. */
329 if (frame_offset == start && this_frame_offset < frame_offset)
330 frame_offset = this_frame_offset;
331 else if (this_frame_offset < start)
332 return false;
333 else if (start + length == frame_offset
334 && this_frame_offset + size > start + length)
335 frame_offset = this_frame_offset + size;
336 else if (this_frame_offset + size > start + length)
337 return false;
338
339 *poffset = this_frame_offset;
340 return true;
341}
342
343/* Create a new frame_space structure describing free space in the stack
344 frame beginning at START and ending at END, and chain it into the
345 function's frame_space_list. */
346
347static void
348add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
349{
766090c2 350 struct frame_space *space = ggc_alloc<frame_space> ();
56731d64
BS
351 space->next = crtl->frame_space_list;
352 crtl->frame_space_list = space;
353 space->start = start;
354 space->length = end - start;
355}
356
6f086dfc
RS
357/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
358 with machine mode MODE.
718fe406 359
6f086dfc
RS
360 ALIGN controls the amount of alignment for the address of the slot:
361 0 means according to MODE,
362 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
cfa29a4c 363 -2 means use BITS_PER_UNIT,
6f086dfc
RS
364 positive specifies alignment boundary in bits.
365
80a832cd
JJ
366 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
367 alignment and ASLK_RECORD_PAD bit set if we should remember
368 extra space we allocated for alignment purposes. When we are
369 called from assign_stack_temp_for_type, it is not set so we don't
370 track the same stack slot in two independent lists.
2e3f842f 371
bd60bab2 372 We do not round to stack_boundary here. */
6f086dfc 373
bd60bab2 374rtx
ef4bddc2 375assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
80a832cd 376 int align, int kind)
6f086dfc 377{
b3694847 378 rtx x, addr;
6f086dfc 379 int bigend_correction = 0;
427188d5 380 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
76fe54f0 381 unsigned int alignment, alignment_in_bits;
6f086dfc
RS
382
383 if (align == 0)
384 {
76fe54f0 385 alignment = get_stack_local_alignment (NULL, mode);
d16790f2 386 alignment /= BITS_PER_UNIT;
6f086dfc
RS
387 }
388 else if (align == -1)
389 {
390 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
391 size = CEIL_ROUND (size, alignment);
392 }
cfa29a4c
EB
393 else if (align == -2)
394 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
6f086dfc
RS
395 else
396 alignment = align / BITS_PER_UNIT;
397
2e3f842f
L
398 alignment_in_bits = alignment * BITS_PER_UNIT;
399
2e3f842f
L
400 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
401 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
402 {
403 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
404 alignment = alignment_in_bits / BITS_PER_UNIT;
405 }
a0871656 406
2e3f842f
L
407 if (SUPPORTS_STACK_ALIGNMENT)
408 {
409 if (crtl->stack_alignment_estimated < alignment_in_bits)
410 {
411 if (!crtl->stack_realign_processed)
412 crtl->stack_alignment_estimated = alignment_in_bits;
413 else
414 {
415 /* If stack is realigned and stack alignment value
416 hasn't been finalized, it is OK not to increase
417 stack_alignment_estimated. The bigger alignment
418 requirement is recorded in stack_alignment_needed
419 below. */
420 gcc_assert (!crtl->stack_realign_finalized);
421 if (!crtl->stack_realign_needed)
422 {
423 /* It is OK to reduce the alignment as long as the
424 requested size is 0 or the estimated stack
425 alignment >= mode alignment. */
80a832cd 426 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
2e3f842f
L
427 || size == 0
428 || (crtl->stack_alignment_estimated
429 >= GET_MODE_ALIGNMENT (mode)));
430 alignment_in_bits = crtl->stack_alignment_estimated;
431 alignment = alignment_in_bits / BITS_PER_UNIT;
432 }
433 }
434 }
435 }
76fe54f0
L
436
437 if (crtl->stack_alignment_needed < alignment_in_bits)
438 crtl->stack_alignment_needed = alignment_in_bits;
f85882d8
JY
439 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
440 crtl->max_used_stack_slot_alignment = alignment_in_bits;
a0871656 441
56731d64
BS
442 if (mode != BLKmode || size != 0)
443 {
80a832cd 444 if (kind & ASLK_RECORD_PAD)
56731d64 445 {
80a832cd
JJ
446 struct frame_space **psp;
447
448 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
449 {
450 struct frame_space *space = *psp;
451 if (!try_fit_stack_local (space->start, space->length, size,
452 alignment, &slot_offset))
453 continue;
454 *psp = space->next;
455 if (slot_offset > space->start)
456 add_frame_space (space->start, slot_offset);
457 if (slot_offset + size < space->start + space->length)
458 add_frame_space (slot_offset + size,
459 space->start + space->length);
460 goto found_space;
461 }
56731d64
BS
462 }
463 }
464 else if (!STACK_ALIGNMENT_NEEDED)
465 {
466 slot_offset = frame_offset;
467 goto found_space;
468 }
469
470 old_frame_offset = frame_offset;
471
472 if (FRAME_GROWS_DOWNWARD)
473 {
474 frame_offset -= size;
475 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
58dbcf05 476
80a832cd
JJ
477 if (kind & ASLK_RECORD_PAD)
478 {
479 if (slot_offset > frame_offset)
480 add_frame_space (frame_offset, slot_offset);
481 if (slot_offset + size < old_frame_offset)
482 add_frame_space (slot_offset + size, old_frame_offset);
483 }
56731d64
BS
484 }
485 else
95f3f59e 486 {
56731d64
BS
487 frame_offset += size;
488 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
489
80a832cd
JJ
490 if (kind & ASLK_RECORD_PAD)
491 {
492 if (slot_offset > old_frame_offset)
493 add_frame_space (old_frame_offset, slot_offset);
494 if (slot_offset + size < frame_offset)
495 add_frame_space (slot_offset + size, frame_offset);
496 }
95f3f59e 497 }
6f086dfc 498
56731d64 499 found_space:
6f086dfc
RS
500 /* On a big-endian machine, if we are allocating more space than we will use,
501 use the least significant bytes of those that are allocated. */
d70eadf7 502 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
6f086dfc 503 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc 504
6f086dfc
RS
505 /* If we have already instantiated virtual registers, return the actual
506 address relative to the frame pointer. */
bd60bab2 507 if (virtuals_instantiated)
0a81f074 508 addr = plus_constant (Pmode, frame_pointer_rtx,
c41536f5 509 trunc_int_for_mode
56731d64 510 (slot_offset + bigend_correction
c41536f5 511 + STARTING_FRAME_OFFSET, Pmode));
6f086dfc 512 else
0a81f074 513 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
c41536f5 514 trunc_int_for_mode
56731d64 515 (slot_offset + bigend_correction,
c41536f5 516 Pmode));
6f086dfc 517
38a448ca 518 x = gen_rtx_MEM (mode, addr);
76fe54f0 519 set_mem_align (x, alignment_in_bits);
be0c514c 520 MEM_NOTRAP_P (x) = 1;
6f086dfc 521
bd60bab2
JH
522 stack_slot_list
523 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
e2ecd91c 524
bd60bab2
JH
525 if (frame_offset_overflow (frame_offset, current_function_decl))
526 frame_offset = 0;
9070115b 527
6f086dfc
RS
528 return x;
529}
2e3f842f
L
530
531/* Wrap up assign_stack_local_1 with last parameter as false. */
532
533rtx
ef4bddc2 534assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
2e3f842f 535{
80a832cd 536 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
2e3f842f 537}
0aea6467 538\f
fb0703f7
SB
539/* In order to evaluate some expressions, such as function calls returning
540 structures in memory, we need to temporarily allocate stack locations.
541 We record each allocated temporary in the following structure.
542
543 Associated with each temporary slot is a nesting level. When we pop up
544 one level, all temporaries associated with the previous level are freed.
545 Normally, all temporaries are freed after the execution of the statement
546 in which they were created. However, if we are inside a ({...}) grouping,
547 the result may be in a temporary and hence must be preserved. If the
548 result could be in a temporary, we preserve it if we can determine which
549 one it is in. If we cannot determine which temporary may contain the
550 result, all temporaries are preserved. A temporary is preserved by
9474e8ab 551 pretending it was allocated at the previous nesting level. */
fb0703f7 552
d1b38208 553struct GTY(()) temp_slot {
fb0703f7
SB
554 /* Points to next temporary slot. */
555 struct temp_slot *next;
556 /* Points to previous temporary slot. */
557 struct temp_slot *prev;
558 /* The rtx to used to reference the slot. */
559 rtx slot;
fb0703f7
SB
560 /* The size, in units, of the slot. */
561 HOST_WIDE_INT size;
562 /* The type of the object in the slot, or zero if it doesn't correspond
563 to a type. We use this to determine whether a slot can be reused.
564 It can be reused if objects of the type of the new slot will always
565 conflict with objects of the type of the old slot. */
566 tree type;
8f5929e1
JJ
567 /* The alignment (in bits) of the slot. */
568 unsigned int align;
fb0703f7
SB
569 /* Nonzero if this temporary is currently in use. */
570 char in_use;
fb0703f7
SB
571 /* Nesting level at which this slot is being used. */
572 int level;
fb0703f7
SB
573 /* The offset of the slot from the frame_pointer, including extra space
574 for alignment. This info is for combine_temp_slots. */
575 HOST_WIDE_INT base_offset;
576 /* The size of the slot, including extra space for alignment. This
577 info is for combine_temp_slots. */
578 HOST_WIDE_INT full_size;
579};
580
2a22f99c
TS
581/* Entry for the below hash table. */
582struct GTY((for_user)) temp_slot_address_entry {
fb0703f7
SB
583 hashval_t hash;
584 rtx address;
585 struct temp_slot *temp_slot;
586};
587
2a22f99c
TS
588struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
589{
590 static hashval_t hash (temp_slot_address_entry *);
591 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
592};
593
594/* A table of addresses that represent a stack slot. The table is a mapping
595 from address RTXen to a temp slot. */
596static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
597static size_t n_temp_slots_in_use;
598
0aea6467
ZD
599/* Removes temporary slot TEMP from LIST. */
600
601static void
602cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
603{
604 if (temp->next)
605 temp->next->prev = temp->prev;
606 if (temp->prev)
607 temp->prev->next = temp->next;
608 else
609 *list = temp->next;
610
611 temp->prev = temp->next = NULL;
612}
613
614/* Inserts temporary slot TEMP to LIST. */
615
616static void
617insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
618{
619 temp->next = *list;
620 if (*list)
621 (*list)->prev = temp;
622 temp->prev = NULL;
623 *list = temp;
624}
625
626/* Returns the list of used temp slots at LEVEL. */
627
628static struct temp_slot **
629temp_slots_at_level (int level)
630{
9771b263
DN
631 if (level >= (int) vec_safe_length (used_temp_slots))
632 vec_safe_grow_cleared (used_temp_slots, level + 1);
0aea6467 633
9771b263 634 return &(*used_temp_slots)[level];
0aea6467
ZD
635}
636
637/* Returns the maximal temporary slot level. */
638
639static int
640max_slot_level (void)
641{
642 if (!used_temp_slots)
643 return -1;
644
9771b263 645 return used_temp_slots->length () - 1;
0aea6467
ZD
646}
647
648/* Moves temporary slot TEMP to LEVEL. */
649
650static void
651move_slot_to_level (struct temp_slot *temp, int level)
652{
653 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
654 insert_slot_to_list (temp, temp_slots_at_level (level));
655 temp->level = level;
656}
657
658/* Make temporary slot TEMP available. */
659
660static void
661make_slot_available (struct temp_slot *temp)
662{
663 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
664 insert_slot_to_list (temp, &avail_temp_slots);
665 temp->in_use = 0;
666 temp->level = -1;
f8395d62 667 n_temp_slots_in_use--;
0aea6467 668}
fb0703f7
SB
669
670/* Compute the hash value for an address -> temp slot mapping.
671 The value is cached on the mapping entry. */
672static hashval_t
673temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
674{
675 int do_not_record = 0;
676 return hash_rtx (t->address, GET_MODE (t->address),
677 &do_not_record, NULL, false);
678}
679
680/* Return the hash value for an address -> temp slot mapping. */
2a22f99c
TS
681hashval_t
682temp_address_hasher::hash (temp_slot_address_entry *t)
fb0703f7 683{
fb0703f7
SB
684 return t->hash;
685}
686
687/* Compare two address -> temp slot mapping entries. */
2a22f99c
TS
688bool
689temp_address_hasher::equal (temp_slot_address_entry *t1,
690 temp_slot_address_entry *t2)
fb0703f7 691{
fb0703f7
SB
692 return exp_equiv_p (t1->address, t2->address, 0, true);
693}
694
695/* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */
696static void
697insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
698{
766090c2 699 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
fb0703f7
SB
700 t->address = address;
701 t->temp_slot = temp_slot;
702 t->hash = temp_slot_address_compute_hash (t);
2a22f99c 703 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
fb0703f7
SB
704}
705
706/* Remove an address -> temp slot mapping entry if the temp slot is
707 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
2a22f99c
TS
708int
709remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
fb0703f7 710{
2a22f99c 711 const struct temp_slot_address_entry *t = *slot;
fb0703f7 712 if (! t->temp_slot->in_use)
2a22f99c 713 temp_slot_address_table->clear_slot (slot);
fb0703f7
SB
714 return 1;
715}
716
717/* Remove all mappings of addresses to unused temp slots. */
718static void
719remove_unused_temp_slot_addresses (void)
720{
f8395d62
MM
721 /* Use quicker clearing if there aren't any active temp slots. */
722 if (n_temp_slots_in_use)
2a22f99c
TS
723 temp_slot_address_table->traverse
724 <void *, remove_unused_temp_slot_addresses_1> (NULL);
f8395d62 725 else
2a22f99c 726 temp_slot_address_table->empty ();
fb0703f7
SB
727}
728
729/* Find the temp slot corresponding to the object at address X. */
730
731static struct temp_slot *
732find_temp_slot_from_address (rtx x)
733{
734 struct temp_slot *p;
735 struct temp_slot_address_entry tmp, *t;
736
737 /* First try the easy way:
738 See if X exists in the address -> temp slot mapping. */
739 tmp.address = x;
740 tmp.temp_slot = NULL;
741 tmp.hash = temp_slot_address_compute_hash (&tmp);
2a22f99c 742 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
fb0703f7
SB
743 if (t)
744 return t->temp_slot;
745
746 /* If we have a sum involving a register, see if it points to a temp
747 slot. */
748 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
749 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
750 return p;
751 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
752 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
753 return p;
754
755 /* Last resort: Address is a virtual stack var address. */
756 if (GET_CODE (x) == PLUS
757 && XEXP (x, 0) == virtual_stack_vars_rtx
481683e1 758 && CONST_INT_P (XEXP (x, 1)))
fb0703f7
SB
759 {
760 int i;
761 for (i = max_slot_level (); i >= 0; i--)
762 for (p = *temp_slots_at_level (i); p; p = p->next)
763 {
764 if (INTVAL (XEXP (x, 1)) >= p->base_offset
765 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
766 return p;
767 }
768 }
769
770 return NULL;
771}
6f086dfc
RS
772\f
773/* Allocate a temporary stack slot and record it for possible later
774 reuse.
775
776 MODE is the machine mode to be given to the returned rtx.
777
778 SIZE is the size in units of the space required. We do no rounding here
779 since assign_stack_local will do any required rounding.
780
a4c6502a 781 TYPE is the type that will be used for the stack slot. */
6f086dfc 782
a06ef755 783rtx
ef4bddc2 784assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
9474e8ab 785 tree type)
6f086dfc 786{
74e2819c 787 unsigned int align;
0aea6467 788 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
faa964e5 789 rtx slot;
6f086dfc 790
303ec2aa
RK
791 /* If SIZE is -1 it means that somebody tried to allocate a temporary
792 of a variable size. */
0bccc606 793 gcc_assert (size != -1);
303ec2aa 794
76fe54f0 795 align = get_stack_local_alignment (type, mode);
d16790f2
JW
796
797 /* Try to find an available, already-allocated temporary of the proper
798 mode which meets the size and alignment requirements. Choose the
3e8b0446 799 smallest one with the closest alignment.
b8698a0f 800
3e8b0446
ZD
801 If assign_stack_temp is called outside of the tree->rtl expansion,
802 we cannot reuse the stack slots (that may still refer to
803 VIRTUAL_STACK_VARS_REGNUM). */
804 if (!virtuals_instantiated)
0aea6467 805 {
3e8b0446 806 for (p = avail_temp_slots; p; p = p->next)
0aea6467 807 {
3e8b0446
ZD
808 if (p->align >= align && p->size >= size
809 && GET_MODE (p->slot) == mode
810 && objects_must_conflict_p (p->type, type)
811 && (best_p == 0 || best_p->size > p->size
812 || (best_p->size == p->size && best_p->align > p->align)))
0aea6467 813 {
3e8b0446
ZD
814 if (p->align == align && p->size == size)
815 {
816 selected = p;
817 cut_slot_from_list (selected, &avail_temp_slots);
818 best_p = 0;
819 break;
820 }
821 best_p = p;
0aea6467 822 }
0aea6467
ZD
823 }
824 }
6f086dfc
RS
825
826 /* Make our best, if any, the one to use. */
827 if (best_p)
a45035b6 828 {
0aea6467
ZD
829 selected = best_p;
830 cut_slot_from_list (selected, &avail_temp_slots);
831
a45035b6
JW
832 /* If there are enough aligned bytes left over, make them into a new
833 temp_slot so that the extra bytes don't get wasted. Do this only
834 for BLKmode slots, so that we can be sure of the alignment. */
3bdf5ad1 835 if (GET_MODE (best_p->slot) == BLKmode)
a45035b6 836 {
d16790f2 837 int alignment = best_p->align / BITS_PER_UNIT;
e5e809f4 838 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
a45035b6
JW
839
840 if (best_p->size - rounded_size >= alignment)
841 {
766090c2 842 p = ggc_alloc<temp_slot> ();
9474e8ab 843 p->in_use = 0;
a45035b6 844 p->size = best_p->size - rounded_size;
307d8cd6
RK
845 p->base_offset = best_p->base_offset + rounded_size;
846 p->full_size = best_p->full_size - rounded_size;
be0c514c 847 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
d16790f2 848 p->align = best_p->align;
1da68f56 849 p->type = best_p->type;
0aea6467 850 insert_slot_to_list (p, &avail_temp_slots);
a45035b6 851
38a448ca
RH
852 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
853 stack_slot_list);
a45035b6
JW
854
855 best_p->size = rounded_size;
291dde90 856 best_p->full_size = rounded_size;
a45035b6
JW
857 }
858 }
a45035b6 859 }
718fe406 860
6f086dfc 861 /* If we still didn't find one, make a new temporary. */
0aea6467 862 if (selected == 0)
6f086dfc 863 {
e5e809f4
JL
864 HOST_WIDE_INT frame_offset_old = frame_offset;
865
766090c2 866 p = ggc_alloc<temp_slot> ();
e5e809f4 867
c87a0a39
JL
868 /* We are passing an explicit alignment request to assign_stack_local.
869 One side effect of that is assign_stack_local will not round SIZE
870 to ensure the frame offset remains suitably aligned.
871
872 So for requests which depended on the rounding of SIZE, we go ahead
873 and round it now. We also make sure ALIGNMENT is at least
874 BIGGEST_ALIGNMENT. */
0bccc606 875 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
80a832cd
JJ
876 p->slot = assign_stack_local_1 (mode,
877 (mode == BLKmode
878 ? CEIL_ROUND (size,
879 (int) align
880 / BITS_PER_UNIT)
881 : size),
882 align, 0);
d16790f2
JW
883
884 p->align = align;
e5e809f4 885
b2a80c0d
DE
886 /* The following slot size computation is necessary because we don't
887 know the actual size of the temporary slot until assign_stack_local
888 has performed all the frame alignment and size rounding for the
fc91b0d0
RK
889 requested temporary. Note that extra space added for alignment
890 can be either above or below this stack slot depending on which
891 way the frame grows. We include the extra space if and only if it
892 is above this slot. */
f62c8a5c
JJ
893 if (FRAME_GROWS_DOWNWARD)
894 p->size = frame_offset_old - frame_offset;
895 else
896 p->size = size;
e5e809f4 897
fc91b0d0 898 /* Now define the fields used by combine_temp_slots. */
f62c8a5c
JJ
899 if (FRAME_GROWS_DOWNWARD)
900 {
901 p->base_offset = frame_offset;
902 p->full_size = frame_offset_old - frame_offset;
903 }
904 else
905 {
906 p->base_offset = frame_offset_old;
907 p->full_size = frame_offset - frame_offset_old;
908 }
0aea6467
ZD
909
910 selected = p;
6f086dfc
RS
911 }
912
0aea6467 913 p = selected;
6f086dfc 914 p->in_use = 1;
1da68f56 915 p->type = type;
7efcb746 916 p->level = temp_slot_level;
f8395d62 917 n_temp_slots_in_use++;
1995f267 918
0aea6467
ZD
919 pp = temp_slots_at_level (p->level);
920 insert_slot_to_list (p, pp);
fb0703f7 921 insert_temp_slot_address (XEXP (p->slot, 0), p);
faa964e5
UW
922
923 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
924 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
925 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
3bdf5ad1 926
1da68f56
RK
927 /* If we know the alias set for the memory that will be used, use
928 it. If there's no TYPE, then we don't know anything about the
929 alias set for the memory. */
faa964e5
UW
930 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
931 set_mem_align (slot, align);
1da68f56 932
30f7a378 933 /* If a type is specified, set the relevant flags. */
3bdf5ad1 934 if (type != 0)
55356334 935 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
be0c514c 936 MEM_NOTRAP_P (slot) = 1;
3bdf5ad1 937
faa964e5 938 return slot;
6f086dfc 939}
d16790f2
JW
940
941/* Allocate a temporary stack slot and record it for possible later
9474e8ab 942 reuse. First two arguments are same as in preceding function. */
d16790f2
JW
943
944rtx
ef4bddc2 945assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
d16790f2 946{
9474e8ab 947 return assign_stack_temp_for_type (mode, size, NULL_TREE);
d16790f2 948}
638141a6 949\f
9432c136
EB
950/* Assign a temporary.
951 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
952 and so that should be used in error messages. In either case, we
953 allocate of the given type.
230f21b4 954 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
b55d9ff8
RK
955 it is 0 if a register is OK.
956 DONT_PROMOTE is 1 if we should not promote values in register
957 to wider modes. */
230f21b4
PB
958
959rtx
9474e8ab 960assign_temp (tree type_or_decl, int memory_required,
fa8db1f7 961 int dont_promote ATTRIBUTE_UNUSED)
230f21b4 962{
9432c136 963 tree type, decl;
ef4bddc2 964 machine_mode mode;
9e1622ed 965#ifdef PROMOTE_MODE
9432c136
EB
966 int unsignedp;
967#endif
968
969 if (DECL_P (type_or_decl))
970 decl = type_or_decl, type = TREE_TYPE (decl);
971 else
972 decl = NULL, type = type_or_decl;
973
974 mode = TYPE_MODE (type);
9e1622ed 975#ifdef PROMOTE_MODE
8df83eae 976 unsignedp = TYPE_UNSIGNED (type);
0ce8a59c 977#endif
638141a6 978
230f21b4
PB
979 if (mode == BLKmode || memory_required)
980 {
e5e809f4 981 HOST_WIDE_INT size = int_size_in_bytes (type);
230f21b4
PB
982 rtx tmp;
983
44affdae
JH
984 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
985 problems with allocating the stack space. */
986 if (size == 0)
987 size = 1;
988
230f21b4 989 /* Unfortunately, we don't yet know how to allocate variable-sized
a441447f
OH
990 temporaries. However, sometimes we can find a fixed upper limit on
991 the size, so try that instead. */
992 else if (size == -1)
993 size = max_int_size_in_bytes (type);
e30bb772 994
9432c136
EB
995 /* The size of the temporary may be too large to fit into an integer. */
996 /* ??? Not sure this should happen except for user silliness, so limit
797a6ac1 997 this to things that aren't compiler-generated temporaries. The
535a42b1 998 rest of the time we'll die in assign_stack_temp_for_type. */
9432c136
EB
999 if (decl && size == -1
1000 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1001 {
dee15844 1002 error ("size of variable %q+D is too large", decl);
9432c136
EB
1003 size = 1;
1004 }
1005
9474e8ab 1006 tmp = assign_stack_temp_for_type (mode, size, type);
230f21b4
PB
1007 return tmp;
1008 }
638141a6 1009
9e1622ed 1010#ifdef PROMOTE_MODE
b55d9ff8 1011 if (! dont_promote)
cde0f3fd 1012 mode = promote_mode (type, mode, &unsignedp);
230f21b4 1013#endif
638141a6 1014
230f21b4
PB
1015 return gen_reg_rtx (mode);
1016}
638141a6 1017\f
a45035b6
JW
1018/* Combine temporary stack slots which are adjacent on the stack.
1019
1020 This allows for better use of already allocated stack space. This is only
1021 done for BLKmode slots because we can be sure that we won't have alignment
1022 problems in this case. */
1023
6fe79279 1024static void
fa8db1f7 1025combine_temp_slots (void)
a45035b6 1026{
0aea6467 1027 struct temp_slot *p, *q, *next, *next_q;
e5e809f4
JL
1028 int num_slots;
1029
a4c6502a
MM
1030 /* We can't combine slots, because the information about which slot
1031 is in which alias set will be lost. */
1032 if (flag_strict_aliasing)
1033 return;
1034
718fe406 1035 /* If there are a lot of temp slots, don't do anything unless
d6a7951f 1036 high levels of optimization. */
e5e809f4 1037 if (! flag_expensive_optimizations)
0aea6467 1038 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
e5e809f4
JL
1039 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1040 return;
a45035b6 1041
0aea6467 1042 for (p = avail_temp_slots; p; p = next)
e9b7093a
RS
1043 {
1044 int delete_p = 0;
e5e809f4 1045
0aea6467
ZD
1046 next = p->next;
1047
1048 if (GET_MODE (p->slot) != BLKmode)
1049 continue;
1050
1051 for (q = p->next; q; q = next_q)
e9b7093a 1052 {
0aea6467
ZD
1053 int delete_q = 0;
1054
1055 next_q = q->next;
1056
1057 if (GET_MODE (q->slot) != BLKmode)
1058 continue;
1059
1060 if (p->base_offset + p->full_size == q->base_offset)
1061 {
1062 /* Q comes after P; combine Q into P. */
1063 p->size += q->size;
1064 p->full_size += q->full_size;
1065 delete_q = 1;
1066 }
1067 else if (q->base_offset + q->full_size == p->base_offset)
1068 {
1069 /* P comes after Q; combine P into Q. */
1070 q->size += p->size;
1071 q->full_size += p->full_size;
1072 delete_p = 1;
1073 break;
1074 }
1075 if (delete_q)
1076 cut_slot_from_list (q, &avail_temp_slots);
e9b7093a 1077 }
0aea6467
ZD
1078
1079 /* Either delete P or advance past it. */
1080 if (delete_p)
1081 cut_slot_from_list (p, &avail_temp_slots);
e9b7093a 1082 }
a45035b6 1083}
6f086dfc 1084\f
82d6e6fc
KG
1085/* Indicate that NEW_RTX is an alternate way of referring to the temp
1086 slot that previously was known by OLD_RTX. */
e5e76139
RK
1087
1088void
82d6e6fc 1089update_temp_slot_address (rtx old_rtx, rtx new_rtx)
e5e76139 1090{
14a774a9 1091 struct temp_slot *p;
e5e76139 1092
82d6e6fc 1093 if (rtx_equal_p (old_rtx, new_rtx))
e5e76139 1094 return;
14a774a9 1095
82d6e6fc 1096 p = find_temp_slot_from_address (old_rtx);
14a774a9 1097
82d6e6fc
KG
1098 /* If we didn't find one, see if both OLD_RTX is a PLUS. If so, and
1099 NEW_RTX is a register, see if one operand of the PLUS is a
1100 temporary location. If so, NEW_RTX points into it. Otherwise,
1101 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1102 in common between them. If so, try a recursive call on those
1103 values. */
14a774a9
RK
1104 if (p == 0)
1105 {
82d6e6fc 1106 if (GET_CODE (old_rtx) != PLUS)
700f19f0
RK
1107 return;
1108
82d6e6fc 1109 if (REG_P (new_rtx))
700f19f0 1110 {
82d6e6fc
KG
1111 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1112 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
700f19f0
RK
1113 return;
1114 }
82d6e6fc 1115 else if (GET_CODE (new_rtx) != PLUS)
14a774a9
RK
1116 return;
1117
82d6e6fc
KG
1118 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1119 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1120 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1121 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1122 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1123 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1124 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1125 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
14a774a9
RK
1126
1127 return;
1128 }
1129
718fe406 1130 /* Otherwise add an alias for the temp's address. */
fb0703f7 1131 insert_temp_slot_address (new_rtx, p);
e5e76139
RK
1132}
1133
9cca6a99
MS
1134/* If X could be a reference to a temporary slot, mark that slot as
1135 belonging to the to one level higher than the current level. If X
1136 matched one of our slots, just mark that one. Otherwise, we can't
9474e8ab 1137 easily predict which it is, so upgrade all of them.
6f086dfc
RS
1138
1139 This is called when an ({...}) construct occurs and a statement
1140 returns a value in memory. */
1141
1142void
fa8db1f7 1143preserve_temp_slots (rtx x)
6f086dfc 1144{
0aea6467 1145 struct temp_slot *p = 0, *next;
6f086dfc 1146
e3a77161 1147 if (x == 0)
9474e8ab 1148 return;
f7b6d104 1149
8fff4fc1 1150 /* If X is a register that is being used as a pointer, see if we have
9474e8ab 1151 a temporary slot we know it points to. */
8fff4fc1
RH
1152 if (REG_P (x) && REG_POINTER (x))
1153 p = find_temp_slot_from_address (x);
f7b6d104 1154
8fff4fc1 1155 /* If X is not in memory or is at a constant address, it cannot be in
9474e8ab 1156 a temporary slot. */
8fff4fc1 1157 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
9474e8ab 1158 return;
8fff4fc1
RH
1159
1160 /* First see if we can find a match. */
1161 if (p == 0)
1162 p = find_temp_slot_from_address (XEXP (x, 0));
1163
1164 if (p != 0)
1165 {
8fff4fc1 1166 if (p->level == temp_slot_level)
9474e8ab 1167 move_slot_to_level (p, temp_slot_level - 1);
8fff4fc1 1168 return;
f7b6d104 1169 }
e9a25f70 1170
8fff4fc1
RH
1171 /* Otherwise, preserve all non-kept slots at this level. */
1172 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
e9a25f70 1173 {
8fff4fc1 1174 next = p->next;
9474e8ab 1175 move_slot_to_level (p, temp_slot_level - 1);
8fff4fc1 1176 }
fe9b4957
MM
1177}
1178
8fff4fc1
RH
1179/* Free all temporaries used so far. This is normally called at the
1180 end of generating code for a statement. */
fe9b4957 1181
8fff4fc1
RH
1182void
1183free_temp_slots (void)
fe9b4957 1184{
8fff4fc1 1185 struct temp_slot *p, *next;
5d7cefe5 1186 bool some_available = false;
fe9b4957 1187
8fff4fc1
RH
1188 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1189 {
1190 next = p->next;
9474e8ab
MM
1191 make_slot_available (p);
1192 some_available = true;
8fff4fc1 1193 }
fe9b4957 1194
5d7cefe5
MM
1195 if (some_available)
1196 {
1197 remove_unused_temp_slot_addresses ();
1198 combine_temp_slots ();
1199 }
8fff4fc1 1200}
fe9b4957 1201
8fff4fc1 1202/* Push deeper into the nesting level for stack temporaries. */
fe9b4957 1203
8fff4fc1
RH
1204void
1205push_temp_slots (void)
fe9b4957 1206{
8fff4fc1 1207 temp_slot_level++;
fe9b4957
MM
1208}
1209
8fff4fc1
RH
1210/* Pop a temporary nesting level. All slots in use in the current level
1211 are freed. */
fe9b4957 1212
8fff4fc1
RH
1213void
1214pop_temp_slots (void)
fe9b4957 1215{
9474e8ab 1216 free_temp_slots ();
8fff4fc1 1217 temp_slot_level--;
8c36698e
NC
1218}
1219
8fff4fc1 1220/* Initialize temporary slots. */
e9a25f70
JL
1221
1222void
8fff4fc1 1223init_temp_slots (void)
e9a25f70 1224{
8fff4fc1
RH
1225 /* We have not allocated any temporaries yet. */
1226 avail_temp_slots = 0;
9771b263 1227 vec_alloc (used_temp_slots, 0);
8fff4fc1 1228 temp_slot_level = 0;
f8395d62 1229 n_temp_slots_in_use = 0;
fb0703f7
SB
1230
1231 /* Set up the table to map addresses to temp slots. */
1232 if (! temp_slot_address_table)
2a22f99c 1233 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
fb0703f7 1234 else
2a22f99c 1235 temp_slot_address_table->empty ();
8fff4fc1
RH
1236}
1237\f
6399c0ab
SB
1238/* Functions and data structures to keep track of the values hard regs
1239 had at the start of the function. */
1240
1241/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1242 and has_hard_reg_initial_val.. */
1243typedef struct GTY(()) initial_value_pair {
1244 rtx hard_reg;
1245 rtx pseudo;
1246} initial_value_pair;
1247/* ??? This could be a VEC but there is currently no way to define an
1248 opaque VEC type. This could be worked around by defining struct
1249 initial_value_pair in function.h. */
1250typedef struct GTY(()) initial_value_struct {
1251 int num_entries;
1252 int max_entries;
1253 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1254} initial_value_struct;
1255
1256/* If a pseudo represents an initial hard reg (or expression), return
1257 it, else return NULL_RTX. */
1258
1259rtx
1260get_hard_reg_initial_reg (rtx reg)
1261{
1262 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1263 int i;
1264
1265 if (ivs == 0)
1266 return NULL_RTX;
1267
1268 for (i = 0; i < ivs->num_entries; i++)
1269 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1270 return ivs->entries[i].hard_reg;
1271
1272 return NULL_RTX;
1273}
1274
1275/* Make sure that there's a pseudo register of mode MODE that stores the
1276 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1277
1278rtx
ef4bddc2 1279get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
6399c0ab
SB
1280{
1281 struct initial_value_struct *ivs;
1282 rtx rv;
1283
1284 rv = has_hard_reg_initial_val (mode, regno);
1285 if (rv)
1286 return rv;
1287
1288 ivs = crtl->hard_reg_initial_vals;
1289 if (ivs == 0)
1290 {
766090c2 1291 ivs = ggc_alloc<initial_value_struct> ();
6399c0ab
SB
1292 ivs->num_entries = 0;
1293 ivs->max_entries = 5;
766090c2 1294 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
6399c0ab
SB
1295 crtl->hard_reg_initial_vals = ivs;
1296 }
1297
1298 if (ivs->num_entries >= ivs->max_entries)
1299 {
1300 ivs->max_entries += 5;
1301 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1302 ivs->max_entries);
1303 }
1304
1305 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1306 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1307
1308 return ivs->entries[ivs->num_entries++].pseudo;
1309}
1310
1311/* See if get_hard_reg_initial_val has been used to create a pseudo
1312 for the initial value of hard register REGNO in mode MODE. Return
1313 the associated pseudo if so, otherwise return NULL. */
1314
1315rtx
ef4bddc2 1316has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
6399c0ab
SB
1317{
1318 struct initial_value_struct *ivs;
1319 int i;
1320
1321 ivs = crtl->hard_reg_initial_vals;
1322 if (ivs != 0)
1323 for (i = 0; i < ivs->num_entries; i++)
1324 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1325 && REGNO (ivs->entries[i].hard_reg) == regno)
1326 return ivs->entries[i].pseudo;
1327
1328 return NULL_RTX;
1329}
1330
1331unsigned int
1332emit_initial_value_sets (void)
1333{
1334 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1335 int i;
691fe203 1336 rtx_insn *seq;
6399c0ab
SB
1337
1338 if (ivs == 0)
1339 return 0;
1340
1341 start_sequence ();
1342 for (i = 0; i < ivs->num_entries; i++)
1343 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1344 seq = get_insns ();
1345 end_sequence ();
1346
1347 emit_insn_at_entry (seq);
1348 return 0;
1349}
1350
1351/* Return the hardreg-pseudoreg initial values pair entry I and
1352 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1353bool
1354initial_value_entry (int i, rtx *hreg, rtx *preg)
1355{
1356 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1357 if (!ivs || i >= ivs->num_entries)
1358 return false;
1359
1360 *hreg = ivs->entries[i].hard_reg;
1361 *preg = ivs->entries[i].pseudo;
1362 return true;
1363}
1364\f
8fff4fc1
RH
1365/* These routines are responsible for converting virtual register references
1366 to the actual hard register references once RTL generation is complete.
718fe406 1367
8fff4fc1
RH
1368 The following four variables are used for communication between the
1369 routines. They contain the offsets of the virtual registers from their
1370 respective hard registers. */
fe9b4957 1371
8fff4fc1
RH
1372static int in_arg_offset;
1373static int var_offset;
1374static int dynamic_offset;
1375static int out_arg_offset;
1376static int cfa_offset;
8a5275eb 1377
8fff4fc1
RH
1378/* In most machines, the stack pointer register is equivalent to the bottom
1379 of the stack. */
718fe406 1380
8fff4fc1
RH
1381#ifndef STACK_POINTER_OFFSET
1382#define STACK_POINTER_OFFSET 0
1383#endif
8c36698e 1384
ddbb449f
AM
1385#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1386#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1387#endif
1388
8fff4fc1
RH
1389/* If not defined, pick an appropriate default for the offset of dynamically
1390 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
ddbb449f 1391 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
fe9b4957 1392
8fff4fc1 1393#ifndef STACK_DYNAMIC_OFFSET
8a5275eb 1394
8fff4fc1
RH
1395/* The bottom of the stack points to the actual arguments. If
1396 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1397 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1398 stack space for register parameters is not pushed by the caller, but
1399 rather part of the fixed stack areas and hence not included in
38173d38 1400 `crtl->outgoing_args_size'. Nevertheless, we must allow
8fff4fc1 1401 for it when allocating stack dynamic objects. */
8a5275eb 1402
ddbb449f 1403#ifdef INCOMING_REG_PARM_STACK_SPACE
8fff4fc1
RH
1404#define STACK_DYNAMIC_OFFSET(FNDECL) \
1405((ACCUMULATE_OUTGOING_ARGS \
38173d38 1406 ? (crtl->outgoing_args_size \
81464b2c 1407 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
ddbb449f 1408 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
ac294f0b 1409 : 0) + (STACK_POINTER_OFFSET))
8fff4fc1
RH
1410#else
1411#define STACK_DYNAMIC_OFFSET(FNDECL) \
38173d38 1412((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
8fff4fc1
RH
1413 + (STACK_POINTER_OFFSET))
1414#endif
1415#endif
4fa48eae 1416
659e47fb 1417\f
bbf9b913
RH
1418/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1419 is a virtual register, return the equivalent hard register and set the
1420 offset indirectly through the pointer. Otherwise, return 0. */
6f086dfc 1421
bbf9b913
RH
1422static rtx
1423instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
6f086dfc 1424{
82d6e6fc 1425 rtx new_rtx;
bbf9b913 1426 HOST_WIDE_INT offset;
6f086dfc 1427
bbf9b913 1428 if (x == virtual_incoming_args_rtx)
2e3f842f 1429 {
d015f7cc 1430 if (stack_realign_drap)
2e3f842f 1431 {
d015f7cc
L
1432 /* Replace virtual_incoming_args_rtx with internal arg
1433 pointer if DRAP is used to realign stack. */
82d6e6fc 1434 new_rtx = crtl->args.internal_arg_pointer;
2e3f842f
L
1435 offset = 0;
1436 }
1437 else
82d6e6fc 1438 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
2e3f842f 1439 }
bbf9b913 1440 else if (x == virtual_stack_vars_rtx)
82d6e6fc 1441 new_rtx = frame_pointer_rtx, offset = var_offset;
bbf9b913 1442 else if (x == virtual_stack_dynamic_rtx)
82d6e6fc 1443 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
bbf9b913 1444 else if (x == virtual_outgoing_args_rtx)
82d6e6fc 1445 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
bbf9b913 1446 else if (x == virtual_cfa_rtx)
f6672e8e
RH
1447 {
1448#ifdef FRAME_POINTER_CFA_OFFSET
82d6e6fc 1449 new_rtx = frame_pointer_rtx;
f6672e8e 1450#else
82d6e6fc 1451 new_rtx = arg_pointer_rtx;
f6672e8e
RH
1452#endif
1453 offset = cfa_offset;
1454 }
32990d5b
JJ
1455 else if (x == virtual_preferred_stack_boundary_rtx)
1456 {
1457 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1458 offset = 0;
1459 }
bbf9b913
RH
1460 else
1461 return NULL_RTX;
6f086dfc 1462
bbf9b913 1463 *poffset = offset;
82d6e6fc 1464 return new_rtx;
6f086dfc
RS
1465}
1466
b8704801
RS
1467/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1468 registers present inside of *LOC. The expression is simplified,
1469 as much as possible, but is not to be considered "valid" in any sense
1470 implied by the target. Return true if any change is made. */
6f086dfc 1471
b8704801
RS
1472static bool
1473instantiate_virtual_regs_in_rtx (rtx *loc)
6f086dfc 1474{
b8704801
RS
1475 if (!*loc)
1476 return false;
1477 bool changed = false;
1478 subrtx_ptr_iterator::array_type array;
1479 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
6f086dfc 1480 {
b8704801
RS
1481 rtx *loc = *iter;
1482 if (rtx x = *loc)
bbf9b913 1483 {
b8704801
RS
1484 rtx new_rtx;
1485 HOST_WIDE_INT offset;
1486 switch (GET_CODE (x))
1487 {
1488 case REG:
1489 new_rtx = instantiate_new_reg (x, &offset);
1490 if (new_rtx)
1491 {
1492 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1493 changed = true;
1494 }
1495 iter.skip_subrtxes ();
1496 break;
bbf9b913 1497
b8704801
RS
1498 case PLUS:
1499 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1500 if (new_rtx)
1501 {
1502 XEXP (x, 0) = new_rtx;
1503 *loc = plus_constant (GET_MODE (x), x, offset, true);
1504 changed = true;
1505 iter.skip_subrtxes ();
1506 break;
1507 }
e5e809f4 1508
b8704801
RS
1509 /* FIXME -- from old code */
1510 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1511 we can commute the PLUS and SUBREG because pointers into the
1512 frame are well-behaved. */
1513 break;
ce717ce4 1514
b8704801
RS
1515 default:
1516 break;
1517 }
1518 }
6f086dfc 1519 }
b8704801 1520 return changed;
6f086dfc
RS
1521}
1522
bbf9b913
RH
1523/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1524 matches the predicate for insn CODE operand OPERAND. */
6f086dfc 1525
bbf9b913
RH
1526static int
1527safe_insn_predicate (int code, int operand, rtx x)
6f086dfc 1528{
2ef6ce06 1529 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
bbf9b913 1530}
5a73491b 1531
bbf9b913
RH
1532/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1533 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1534
1535static void
691fe203 1536instantiate_virtual_regs_in_insn (rtx_insn *insn)
5a73491b 1537{
bbf9b913
RH
1538 HOST_WIDE_INT offset;
1539 int insn_code, i;
9325973e 1540 bool any_change = false;
691fe203
DM
1541 rtx set, new_rtx, x;
1542 rtx_insn *seq;
32e66afd 1543
bbf9b913
RH
1544 /* There are some special cases to be handled first. */
1545 set = single_set (insn);
1546 if (set)
32e66afd 1547 {
bbf9b913
RH
1548 /* We're allowed to assign to a virtual register. This is interpreted
1549 to mean that the underlying register gets assigned the inverse
1550 transformation. This is used, for example, in the handling of
1551 non-local gotos. */
82d6e6fc
KG
1552 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1553 if (new_rtx)
bbf9b913
RH
1554 {
1555 start_sequence ();
32e66afd 1556
b8704801 1557 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
82d6e6fc 1558 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
69a59f0f 1559 gen_int_mode (-offset, GET_MODE (new_rtx)));
82d6e6fc
KG
1560 x = force_operand (x, new_rtx);
1561 if (x != new_rtx)
1562 emit_move_insn (new_rtx, x);
5a73491b 1563
bbf9b913
RH
1564 seq = get_insns ();
1565 end_sequence ();
5a73491b 1566
bbf9b913
RH
1567 emit_insn_before (seq, insn);
1568 delete_insn (insn);
1569 return;
1570 }
5a73491b 1571
bbf9b913
RH
1572 /* Handle a straight copy from a virtual register by generating a
1573 new add insn. The difference between this and falling through
1574 to the generic case is avoiding a new pseudo and eliminating a
1575 move insn in the initial rtl stream. */
82d6e6fc
KG
1576 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1577 if (new_rtx && offset != 0
bbf9b913
RH
1578 && REG_P (SET_DEST (set))
1579 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1580 {
1581 start_sequence ();
5a73491b 1582
2f1cd2eb
RS
1583 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1584 gen_int_mode (offset,
1585 GET_MODE (SET_DEST (set))),
1586 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1587 if (x != SET_DEST (set))
1588 emit_move_insn (SET_DEST (set), x);
770ae6cc 1589
bbf9b913
RH
1590 seq = get_insns ();
1591 end_sequence ();
87ce34d6 1592
bbf9b913
RH
1593 emit_insn_before (seq, insn);
1594 delete_insn (insn);
87ce34d6 1595 return;
bbf9b913 1596 }
5a73491b 1597
bbf9b913 1598 extract_insn (insn);
9325973e 1599 insn_code = INSN_CODE (insn);
5a73491b 1600
bbf9b913
RH
1601 /* Handle a plus involving a virtual register by determining if the
1602 operands remain valid if they're modified in place. */
1603 if (GET_CODE (SET_SRC (set)) == PLUS
1604 && recog_data.n_operands >= 3
1605 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1606 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
481683e1 1607 && CONST_INT_P (recog_data.operand[2])
82d6e6fc 1608 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
bbf9b913
RH
1609 {
1610 offset += INTVAL (recog_data.operand[2]);
5a73491b 1611
bbf9b913 1612 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1613 if (offset == 0
1614 && REG_P (SET_DEST (set))
1615 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1616 {
1617 start_sequence ();
82d6e6fc 1618 emit_move_insn (SET_DEST (set), new_rtx);
bbf9b913
RH
1619 seq = get_insns ();
1620 end_sequence ();
d1405722 1621
bbf9b913
RH
1622 emit_insn_before (seq, insn);
1623 delete_insn (insn);
1624 return;
1625 }
d1405722 1626
bbf9b913 1627 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1628
1629 /* Using validate_change and apply_change_group here leaves
1630 recog_data in an invalid state. Since we know exactly what
1631 we want to check, do those two by hand. */
82d6e6fc 1632 if (safe_insn_predicate (insn_code, 1, new_rtx)
bbf9b913
RH
1633 && safe_insn_predicate (insn_code, 2, x))
1634 {
82d6e6fc 1635 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
bbf9b913
RH
1636 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1637 any_change = true;
9325973e
RH
1638
1639 /* Fall through into the regular operand fixup loop in
1640 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1641 }
1642 }
1643 }
d1405722 1644 else
9325973e
RH
1645 {
1646 extract_insn (insn);
1647 insn_code = INSN_CODE (insn);
1648 }
5dc96d60 1649
bbf9b913
RH
1650 /* In the general case, we expect virtual registers to appear only in
1651 operands, and then only as either bare registers or inside memories. */
1652 for (i = 0; i < recog_data.n_operands; ++i)
1653 {
1654 x = recog_data.operand[i];
1655 switch (GET_CODE (x))
1656 {
1657 case MEM:
1658 {
1659 rtx addr = XEXP (x, 0);
bbf9b913 1660
b8704801 1661 if (!instantiate_virtual_regs_in_rtx (&addr))
bbf9b913
RH
1662 continue;
1663
1664 start_sequence ();
23b33725 1665 x = replace_equiv_address (x, addr, true);
a5bfb13a
MM
1666 /* It may happen that the address with the virtual reg
1667 was valid (e.g. based on the virtual stack reg, which might
1668 be acceptable to the predicates with all offsets), whereas
1669 the address now isn't anymore, for instance when the address
1670 is still offsetted, but the base reg isn't virtual-stack-reg
1671 anymore. Below we would do a force_reg on the whole operand,
1672 but this insn might actually only accept memory. Hence,
1673 before doing that last resort, try to reload the address into
1674 a register, so this operand stays a MEM. */
1675 if (!safe_insn_predicate (insn_code, i, x))
1676 {
1677 addr = force_reg (GET_MODE (addr), addr);
23b33725 1678 x = replace_equiv_address (x, addr, true);
a5bfb13a 1679 }
bbf9b913
RH
1680 seq = get_insns ();
1681 end_sequence ();
1682 if (seq)
1683 emit_insn_before (seq, insn);
1684 }
1685 break;
1686
1687 case REG:
82d6e6fc
KG
1688 new_rtx = instantiate_new_reg (x, &offset);
1689 if (new_rtx == NULL)
bbf9b913
RH
1690 continue;
1691 if (offset == 0)
82d6e6fc 1692 x = new_rtx;
bbf9b913
RH
1693 else
1694 {
1695 start_sequence ();
6f086dfc 1696
bbf9b913
RH
1697 /* Careful, special mode predicates may have stuff in
1698 insn_data[insn_code].operand[i].mode that isn't useful
1699 to us for computing a new value. */
1700 /* ??? Recognize address_operand and/or "p" constraints
1701 to see if (plus new offset) is a valid before we put
1702 this through expand_simple_binop. */
82d6e6fc 1703 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
2f1cd2eb
RS
1704 gen_int_mode (offset, GET_MODE (x)),
1705 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1706 seq = get_insns ();
1707 end_sequence ();
1708 emit_insn_before (seq, insn);
1709 }
1710 break;
6f086dfc 1711
bbf9b913 1712 case SUBREG:
82d6e6fc
KG
1713 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1714 if (new_rtx == NULL)
bbf9b913
RH
1715 continue;
1716 if (offset != 0)
1717 {
1718 start_sequence ();
2f1cd2eb
RS
1719 new_rtx = expand_simple_binop
1720 (GET_MODE (new_rtx), PLUS, new_rtx,
1721 gen_int_mode (offset, GET_MODE (new_rtx)),
1722 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1723 seq = get_insns ();
1724 end_sequence ();
1725 emit_insn_before (seq, insn);
1726 }
82d6e6fc
KG
1727 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1728 GET_MODE (new_rtx), SUBREG_BYTE (x));
7314c7dd 1729 gcc_assert (x);
bbf9b913 1730 break;
6f086dfc 1731
bbf9b913
RH
1732 default:
1733 continue;
1734 }
6f086dfc 1735
bbf9b913
RH
1736 /* At this point, X contains the new value for the operand.
1737 Validate the new value vs the insn predicate. Note that
1738 asm insns will have insn_code -1 here. */
1739 if (!safe_insn_predicate (insn_code, i, x))
6ba1bd36
JM
1740 {
1741 start_sequence ();
f7ce0951
SE
1742 if (REG_P (x))
1743 {
1744 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1745 x = copy_to_reg (x);
1746 }
1747 else
1748 x = force_reg (insn_data[insn_code].operand[i].mode, x);
6ba1bd36
JM
1749 seq = get_insns ();
1750 end_sequence ();
1751 if (seq)
1752 emit_insn_before (seq, insn);
1753 }
6f086dfc 1754
bbf9b913
RH
1755 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1756 any_change = true;
1757 }
6f086dfc 1758
bbf9b913
RH
1759 if (any_change)
1760 {
1761 /* Propagate operand changes into the duplicates. */
1762 for (i = 0; i < recog_data.n_dups; ++i)
1763 *recog_data.dup_loc[i]
3e916873 1764 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
5dc96d60 1765
bbf9b913
RH
1766 /* Force re-recognition of the instruction for validation. */
1767 INSN_CODE (insn) = -1;
1768 }
6f086dfc 1769
bbf9b913 1770 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1771 {
bbf9b913 1772 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1773 {
bbf9b913 1774 error_for_asm (insn, "impossible constraint in %<asm%>");
5a860835
JJ
1775 /* For asm goto, instead of fixing up all the edges
1776 just clear the template and clear input operands
1777 (asm goto doesn't have any output operands). */
1778 if (JUMP_P (insn))
1779 {
1780 rtx asm_op = extract_asm_operands (PATTERN (insn));
1781 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1782 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1783 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1784 }
1785 else
1786 delete_insn (insn);
bbf9b913
RH
1787 }
1788 }
1789 else
1790 {
1791 if (recog_memoized (insn) < 0)
1792 fatal_insn_not_found (insn);
1793 }
1794}
14a774a9 1795
bbf9b913
RH
1796/* Subroutine of instantiate_decls. Given RTL representing a decl,
1797 do any instantiation required. */
14a774a9 1798
e41b2a33
PB
1799void
1800instantiate_decl_rtl (rtx x)
bbf9b913
RH
1801{
1802 rtx addr;
6f086dfc 1803
bbf9b913
RH
1804 if (x == 0)
1805 return;
6f086dfc 1806
bbf9b913
RH
1807 /* If this is a CONCAT, recurse for the pieces. */
1808 if (GET_CODE (x) == CONCAT)
1809 {
e41b2a33
PB
1810 instantiate_decl_rtl (XEXP (x, 0));
1811 instantiate_decl_rtl (XEXP (x, 1));
bbf9b913
RH
1812 return;
1813 }
6f086dfc 1814
bbf9b913
RH
1815 /* If this is not a MEM, no need to do anything. Similarly if the
1816 address is a constant or a register that is not a virtual register. */
1817 if (!MEM_P (x))
1818 return;
6f086dfc 1819
bbf9b913
RH
1820 addr = XEXP (x, 0);
1821 if (CONSTANT_P (addr)
1822 || (REG_P (addr)
1823 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1824 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1825 return;
6f086dfc 1826
b8704801 1827 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
bbf9b913 1828}
6f086dfc 1829
434eba35
JJ
1830/* Helper for instantiate_decls called via walk_tree: Process all decls
1831 in the given DECL_VALUE_EXPR. */
1832
1833static tree
1834instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1835{
1836 tree t = *tp;
726a989a 1837 if (! EXPR_P (t))
434eba35
JJ
1838 {
1839 *walk_subtrees = 0;
37d6a488
AO
1840 if (DECL_P (t))
1841 {
1842 if (DECL_RTL_SET_P (t))
1843 instantiate_decl_rtl (DECL_RTL (t));
1844 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1845 && DECL_INCOMING_RTL (t))
1846 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1847 if ((TREE_CODE (t) == VAR_DECL
1848 || TREE_CODE (t) == RESULT_DECL)
1849 && DECL_HAS_VALUE_EXPR_P (t))
1850 {
1851 tree v = DECL_VALUE_EXPR (t);
1852 walk_tree (&v, instantiate_expr, NULL, NULL);
1853 }
1854 }
434eba35
JJ
1855 }
1856 return NULL;
1857}
1858
bbf9b913
RH
1859/* Subroutine of instantiate_decls: Process all decls in the given
1860 BLOCK node and all its subblocks. */
6f086dfc 1861
bbf9b913
RH
1862static void
1863instantiate_decls_1 (tree let)
1864{
1865 tree t;
6f086dfc 1866
910ad8de 1867 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
434eba35
JJ
1868 {
1869 if (DECL_RTL_SET_P (t))
e41b2a33 1870 instantiate_decl_rtl (DECL_RTL (t));
434eba35
JJ
1871 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1872 {
1873 tree v = DECL_VALUE_EXPR (t);
1874 walk_tree (&v, instantiate_expr, NULL, NULL);
1875 }
1876 }
6f086dfc 1877
bbf9b913 1878 /* Process all subblocks. */
87caf699 1879 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
bbf9b913
RH
1880 instantiate_decls_1 (t);
1881}
6f086dfc 1882
bbf9b913
RH
1883/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1884 all virtual registers in their DECL_RTL's. */
6f086dfc 1885
bbf9b913
RH
1886static void
1887instantiate_decls (tree fndecl)
1888{
c021f10b
NF
1889 tree decl;
1890 unsigned ix;
6f086dfc 1891
bbf9b913 1892 /* Process all parameters of the function. */
910ad8de 1893 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
bbf9b913 1894 {
e41b2a33
PB
1895 instantiate_decl_rtl (DECL_RTL (decl));
1896 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1897 if (DECL_HAS_VALUE_EXPR_P (decl))
1898 {
1899 tree v = DECL_VALUE_EXPR (decl);
1900 walk_tree (&v, instantiate_expr, NULL, NULL);
1901 }
bbf9b913 1902 }
4fd796bb 1903
37d6a488
AO
1904 if ((decl = DECL_RESULT (fndecl))
1905 && TREE_CODE (decl) == RESULT_DECL)
1906 {
1907 if (DECL_RTL_SET_P (decl))
1908 instantiate_decl_rtl (DECL_RTL (decl));
1909 if (DECL_HAS_VALUE_EXPR_P (decl))
1910 {
1911 tree v = DECL_VALUE_EXPR (decl);
1912 walk_tree (&v, instantiate_expr, NULL, NULL);
1913 }
1914 }
1915
3fd48b12
EB
1916 /* Process the saved static chain if it exists. */
1917 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1918 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1919 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1920
bbf9b913
RH
1921 /* Now process all variables defined in the function or its subblocks. */
1922 instantiate_decls_1 (DECL_INITIAL (fndecl));
802e9f8e 1923
c021f10b
NF
1924 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1925 if (DECL_RTL_SET_P (decl))
1926 instantiate_decl_rtl (DECL_RTL (decl));
9771b263 1927 vec_free (cfun->local_decls);
bbf9b913 1928}
6f086dfc 1929
bbf9b913
RH
1930/* Pass through the INSNS of function FNDECL and convert virtual register
1931 references to hard register references. */
6f086dfc 1932
c2924966 1933static unsigned int
bbf9b913
RH
1934instantiate_virtual_regs (void)
1935{
691fe203 1936 rtx_insn *insn;
6f086dfc 1937
bbf9b913
RH
1938 /* Compute the offsets to use for this function. */
1939 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1940 var_offset = STARTING_FRAME_OFFSET;
1941 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1942 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1943#ifdef FRAME_POINTER_CFA_OFFSET
1944 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1945#else
bbf9b913 1946 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1947#endif
e9a25f70 1948
bbf9b913
RH
1949 /* Initialize recognition, indicating that volatile is OK. */
1950 init_recog ();
6f086dfc 1951
bbf9b913
RH
1952 /* Scan through all the insns, instantiating every virtual register still
1953 present. */
45dbce1b
NF
1954 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1955 if (INSN_P (insn))
1956 {
1957 /* These patterns in the instruction stream can never be recognized.
1958 Fortunately, they shouldn't contain virtual registers either. */
39718607 1959 if (GET_CODE (PATTERN (insn)) == USE
45dbce1b 1960 || GET_CODE (PATTERN (insn)) == CLOBBER
45dbce1b
NF
1961 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1962 continue;
1963 else if (DEBUG_INSN_P (insn))
b8704801 1964 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
45dbce1b
NF
1965 else
1966 instantiate_virtual_regs_in_insn (insn);
ba4807a0 1967
4654c0cf 1968 if (insn->deleted ())
45dbce1b 1969 continue;
7114321e 1970
b8704801 1971 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
ba4807a0 1972
45dbce1b
NF
1973 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1974 if (CALL_P (insn))
b8704801 1975 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
45dbce1b 1976 }
6f086dfc 1977
bbf9b913
RH
1978 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1979 instantiate_decls (current_function_decl);
1980
e41b2a33
PB
1981 targetm.instantiate_decls ();
1982
bbf9b913
RH
1983 /* Indicate that, from now on, assign_stack_local should use
1984 frame_pointer_rtx. */
1985 virtuals_instantiated = 1;
d3c12306 1986
c2924966 1987 return 0;
6f086dfc 1988}
ef330312 1989
27a4cd48
DM
1990namespace {
1991
1992const pass_data pass_data_instantiate_virtual_regs =
1993{
1994 RTL_PASS, /* type */
1995 "vregs", /* name */
1996 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
1997 TV_NONE, /* tv_id */
1998 0, /* properties_required */
1999 0, /* properties_provided */
2000 0, /* properties_destroyed */
2001 0, /* todo_flags_start */
2002 0, /* todo_flags_finish */
ef330312
PB
2003};
2004
27a4cd48
DM
2005class pass_instantiate_virtual_regs : public rtl_opt_pass
2006{
2007public:
c3284718
RS
2008 pass_instantiate_virtual_regs (gcc::context *ctxt)
2009 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
27a4cd48
DM
2010 {}
2011
2012 /* opt_pass methods: */
be55bfe6
TS
2013 virtual unsigned int execute (function *)
2014 {
2015 return instantiate_virtual_regs ();
2016 }
27a4cd48
DM
2017
2018}; // class pass_instantiate_virtual_regs
2019
2020} // anon namespace
2021
2022rtl_opt_pass *
2023make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2024{
2025 return new pass_instantiate_virtual_regs (ctxt);
2026}
2027
6f086dfc 2028\f
d181c154
RS
2029/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2030 This means a type for which function calls must pass an address to the
2031 function or get an address back from the function.
2032 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
2033
2034int
586de218 2035aggregate_value_p (const_tree exp, const_tree fntype)
6f086dfc 2036{
d47d0a8d 2037 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
9d790a4f
RS
2038 int i, regno, nregs;
2039 rtx reg;
2f939d94 2040
61f71b34
DD
2041 if (fntype)
2042 switch (TREE_CODE (fntype))
2043 {
2044 case CALL_EXPR:
d47d0a8d
EB
2045 {
2046 tree fndecl = get_callee_fndecl (fntype);
1304953e
JJ
2047 if (fndecl)
2048 fntype = TREE_TYPE (fndecl);
2049 else if (CALL_EXPR_FN (fntype))
2050 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2051 else
2052 /* For internal functions, assume nothing needs to be
2053 returned in memory. */
2054 return 0;
d47d0a8d 2055 }
61f71b34
DD
2056 break;
2057 case FUNCTION_DECL:
d47d0a8d 2058 fntype = TREE_TYPE (fntype);
61f71b34
DD
2059 break;
2060 case FUNCTION_TYPE:
2061 case METHOD_TYPE:
2062 break;
2063 case IDENTIFIER_NODE:
d47d0a8d 2064 fntype = NULL_TREE;
61f71b34
DD
2065 break;
2066 default:
d47d0a8d 2067 /* We don't expect other tree types here. */
0bccc606 2068 gcc_unreachable ();
61f71b34
DD
2069 }
2070
d47d0a8d 2071 if (VOID_TYPE_P (type))
d7bf8ada 2072 return 0;
500c353d 2073
ebf0bf7f
JJ
2074 /* If a record should be passed the same as its first (and only) member
2075 don't pass it as an aggregate. */
2076 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2077 return aggregate_value_p (first_field (type), fntype);
2078
cc77ae10
JM
2079 /* If the front end has decided that this needs to be passed by
2080 reference, do so. */
2081 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2082 && DECL_BY_REFERENCE (exp))
2083 return 1;
500c353d 2084
d47d0a8d
EB
2085 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2086 if (fntype && TREE_ADDRESSABLE (fntype))
500c353d 2087 return 1;
b8698a0f 2088
956d6950 2089 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
2090 and thus can't be returned in registers. */
2091 if (TREE_ADDRESSABLE (type))
2092 return 1;
d47d0a8d 2093
05e3bdb9 2094 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 2095 return 1;
d47d0a8d
EB
2096
2097 if (targetm.calls.return_in_memory (type, fntype))
2098 return 1;
2099
9d790a4f
RS
2100 /* Make sure we have suitable call-clobbered regs to return
2101 the value in; if not, we must return it in memory. */
1d636cc6 2102 reg = hard_function_value (type, 0, fntype, 0);
e71f7aa5
JW
2103
2104 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2105 it is OK. */
f8cfc6aa 2106 if (!REG_P (reg))
e71f7aa5
JW
2107 return 0;
2108
9d790a4f 2109 regno = REGNO (reg);
66fd46b6 2110 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
2111 for (i = 0; i < nregs; i++)
2112 if (! call_used_regs[regno + i])
2113 return 1;
d47d0a8d 2114
6f086dfc
RS
2115 return 0;
2116}
2117\f
8fff4fc1
RH
2118/* Return true if we should assign DECL a pseudo register; false if it
2119 should live on the local stack. */
2120
2121bool
fa233e34 2122use_register_for_decl (const_tree decl)
8fff4fc1 2123{
c3284718 2124 if (!targetm.calls.allocate_stack_slots_for_args ())
007e61c2 2125 return true;
b8698a0f 2126
8fff4fc1
RH
2127 /* Honor volatile. */
2128 if (TREE_SIDE_EFFECTS (decl))
2129 return false;
2130
2131 /* Honor addressability. */
2132 if (TREE_ADDRESSABLE (decl))
2133 return false;
2134
d5e254e1
IE
2135 /* Decl is implicitly addressible by bound stores and loads
2136 if it is an aggregate holding bounds. */
2137 if (chkp_function_instrumented_p (current_function_decl)
2138 && TREE_TYPE (decl)
2139 && !BOUNDED_P (decl)
2140 && chkp_type_has_pointer (TREE_TYPE (decl)))
2141 return false;
2142
8fff4fc1
RH
2143 /* Only register-like things go in registers. */
2144 if (DECL_MODE (decl) == BLKmode)
2145 return false;
2146
2147 /* If -ffloat-store specified, don't put explicit float variables
2148 into registers. */
2149 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2150 propagates values across these stores, and it probably shouldn't. */
2151 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2152 return false;
2153
78e0d62b
RH
2154 /* If we're not interested in tracking debugging information for
2155 this decl, then we can certainly put it in a register. */
2156 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
2157 return true;
2158
d130d647
JJ
2159 if (optimize)
2160 return true;
2161
2162 if (!DECL_REGISTER (decl))
2163 return false;
2164
2165 switch (TREE_CODE (TREE_TYPE (decl)))
2166 {
2167 case RECORD_TYPE:
2168 case UNION_TYPE:
2169 case QUAL_UNION_TYPE:
2170 /* When not optimizing, disregard register keyword for variables with
2171 types containing methods, otherwise the methods won't be callable
2172 from the debugger. */
2173 if (TYPE_METHODS (TREE_TYPE (decl)))
2174 return false;
2175 break;
2176 default:
2177 break;
2178 }
2179
2180 return true;
8fff4fc1
RH
2181}
2182
0976078c
RH
2183/* Return true if TYPE should be passed by invisible reference. */
2184
2185bool
ef4bddc2 2186pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
8cd5a4e0 2187 tree type, bool named_arg)
0976078c
RH
2188{
2189 if (type)
2190 {
2191 /* If this type contains non-trivial constructors, then it is
2192 forbidden for the middle-end to create any new copies. */
2193 if (TREE_ADDRESSABLE (type))
2194 return true;
2195
d58247a3
RH
2196 /* GCC post 3.4 passes *all* variable sized types by reference. */
2197 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c 2198 return true;
ebf0bf7f
JJ
2199
2200 /* If a record type should be passed the same as its first (and only)
2201 member, use the type and mode of that member. */
2202 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2203 {
2204 type = TREE_TYPE (first_field (type));
2205 mode = TYPE_MODE (type);
2206 }
0976078c
RH
2207 }
2208
d5cc9181
JR
2209 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2210 type, named_arg);
0976078c
RH
2211}
2212
6cdd5672
RH
2213/* Return true if TYPE, which is passed by reference, should be callee
2214 copied instead of caller copied. */
2215
2216bool
ef4bddc2 2217reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
6cdd5672
RH
2218 tree type, bool named_arg)
2219{
2220 if (type && TREE_ADDRESSABLE (type))
2221 return false;
d5cc9181
JR
2222 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2223 named_arg);
6cdd5672
RH
2224}
2225
6071dc7f
RH
2226/* Structures to communicate between the subroutines of assign_parms.
2227 The first holds data persistent across all parameters, the second
2228 is cleared out for each parameter. */
6f086dfc 2229
6071dc7f 2230struct assign_parm_data_all
6f086dfc 2231{
d5cc9181
JR
2232 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2233 should become a job of the target or otherwise encapsulated. */
2234 CUMULATIVE_ARGS args_so_far_v;
2235 cumulative_args_t args_so_far;
6f086dfc 2236 struct args_size stack_args_size;
6071dc7f
RH
2237 tree function_result_decl;
2238 tree orig_fnargs;
7a688d52
DM
2239 rtx_insn *first_conversion_insn;
2240 rtx_insn *last_conversion_insn;
6071dc7f
RH
2241 HOST_WIDE_INT pretend_args_size;
2242 HOST_WIDE_INT extra_pretend_bytes;
2243 int reg_parm_stack_space;
2244};
6f086dfc 2245
6071dc7f
RH
2246struct assign_parm_data_one
2247{
2248 tree nominal_type;
2249 tree passed_type;
2250 rtx entry_parm;
2251 rtx stack_parm;
ef4bddc2
RS
2252 machine_mode nominal_mode;
2253 machine_mode passed_mode;
2254 machine_mode promoted_mode;
6071dc7f
RH
2255 struct locate_and_pad_arg_data locate;
2256 int partial;
2257 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
2258 BOOL_BITFIELD passed_pointer : 1;
2259 BOOL_BITFIELD on_stack : 1;
2260 BOOL_BITFIELD loaded_in_reg : 1;
2261};
ebb904cb 2262
d5e254e1
IE
2263struct bounds_parm_data
2264{
2265 assign_parm_data_one parm_data;
2266 tree bounds_parm;
2267 tree ptr_parm;
2268 rtx ptr_entry;
2269 int bound_no;
2270};
2271
6071dc7f 2272/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 2273
6071dc7f
RH
2274static void
2275assign_parms_initialize_all (struct assign_parm_data_all *all)
2276{
fc2f1f53 2277 tree fntype ATTRIBUTE_UNUSED;
6f086dfc 2278
6071dc7f
RH
2279 memset (all, 0, sizeof (*all));
2280
2281 fntype = TREE_TYPE (current_function_decl);
2282
2283#ifdef INIT_CUMULATIVE_INCOMING_ARGS
d5cc9181 2284 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
6071dc7f 2285#else
d5cc9181 2286 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
6071dc7f
RH
2287 current_function_decl, -1);
2288#endif
d5cc9181 2289 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
6071dc7f 2290
ddbb449f
AM
2291#ifdef INCOMING_REG_PARM_STACK_SPACE
2292 all->reg_parm_stack_space
2293 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
6071dc7f
RH
2294#endif
2295}
6f086dfc 2296
6071dc7f
RH
2297/* If ARGS contains entries with complex types, split the entry into two
2298 entries of the component type. Return a new list of substitutions are
2299 needed, else the old list. */
2300
3b3f318a 2301static void
9771b263 2302split_complex_args (vec<tree> *args)
6071dc7f 2303{
3b3f318a 2304 unsigned i;
6071dc7f
RH
2305 tree p;
2306
9771b263 2307 FOR_EACH_VEC_ELT (*args, i, p)
6071dc7f
RH
2308 {
2309 tree type = TREE_TYPE (p);
2310 if (TREE_CODE (type) == COMPLEX_TYPE
2311 && targetm.calls.split_complex_arg (type))
2312 {
2313 tree decl;
2314 tree subtype = TREE_TYPE (type);
6ccd356e 2315 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
2316
2317 /* Rewrite the PARM_DECL's type with its component. */
3b3f318a 2318 p = copy_node (p);
6071dc7f
RH
2319 TREE_TYPE (p) = subtype;
2320 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2321 DECL_MODE (p) = VOIDmode;
2322 DECL_SIZE (p) = NULL;
2323 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
2324 /* If this arg must go in memory, put it in a pseudo here.
2325 We can't allow it to go in memory as per normal parms,
2326 because the usual place might not have the imag part
2327 adjacent to the real part. */
2328 DECL_ARTIFICIAL (p) = addressable;
2329 DECL_IGNORED_P (p) = addressable;
2330 TREE_ADDRESSABLE (p) = 0;
6071dc7f 2331 layout_decl (p, 0);
9771b263 2332 (*args)[i] = p;
6071dc7f
RH
2333
2334 /* Build a second synthetic decl. */
c2255bc4
AH
2335 decl = build_decl (EXPR_LOCATION (p),
2336 PARM_DECL, NULL_TREE, subtype);
6071dc7f 2337 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
2338 DECL_ARTIFICIAL (decl) = addressable;
2339 DECL_IGNORED_P (decl) = addressable;
6071dc7f 2340 layout_decl (decl, 0);
9771b263 2341 args->safe_insert (++i, decl);
6071dc7f
RH
2342 }
2343 }
6071dc7f
RH
2344}
2345
2346/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2347 the hidden struct return argument, and (abi willing) complex args.
2348 Return the new parameter list. */
2349
9771b263 2350static vec<tree>
6071dc7f
RH
2351assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2352{
2353 tree fndecl = current_function_decl;
2354 tree fntype = TREE_TYPE (fndecl);
6e1aa848 2355 vec<tree> fnargs = vNULL;
3b3f318a
RG
2356 tree arg;
2357
910ad8de 2358 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
9771b263 2359 fnargs.safe_push (arg);
3b3f318a
RG
2360
2361 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2362
2363 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2364 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
e3b5732b 2365 && ! cfun->returns_pcc_struct
61f71b34 2366 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2367 {
f9f29478 2368 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2369 tree decl;
6f086dfc 2370
c2255bc4 2371 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
8dcfef8f 2372 PARM_DECL, get_identifier (".result_ptr"), type);
6071dc7f
RH
2373 DECL_ARG_TYPE (decl) = type;
2374 DECL_ARTIFICIAL (decl) = 1;
8dcfef8f
AO
2375 DECL_NAMELESS (decl) = 1;
2376 TREE_CONSTANT (decl) = 1;
6f086dfc 2377
910ad8de 2378 DECL_CHAIN (decl) = all->orig_fnargs;
3b3f318a 2379 all->orig_fnargs = decl;
9771b263 2380 fnargs.safe_insert (0, decl);
3b3f318a 2381
6071dc7f 2382 all->function_result_decl = decl;
d5e254e1
IE
2383
2384 /* If function is instrumented then bounds of the
2385 passed structure address is the second argument. */
2386 if (chkp_function_instrumented_p (fndecl))
2387 {
2388 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2389 PARM_DECL, get_identifier (".result_bnd"),
2390 pointer_bounds_type_node);
2391 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2392 DECL_ARTIFICIAL (decl) = 1;
2393 DECL_NAMELESS (decl) = 1;
2394 TREE_CONSTANT (decl) = 1;
2395
2396 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2397 DECL_CHAIN (all->orig_fnargs) = decl;
2398 fnargs.safe_insert (1, decl);
2399 }
6f086dfc 2400 }
718fe406 2401
42ba5130
RH
2402 /* If the target wants to split complex arguments into scalars, do so. */
2403 if (targetm.calls.split_complex_arg)
3b3f318a 2404 split_complex_args (&fnargs);
ded9bf77 2405
6071dc7f
RH
2406 return fnargs;
2407}
e7949876 2408
6071dc7f
RH
2409/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2410 data for the parameter. Incorporate ABI specifics such as pass-by-
2411 reference and type promotion. */
6f086dfc 2412
6071dc7f
RH
2413static void
2414assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2415 struct assign_parm_data_one *data)
2416{
2417 tree nominal_type, passed_type;
ef4bddc2 2418 machine_mode nominal_mode, passed_mode, promoted_mode;
cde0f3fd 2419 int unsignedp;
6f086dfc 2420
6071dc7f
RH
2421 memset (data, 0, sizeof (*data));
2422
fa10beec 2423 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
e3b5732b 2424 if (!cfun->stdarg)
fa10beec 2425 data->named_arg = 1; /* No variadic parms. */
910ad8de 2426 else if (DECL_CHAIN (parm))
fa10beec 2427 data->named_arg = 1; /* Not the last non-variadic parm. */
d5cc9181 2428 else if (targetm.calls.strict_argument_naming (all->args_so_far))
fa10beec 2429 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 2430 else
fa10beec 2431 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
2432
2433 nominal_type = TREE_TYPE (parm);
2434 passed_type = DECL_ARG_TYPE (parm);
2435
2436 /* Look out for errors propagating this far. Also, if the parameter's
2437 type is void then its value doesn't matter. */
2438 if (TREE_TYPE (parm) == error_mark_node
2439 /* This can happen after weird syntax errors
2440 or if an enum type is defined among the parms. */
2441 || TREE_CODE (parm) != PARM_DECL
2442 || passed_type == NULL
2443 || VOID_TYPE_P (nominal_type))
2444 {
2445 nominal_type = passed_type = void_type_node;
2446 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2447 goto egress;
2448 }
108b7d3d 2449
6071dc7f
RH
2450 /* Find mode of arg as it is passed, and mode of arg as it should be
2451 during execution of this function. */
2452 passed_mode = TYPE_MODE (passed_type);
2453 nominal_mode = TYPE_MODE (nominal_type);
2454
ebf0bf7f
JJ
2455 /* If the parm is to be passed as a transparent union or record, use the
2456 type of the first field for the tests below. We have already verified
2457 that the modes are the same. */
2458 if ((TREE_CODE (passed_type) == UNION_TYPE
2459 || TREE_CODE (passed_type) == RECORD_TYPE)
2460 && TYPE_TRANSPARENT_AGGR (passed_type))
2461 passed_type = TREE_TYPE (first_field (passed_type));
6071dc7f 2462
0976078c 2463 /* See if this arg was passed by invisible reference. */
d5cc9181 2464 if (pass_by_reference (&all->args_so_far_v, passed_mode,
0976078c 2465 passed_type, data->named_arg))
6071dc7f
RH
2466 {
2467 passed_type = nominal_type = build_pointer_type (passed_type);
2468 data->passed_pointer = true;
fd91cfe3 2469 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
6071dc7f 2470 }
6f086dfc 2471
6071dc7f 2472 /* Find mode as it is passed by the ABI. */
cde0f3fd
PB
2473 unsignedp = TYPE_UNSIGNED (passed_type);
2474 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2475 TREE_TYPE (current_function_decl), 0);
6f086dfc 2476
6071dc7f
RH
2477 egress:
2478 data->nominal_type = nominal_type;
2479 data->passed_type = passed_type;
2480 data->nominal_mode = nominal_mode;
2481 data->passed_mode = passed_mode;
2482 data->promoted_mode = promoted_mode;
2483}
16bae307 2484
6071dc7f 2485/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2486
6071dc7f
RH
2487static void
2488assign_parms_setup_varargs (struct assign_parm_data_all *all,
2489 struct assign_parm_data_one *data, bool no_rtl)
2490{
2491 int varargs_pretend_bytes = 0;
2492
d5cc9181 2493 targetm.calls.setup_incoming_varargs (all->args_so_far,
6071dc7f
RH
2494 data->promoted_mode,
2495 data->passed_type,
2496 &varargs_pretend_bytes, no_rtl);
2497
2498 /* If the back-end has requested extra stack space, record how much is
2499 needed. Do not change pretend_args_size otherwise since it may be
2500 nonzero from an earlier partial argument. */
2501 if (varargs_pretend_bytes > 0)
2502 all->pretend_args_size = varargs_pretend_bytes;
2503}
a53e14c0 2504
6071dc7f
RH
2505/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2506 the incoming location of the current parameter. */
2507
2508static void
2509assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2510 struct assign_parm_data_one *data)
2511{
2512 HOST_WIDE_INT pretend_bytes = 0;
2513 rtx entry_parm;
2514 bool in_regs;
2515
2516 if (data->promoted_mode == VOIDmode)
2517 {
2518 data->entry_parm = data->stack_parm = const0_rtx;
2519 return;
2520 }
a53e14c0 2521
d5cc9181 2522 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2523 data->promoted_mode,
2524 data->passed_type,
2525 data->named_arg);
6f086dfc 2526
6071dc7f
RH
2527 if (entry_parm == 0)
2528 data->promoted_mode = data->passed_mode;
6f086dfc 2529
6071dc7f
RH
2530 /* Determine parm's home in the stack, in case it arrives in the stack
2531 or we should pretend it did. Compute the stack position and rtx where
2532 the argument arrives and its size.
6f086dfc 2533
6071dc7f
RH
2534 There is one complexity here: If this was a parameter that would
2535 have been passed in registers, but wasn't only because it is
2536 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2537 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2538 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2539 as it was the previous time. */
d5e254e1 2540 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
6f086dfc 2541#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2542 in_regs = true;
e7949876 2543#endif
6071dc7f
RH
2544 if (!in_regs && !data->named_arg)
2545 {
d5cc9181 2546 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
e7949876 2547 {
6071dc7f 2548 rtx tem;
d5cc9181 2549 tem = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2550 data->promoted_mode,
2551 data->passed_type, true);
6071dc7f 2552 in_regs = tem != NULL;
e7949876 2553 }
6071dc7f 2554 }
e7949876 2555
6071dc7f
RH
2556 /* If this parameter was passed both in registers and in the stack, use
2557 the copy on the stack. */
fe984136
RH
2558 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2559 data->passed_type))
6071dc7f 2560 entry_parm = 0;
e7949876 2561
6071dc7f
RH
2562 if (entry_parm)
2563 {
2564 int partial;
2565
d5cc9181 2566 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
78a52f11
RH
2567 data->promoted_mode,
2568 data->passed_type,
2569 data->named_arg);
6071dc7f
RH
2570 data->partial = partial;
2571
2572 /* The caller might already have allocated stack space for the
2573 register parameters. */
2574 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2575 {
6071dc7f
RH
2576 /* Part of this argument is passed in registers and part
2577 is passed on the stack. Ask the prologue code to extend
2578 the stack part so that we can recreate the full value.
2579
2580 PRETEND_BYTES is the size of the registers we need to store.
2581 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2582 stack space that the prologue should allocate.
2583
2584 Internally, gcc assumes that the argument pointer is aligned
2585 to STACK_BOUNDARY bits. This is used both for alignment
2586 optimizations (see init_emit) and to locate arguments that are
2587 aligned to more than PARM_BOUNDARY bits. We must preserve this
2588 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2589 a stack boundary. */
2590
2591 /* We assume at most one partial arg, and it must be the first
2592 argument on the stack. */
0bccc606 2593 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2594
78a52f11 2595 pretend_bytes = partial;
6071dc7f
RH
2596 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2597
2598 /* We want to align relative to the actual stack pointer, so
2599 don't include this in the stack size until later. */
2600 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2601 }
6071dc7f 2602 }
e7949876 2603
6071dc7f 2604 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2e4ceca5 2605 all->reg_parm_stack_space,
6071dc7f
RH
2606 entry_parm ? data->partial : 0, current_function_decl,
2607 &all->stack_args_size, &data->locate);
6f086dfc 2608
e94a448f
L
2609 /* Update parm_stack_boundary if this parameter is passed in the
2610 stack. */
2611 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2612 crtl->parm_stack_boundary = data->locate.boundary;
2613
6071dc7f
RH
2614 /* Adjust offsets to include the pretend args. */
2615 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2616 data->locate.slot_offset.constant += pretend_bytes;
2617 data->locate.offset.constant += pretend_bytes;
ebca59c3 2618
6071dc7f
RH
2619 data->entry_parm = entry_parm;
2620}
6f086dfc 2621
6071dc7f
RH
2622/* A subroutine of assign_parms. If there is actually space on the stack
2623 for this parm, count it in stack_args_size and return true. */
6f086dfc 2624
6071dc7f
RH
2625static bool
2626assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2627 struct assign_parm_data_one *data)
2628{
d5e254e1
IE
2629 /* Bounds are never passed on the stack to keep compatibility
2630 with not instrumented code. */
2631 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2632 return false;
2e6ae27f 2633 /* Trivially true if we've no incoming register. */
d5e254e1 2634 else if (data->entry_parm == NULL)
6071dc7f
RH
2635 ;
2636 /* Also true if we're partially in registers and partially not,
2637 since we've arranged to drop the entire argument on the stack. */
2638 else if (data->partial != 0)
2639 ;
2640 /* Also true if the target says that it's passed in both registers
2641 and on the stack. */
2642 else if (GET_CODE (data->entry_parm) == PARALLEL
2643 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2644 ;
2645 /* Also true if the target says that there's stack allocated for
2646 all register parameters. */
2647 else if (all->reg_parm_stack_space > 0)
2648 ;
2649 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2650 else
2651 return false;
6f086dfc 2652
6071dc7f
RH
2653 all->stack_args_size.constant += data->locate.size.constant;
2654 if (data->locate.size.var)
2655 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2656
6071dc7f
RH
2657 return true;
2658}
0d1416c6 2659
6071dc7f
RH
2660/* A subroutine of assign_parms. Given that this parameter is allocated
2661 stack space by the ABI, find it. */
6f086dfc 2662
6071dc7f
RH
2663static void
2664assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2665{
2666 rtx offset_rtx, stack_parm;
2667 unsigned int align, boundary;
6f086dfc 2668
6071dc7f
RH
2669 /* If we're passing this arg using a reg, make its stack home the
2670 aligned stack slot. */
2671 if (data->entry_parm)
2672 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2673 else
2674 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2675
38173d38 2676 stack_parm = crtl->args.internal_arg_pointer;
6071dc7f
RH
2677 if (offset_rtx != const0_rtx)
2678 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2679 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2680
08ab0acf 2681 if (!data->passed_pointer)
997f78fb 2682 {
08ab0acf
JJ
2683 set_mem_attributes (stack_parm, parm, 1);
2684 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2685 while promoted mode's size is needed. */
2686 if (data->promoted_mode != BLKmode
2687 && data->promoted_mode != DECL_MODE (parm))
997f78fb 2688 {
f5541398 2689 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
527210c4 2690 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
08ab0acf
JJ
2691 {
2692 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2693 data->promoted_mode);
2694 if (offset)
527210c4 2695 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
08ab0acf 2696 }
997f78fb
JJ
2697 }
2698 }
6071dc7f 2699
bfc45551
AM
2700 boundary = data->locate.boundary;
2701 align = BITS_PER_UNIT;
6071dc7f
RH
2702
2703 /* If we're padding upward, we know that the alignment of the slot
c2ed6cf8 2704 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
6071dc7f
RH
2705 intentionally forcing upward padding. Otherwise we have to come
2706 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2707 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f 2708 align = boundary;
481683e1 2709 else if (CONST_INT_P (offset_rtx))
6071dc7f
RH
2710 {
2711 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2712 align = align & -align;
2713 }
bfc45551 2714 set_mem_align (stack_parm, align);
6071dc7f
RH
2715
2716 if (data->entry_parm)
2717 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2718
2719 data->stack_parm = stack_parm;
2720}
2721
2722/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2723 always valid and contiguous. */
2724
2725static void
2726assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2727{
2728 rtx entry_parm = data->entry_parm;
2729 rtx stack_parm = data->stack_parm;
2730
2731 /* If this parm was passed part in regs and part in memory, pretend it
2732 arrived entirely in memory by pushing the register-part onto the stack.
2733 In the special case of a DImode or DFmode that is split, we could put
2734 it together in a pseudoreg directly, but for now that's not worth
2735 bothering with. */
2736 if (data->partial != 0)
2737 {
2738 /* Handle calls that pass values in multiple non-contiguous
2739 locations. The Irix 6 ABI has examples of this. */
2740 if (GET_CODE (entry_parm) == PARALLEL)
1a8cb155 2741 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
b8698a0f 2742 data->passed_type,
6071dc7f 2743 int_size_in_bytes (data->passed_type));
6f086dfc 2744 else
78a52f11
RH
2745 {
2746 gcc_assert (data->partial % UNITS_PER_WORD == 0);
1a8cb155
RS
2747 move_block_from_reg (REGNO (entry_parm),
2748 validize_mem (copy_rtx (stack_parm)),
78a52f11
RH
2749 data->partial / UNITS_PER_WORD);
2750 }
6f086dfc 2751
6071dc7f
RH
2752 entry_parm = stack_parm;
2753 }
6f086dfc 2754
6071dc7f
RH
2755 /* If we didn't decide this parm came in a register, by default it came
2756 on the stack. */
2757 else if (entry_parm == NULL)
2758 entry_parm = stack_parm;
2759
2760 /* When an argument is passed in multiple locations, we can't make use
2761 of this information, but we can save some copying if the whole argument
2762 is passed in a single register. */
2763 else if (GET_CODE (entry_parm) == PARALLEL
2764 && data->nominal_mode != BLKmode
2765 && data->passed_mode != BLKmode)
2766 {
2767 size_t i, len = XVECLEN (entry_parm, 0);
2768
2769 for (i = 0; i < len; i++)
2770 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2771 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2772 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2773 == data->passed_mode)
2774 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2775 {
2776 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2777 break;
2778 }
2779 }
e68a6ce1 2780
6071dc7f
RH
2781 data->entry_parm = entry_parm;
2782}
6f086dfc 2783
4d2a9850
DJ
2784/* A subroutine of assign_parms. Reconstitute any values which were
2785 passed in multiple registers and would fit in a single register. */
2786
2787static void
2788assign_parm_remove_parallels (struct assign_parm_data_one *data)
2789{
2790 rtx entry_parm = data->entry_parm;
2791
2792 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2793 This can be done with register operations rather than on the
2794 stack, even if we will store the reconstituted parameter on the
2795 stack later. */
85776d60 2796 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
4d2a9850
DJ
2797 {
2798 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
bbd46fd5 2799 emit_group_store (parmreg, entry_parm, data->passed_type,
4d2a9850
DJ
2800 GET_MODE_SIZE (GET_MODE (entry_parm)));
2801 entry_parm = parmreg;
2802 }
2803
2804 data->entry_parm = entry_parm;
2805}
2806
6071dc7f
RH
2807/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2808 always valid and properly aligned. */
6f086dfc 2809
6071dc7f
RH
2810static void
2811assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2812{
2813 rtx stack_parm = data->stack_parm;
2814
2815 /* If we can't trust the parm stack slot to be aligned enough for its
2816 ultimate type, don't use that slot after entry. We'll make another
2817 stack slot, if we need one. */
bfc45551
AM
2818 if (stack_parm
2819 && ((STRICT_ALIGNMENT
2820 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2821 || (data->nominal_type
2822 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2823 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2824 stack_parm = NULL;
2825
2826 /* If parm was passed in memory, and we need to convert it on entry,
2827 don't store it back in that same slot. */
2828 else if (data->entry_parm == stack_parm
2829 && data->nominal_mode != BLKmode
2830 && data->nominal_mode != data->passed_mode)
2831 stack_parm = NULL;
2832
7d69de61
RH
2833 /* If stack protection is in effect for this function, don't leave any
2834 pointers in their passed stack slots. */
cb91fab0 2835 else if (crtl->stack_protect_guard
7d69de61
RH
2836 && (flag_stack_protect == 2
2837 || data->passed_pointer
2838 || POINTER_TYPE_P (data->nominal_type)))
2839 stack_parm = NULL;
2840
6071dc7f
RH
2841 data->stack_parm = stack_parm;
2842}
a0506b54 2843
6071dc7f
RH
2844/* A subroutine of assign_parms. Return true if the current parameter
2845 should be stored as a BLKmode in the current frame. */
2846
2847static bool
2848assign_parm_setup_block_p (struct assign_parm_data_one *data)
2849{
2850 if (data->nominal_mode == BLKmode)
2851 return true;
85776d60
DJ
2852 if (GET_MODE (data->entry_parm) == BLKmode)
2853 return true;
531547e9 2854
6e985040 2855#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2856 /* Only assign_parm_setup_block knows how to deal with register arguments
2857 that are padded at the least significant end. */
2858 if (REG_P (data->entry_parm)
2859 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2860 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2861 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2862 return true;
6e985040 2863#endif
6071dc7f
RH
2864
2865 return false;
2866}
2867
b8698a0f 2868/* A subroutine of assign_parms. Arrange for the parameter to be
6071dc7f
RH
2869 present and valid in DATA->STACK_RTL. */
2870
2871static void
27e29549
RH
2872assign_parm_setup_block (struct assign_parm_data_all *all,
2873 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2874{
2875 rtx entry_parm = data->entry_parm;
2876 rtx stack_parm = data->stack_parm;
bfc45551
AM
2877 HOST_WIDE_INT size;
2878 HOST_WIDE_INT size_stored;
6071dc7f 2879
27e29549
RH
2880 if (GET_CODE (entry_parm) == PARALLEL)
2881 entry_parm = emit_group_move_into_temps (entry_parm);
2882
bfc45551
AM
2883 size = int_size_in_bytes (data->passed_type);
2884 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2885 if (stack_parm == 0)
2886 {
a561d88b 2887 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2888 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2889 DECL_ALIGN (parm));
bfc45551
AM
2890 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2891 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2892 set_mem_attributes (stack_parm, parm, 1);
2893 }
2894
6071dc7f
RH
2895 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2896 calls that pass values in multiple non-contiguous locations. */
2897 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2898 {
6071dc7f
RH
2899 rtx mem;
2900
2901 /* Note that we will be storing an integral number of words.
2902 So we have to be careful to ensure that we allocate an
bfc45551 2903 integral number of words. We do this above when we call
6071dc7f
RH
2904 assign_stack_local if space was not allocated in the argument
2905 list. If it was, this will not work if PARM_BOUNDARY is not
2906 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2907 if it becomes a problem. Exception is when BLKmode arrives
2908 with arguments not conforming to word_mode. */
2909
bfc45551
AM
2910 if (data->stack_parm == 0)
2911 ;
6071dc7f
RH
2912 else if (GET_CODE (entry_parm) == PARALLEL)
2913 ;
0bccc606
NS
2914 else
2915 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2916
1a8cb155 2917 mem = validize_mem (copy_rtx (stack_parm));
c6b97fac 2918
6071dc7f
RH
2919 /* Handle values in multiple non-contiguous locations. */
2920 if (GET_CODE (entry_parm) == PARALLEL)
27e29549 2921 {
bb27eeda
SE
2922 push_to_sequence2 (all->first_conversion_insn,
2923 all->last_conversion_insn);
27e29549 2924 emit_group_store (mem, entry_parm, data->passed_type, size);
bb27eeda
SE
2925 all->first_conversion_insn = get_insns ();
2926 all->last_conversion_insn = get_last_insn ();
27e29549
RH
2927 end_sequence ();
2928 }
c6b97fac 2929
6071dc7f
RH
2930 else if (size == 0)
2931 ;
5c07bd7a 2932
6071dc7f
RH
2933 /* If SIZE is that of a mode no bigger than a word, just use
2934 that mode's store operation. */
2935 else if (size <= UNITS_PER_WORD)
2936 {
ef4bddc2 2937 machine_mode mode
6071dc7f 2938 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2939
6071dc7f 2940 if (mode != BLKmode
6e985040 2941#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2942 && (size == UNITS_PER_WORD
2943 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2944 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2945#endif
6071dc7f
RH
2946 )
2947 {
208996c7
RS
2948 rtx reg;
2949
2950 /* We are really truncating a word_mode value containing
2951 SIZE bytes into a value of mode MODE. If such an
2952 operation requires no actual instructions, we can refer
2953 to the value directly in mode MODE, otherwise we must
2954 start with the register in word_mode and explicitly
2955 convert it. */
2956 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2957 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2958 else
2959 {
2960 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2961 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2962 }
6071dc7f
RH
2963 emit_move_insn (change_address (mem, mode, 0), reg);
2964 }
c6b97fac 2965
6071dc7f
RH
2966 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2967 machine must be aligned to the left before storing
2968 to memory. Note that the previous test doesn't
2969 handle all cases (e.g. SIZE == 3). */
2970 else if (size != UNITS_PER_WORD
6e985040 2971#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2972 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2973 == downward)
6e985040 2974#else
6071dc7f 2975 && BYTES_BIG_ENDIAN
6e985040 2976#endif
6071dc7f
RH
2977 )
2978 {
2979 rtx tem, x;
2980 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2981 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2982
eb6c3df1 2983 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
6071dc7f
RH
2984 tem = change_address (mem, word_mode, 0);
2985 emit_move_insn (tem, x);
6f086dfc 2986 }
6071dc7f 2987 else
27e29549 2988 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2989 size_stored / UNITS_PER_WORD);
6f086dfc 2990 }
6071dc7f 2991 else
27e29549 2992 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2993 size_stored / UNITS_PER_WORD);
2994 }
bfc45551
AM
2995 else if (data->stack_parm == 0)
2996 {
bb27eeda 2997 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2998 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2999 BLOCK_OP_NORMAL);
bb27eeda
SE
3000 all->first_conversion_insn = get_insns ();
3001 all->last_conversion_insn = get_last_insn ();
bfc45551
AM
3002 end_sequence ();
3003 }
6071dc7f 3004
bfc45551 3005 data->stack_parm = stack_parm;
6071dc7f
RH
3006 SET_DECL_RTL (parm, stack_parm);
3007}
3008
3009/* A subroutine of assign_parms. Allocate a pseudo to hold the current
3010 parameter. Get it there. Perform all ABI specified conversions. */
3011
3012static void
3013assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3014 struct assign_parm_data_one *data)
3015{
71008de4
BS
3016 rtx parmreg, validated_mem;
3017 rtx equiv_stack_parm;
ef4bddc2 3018 machine_mode promoted_nominal_mode;
6071dc7f
RH
3019 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3020 bool did_conversion = false;
71008de4 3021 bool need_conversion, moved;
6071dc7f
RH
3022
3023 /* Store the parm in a pseudoregister during the function, but we may
666e3ceb
PB
3024 need to do it in a wider mode. Using 2 here makes the result
3025 consistent with promote_decl_mode and thus expand_expr_real_1. */
6071dc7f 3026 promoted_nominal_mode
cde0f3fd 3027 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
666e3ceb 3028 TREE_TYPE (current_function_decl), 2);
6071dc7f
RH
3029
3030 parmreg = gen_reg_rtx (promoted_nominal_mode);
3031
3032 if (!DECL_ARTIFICIAL (parm))
3033 mark_user_reg (parmreg);
3034
3035 /* If this was an item that we received a pointer to,
3036 set DECL_RTL appropriately. */
3037 if (data->passed_pointer)
3038 {
3039 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3040 set_mem_attributes (x, parm, 1);
3041 SET_DECL_RTL (parm, x);
3042 }
3043 else
389fdba0 3044 SET_DECL_RTL (parm, parmreg);
6071dc7f 3045
4d2a9850
DJ
3046 assign_parm_remove_parallels (data);
3047
666e3ceb
PB
3048 /* Copy the value into the register, thus bridging between
3049 assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 3050
71008de4 3051 equiv_stack_parm = data->stack_parm;
1a8cb155 3052 validated_mem = validize_mem (copy_rtx (data->entry_parm));
71008de4
BS
3053
3054 need_conversion = (data->nominal_mode != data->passed_mode
3055 || promoted_nominal_mode != data->promoted_mode);
3056 moved = false;
3057
dbb94435
BS
3058 if (need_conversion
3059 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3060 && data->nominal_mode == data->passed_mode
3061 && data->nominal_mode == GET_MODE (data->entry_parm))
71008de4 3062 {
6071dc7f
RH
3063 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3064 mode, by the caller. We now have to convert it to
3065 NOMINAL_MODE, if different. However, PARMREG may be in
3066 a different mode than NOMINAL_MODE if it is being stored
3067 promoted.
3068
3069 If ENTRY_PARM is a hard register, it might be in a register
3070 not valid for operating in its mode (e.g., an odd-numbered
3071 register for a DFmode). In that case, moves are the only
3072 thing valid, so we can't do a convert from there. This
3073 occurs when the calling sequence allow such misaligned
3074 usages.
3075
3076 In addition, the conversion may involve a call, which could
3077 clobber parameters which haven't been copied to pseudo
71008de4
BS
3078 registers yet.
3079
3080 First, we try to emit an insn which performs the necessary
3081 conversion. We verify that this insn does not clobber any
3082 hard registers. */
3083
3084 enum insn_code icode;
3085 rtx op0, op1;
3086
3087 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3088 unsignedp);
3089
3090 op0 = parmreg;
3091 op1 = validated_mem;
3092 if (icode != CODE_FOR_nothing
2ef6ce06
RS
3093 && insn_operand_matches (icode, 0, op0)
3094 && insn_operand_matches (icode, 1, op1))
71008de4
BS
3095 {
3096 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
b32d5189
DM
3097 rtx_insn *insn, *insns;
3098 rtx t = op1;
71008de4
BS
3099 HARD_REG_SET hardregs;
3100
3101 start_sequence ();
f9fef349
JJ
3102 /* If op1 is a hard register that is likely spilled, first
3103 force it into a pseudo, otherwise combiner might extend
3104 its lifetime too much. */
3105 if (GET_CODE (t) == SUBREG)
3106 t = SUBREG_REG (t);
3107 if (REG_P (t)
3108 && HARD_REGISTER_P (t)
3109 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3110 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3111 {
3112 t = gen_reg_rtx (GET_MODE (op1));
3113 emit_move_insn (t, op1);
3114 }
3115 else
3116 t = op1;
a11899b2
DM
3117 rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3118 data->passed_mode, unsignedp);
3119 emit_insn (pat);
71008de4
BS
3120 insns = get_insns ();
3121
3122 moved = true;
3123 CLEAR_HARD_REG_SET (hardregs);
3124 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3125 {
3126 if (INSN_P (insn))
3127 note_stores (PATTERN (insn), record_hard_reg_sets,
3128 &hardregs);
3129 if (!hard_reg_set_empty_p (hardregs))
3130 moved = false;
3131 }
3132
3133 end_sequence ();
3134
3135 if (moved)
3136 {
3137 emit_insn (insns);
dbb94435
BS
3138 if (equiv_stack_parm != NULL_RTX)
3139 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3140 equiv_stack_parm);
71008de4
BS
3141 }
3142 }
3143 }
3144
3145 if (moved)
3146 /* Nothing to do. */
3147 ;
3148 else if (need_conversion)
3149 {
3150 /* We did not have an insn to convert directly, or the sequence
3151 generated appeared unsafe. We must first copy the parm to a
3152 pseudo reg, and save the conversion until after all
6071dc7f
RH
3153 parameters have been moved. */
3154
71008de4 3155 int save_tree_used;
6071dc7f
RH
3156 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3157
71008de4 3158 emit_move_insn (tempreg, validated_mem);
6071dc7f 3159
bb27eeda 3160 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
3161 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3162
3163 if (GET_CODE (tempreg) == SUBREG
3164 && GET_MODE (tempreg) == data->nominal_mode
3165 && REG_P (SUBREG_REG (tempreg))
3166 && data->nominal_mode == data->passed_mode
3167 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3168 && GET_MODE_SIZE (GET_MODE (tempreg))
3169 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 3170 {
6071dc7f
RH
3171 /* The argument is already sign/zero extended, so note it
3172 into the subreg. */
3173 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
362d42dc 3174 SUBREG_PROMOTED_SET (tempreg, unsignedp);
6071dc7f 3175 }
00d8a4c1 3176
6071dc7f
RH
3177 /* TREE_USED gets set erroneously during expand_assignment. */
3178 save_tree_used = TREE_USED (parm);
79f5e442 3179 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 3180 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
3181 all->first_conversion_insn = get_insns ();
3182 all->last_conversion_insn = get_last_insn ();
6071dc7f 3183 end_sequence ();
00d8a4c1 3184
6071dc7f
RH
3185 did_conversion = true;
3186 }
3187 else
71008de4 3188 emit_move_insn (parmreg, validated_mem);
6071dc7f
RH
3189
3190 /* If we were passed a pointer but the actual value can safely live
f7e088e7
EB
3191 in a register, retrieve it and use it directly. */
3192 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
6071dc7f
RH
3193 {
3194 /* We can't use nominal_mode, because it will have been set to
3195 Pmode above. We must use the actual mode of the parm. */
f7e088e7
EB
3196 if (use_register_for_decl (parm))
3197 {
3198 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3199 mark_user_reg (parmreg);
3200 }
3201 else
3202 {
3203 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3204 TYPE_MODE (TREE_TYPE (parm)),
3205 TYPE_ALIGN (TREE_TYPE (parm)));
3206 parmreg
3207 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3208 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3209 align);
3210 set_mem_attributes (parmreg, parm, 1);
3211 }
cd5b3469 3212
6071dc7f
RH
3213 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3214 {
3215 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3216 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3217
bb27eeda
SE
3218 push_to_sequence2 (all->first_conversion_insn,
3219 all->last_conversion_insn);
6071dc7f
RH
3220 emit_move_insn (tempreg, DECL_RTL (parm));
3221 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3222 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
3223 all->first_conversion_insn = get_insns ();
3224 all->last_conversion_insn = get_last_insn ();
6071dc7f 3225 end_sequence ();
6f086dfc 3226
6071dc7f
RH
3227 did_conversion = true;
3228 }
3229 else
3230 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 3231
6071dc7f 3232 SET_DECL_RTL (parm, parmreg);
797a6ac1 3233
6071dc7f
RH
3234 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3235 now the parm. */
3236 data->stack_parm = NULL;
3237 }
ddef6bc7 3238
6071dc7f
RH
3239 /* Mark the register as eliminable if we did no conversion and it was
3240 copied from memory at a fixed offset, and the arg pointer was not
3241 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3242 offset formed an invalid address, such memory-equivalences as we
3243 make here would screw up life analysis for it. */
3244 if (data->nominal_mode == data->passed_mode
3245 && !did_conversion
3246 && data->stack_parm != 0
3247 && MEM_P (data->stack_parm)
3248 && data->locate.offset.var == 0
3249 && reg_mentioned_p (virtual_incoming_args_rtx,
3250 XEXP (data->stack_parm, 0)))
3251 {
691fe203
DM
3252 rtx_insn *linsn = get_last_insn ();
3253 rtx_insn *sinsn;
3254 rtx set;
a03caf76 3255
6071dc7f
RH
3256 /* Mark complex types separately. */
3257 if (GET_CODE (parmreg) == CONCAT)
3258 {
ef4bddc2 3259 machine_mode submode
6071dc7f 3260 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
3261 int regnor = REGNO (XEXP (parmreg, 0));
3262 int regnoi = REGNO (XEXP (parmreg, 1));
3263 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3264 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3265 GET_MODE_SIZE (submode));
6071dc7f
RH
3266
3267 /* Scan backwards for the set of the real and
3268 imaginary parts. */
3269 for (sinsn = linsn; sinsn != 0;
3270 sinsn = prev_nonnote_insn (sinsn))
3271 {
3272 set = single_set (sinsn);
3273 if (set == 0)
3274 continue;
3275
3276 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 3277 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 3278 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 3279 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 3280 }
6071dc7f 3281 }
7543f918
JR
3282 else
3283 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
6071dc7f
RH
3284 }
3285
3286 /* For pointer data type, suggest pointer register. */
3287 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3288 mark_reg_pointer (parmreg,
3289 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3290}
3291
3292/* A subroutine of assign_parms. Allocate stack space to hold the current
3293 parameter. Get it there. Perform all ABI specified conversions. */
3294
3295static void
3296assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3297 struct assign_parm_data_one *data)
3298{
3299 /* Value must be stored in the stack slot STACK_PARM during function
3300 execution. */
bfc45551 3301 bool to_conversion = false;
6071dc7f 3302
4d2a9850
DJ
3303 assign_parm_remove_parallels (data);
3304
6071dc7f
RH
3305 if (data->promoted_mode != data->nominal_mode)
3306 {
3307 /* Conversion is required. */
3308 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 3309
1a8cb155 3310 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
6071dc7f 3311
bb27eeda 3312 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
3313 to_conversion = true;
3314
6071dc7f
RH
3315 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3316 TYPE_UNSIGNED (TREE_TYPE (parm)));
3317
3318 if (data->stack_parm)
dd67163f
JJ
3319 {
3320 int offset = subreg_lowpart_offset (data->nominal_mode,
3321 GET_MODE (data->stack_parm));
3322 /* ??? This may need a big-endian conversion on sparc64. */
3323 data->stack_parm
3324 = adjust_address (data->stack_parm, data->nominal_mode, 0);
527210c4 3325 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
dd67163f 3326 set_mem_offset (data->stack_parm,
527210c4 3327 MEM_OFFSET (data->stack_parm) + offset);
dd67163f 3328 }
6071dc7f
RH
3329 }
3330
3331 if (data->entry_parm != data->stack_parm)
3332 {
bfc45551
AM
3333 rtx src, dest;
3334
6071dc7f
RH
3335 if (data->stack_parm == 0)
3336 {
3a695389
UW
3337 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3338 GET_MODE (data->entry_parm),
3339 TYPE_ALIGN (data->passed_type));
6071dc7f
RH
3340 data->stack_parm
3341 = assign_stack_local (GET_MODE (data->entry_parm),
3342 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3a695389 3343 align);
6071dc7f 3344 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 3345 }
6071dc7f 3346
1a8cb155
RS
3347 dest = validize_mem (copy_rtx (data->stack_parm));
3348 src = validize_mem (copy_rtx (data->entry_parm));
bfc45551
AM
3349
3350 if (MEM_P (src))
6f086dfc 3351 {
bfc45551
AM
3352 /* Use a block move to handle potentially misaligned entry_parm. */
3353 if (!to_conversion)
bb27eeda
SE
3354 push_to_sequence2 (all->first_conversion_insn,
3355 all->last_conversion_insn);
bfc45551
AM
3356 to_conversion = true;
3357
3358 emit_block_move (dest, src,
3359 GEN_INT (int_size_in_bytes (data->passed_type)),
3360 BLOCK_OP_NORMAL);
6071dc7f
RH
3361 }
3362 else
bfc45551
AM
3363 emit_move_insn (dest, src);
3364 }
3365
3366 if (to_conversion)
3367 {
bb27eeda
SE
3368 all->first_conversion_insn = get_insns ();
3369 all->last_conversion_insn = get_last_insn ();
bfc45551 3370 end_sequence ();
6071dc7f 3371 }
6f086dfc 3372
6071dc7f
RH
3373 SET_DECL_RTL (parm, data->stack_parm);
3374}
3412b298 3375
6071dc7f
RH
3376/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3377 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 3378
6071dc7f 3379static void
3b3f318a 3380assign_parms_unsplit_complex (struct assign_parm_data_all *all,
9771b263 3381 vec<tree> fnargs)
6071dc7f
RH
3382{
3383 tree parm;
6ccd356e 3384 tree orig_fnargs = all->orig_fnargs;
3b3f318a 3385 unsigned i = 0;
f4ef873c 3386
3b3f318a 3387 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
6071dc7f
RH
3388 {
3389 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3390 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3391 {
3392 rtx tmp, real, imag;
ef4bddc2 3393 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 3394
9771b263
DN
3395 real = DECL_RTL (fnargs[i]);
3396 imag = DECL_RTL (fnargs[i + 1]);
6071dc7f 3397 if (inner != GET_MODE (real))
6f086dfc 3398 {
6071dc7f
RH
3399 real = gen_lowpart_SUBREG (inner, real);
3400 imag = gen_lowpart_SUBREG (inner, imag);
3401 }
6ccd356e
AM
3402
3403 if (TREE_ADDRESSABLE (parm))
3404 {
3405 rtx rmem, imem;
3406 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3a695389
UW
3407 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3408 DECL_MODE (parm),
3409 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
3410
3411 /* split_complex_arg put the real and imag parts in
3412 pseudos. Move them to memory. */
3a695389 3413 tmp = assign_stack_local (DECL_MODE (parm), size, align);
6ccd356e
AM
3414 set_mem_attributes (tmp, parm, 1);
3415 rmem = adjust_address_nv (tmp, inner, 0);
3416 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
3417 push_to_sequence2 (all->first_conversion_insn,
3418 all->last_conversion_insn);
6ccd356e
AM
3419 emit_move_insn (rmem, real);
3420 emit_move_insn (imem, imag);
bb27eeda
SE
3421 all->first_conversion_insn = get_insns ();
3422 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
3423 end_sequence ();
3424 }
3425 else
3426 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 3427 SET_DECL_RTL (parm, tmp);
7e41ffa2 3428
9771b263
DN
3429 real = DECL_INCOMING_RTL (fnargs[i]);
3430 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
6071dc7f
RH
3431 if (inner != GET_MODE (real))
3432 {
3433 real = gen_lowpart_SUBREG (inner, real);
3434 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 3435 }
6071dc7f 3436 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5141868d 3437 set_decl_incoming_rtl (parm, tmp, false);
3b3f318a 3438 i++;
6f086dfc 3439 }
6f086dfc 3440 }
6071dc7f
RH
3441}
3442
d5e254e1
IE
3443/* Load bounds of PARM from bounds table. */
3444static void
3445assign_parm_load_bounds (struct assign_parm_data_one *data,
3446 tree parm,
3447 rtx entry,
3448 unsigned bound_no)
3449{
3450 bitmap_iterator bi;
3451 unsigned i, offs = 0;
3452 int bnd_no = -1;
3453 rtx slot = NULL, ptr = NULL;
3454
3455 if (parm)
3456 {
3457 bitmap slots;
3458 bitmap_obstack_initialize (NULL);
3459 slots = BITMAP_ALLOC (NULL);
3460 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3461 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3462 {
3463 if (bound_no)
3464 bound_no--;
3465 else
3466 {
3467 bnd_no = i;
3468 break;
3469 }
3470 }
3471 BITMAP_FREE (slots);
3472 bitmap_obstack_release (NULL);
3473 }
3474
3475 /* We may have bounds not associated with any pointer. */
3476 if (bnd_no != -1)
3477 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3478
3479 /* Find associated pointer. */
3480 if (bnd_no == -1)
3481 {
3482 /* If bounds are not associated with any bounds,
3483 then it is passed in a register or special slot. */
3484 gcc_assert (data->entry_parm);
3485 ptr = const0_rtx;
3486 }
3487 else if (MEM_P (entry))
3488 slot = adjust_address (entry, Pmode, offs);
3489 else if (REG_P (entry))
3490 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3491 else if (GET_CODE (entry) == PARALLEL)
3492 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3493 else
3494 gcc_unreachable ();
3495 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3496 data->entry_parm);
3497}
3498
3499/* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3500
3501static void
3502assign_bounds (vec<bounds_parm_data> &bndargs,
3503 struct assign_parm_data_all &all)
3504{
3505 unsigned i, pass, handled = 0;
3506 bounds_parm_data *pbdata;
3507
3508 if (!bndargs.exists ())
3509 return;
3510
3511 /* We make few passes to store input bounds. Firstly handle bounds
3512 passed in registers. After that we load bounds passed in special
3513 slots. Finally we load bounds from Bounds Table. */
3514 for (pass = 0; pass < 3; pass++)
3515 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3516 {
3517 /* Pass 0 => regs only. */
3518 if (pass == 0
3519 && (!pbdata->parm_data.entry_parm
3520 || GET_CODE (pbdata->parm_data.entry_parm) != REG))
3521 continue;
3522 /* Pass 1 => slots only. */
3523 else if (pass == 1
3524 && (!pbdata->parm_data.entry_parm
3525 || GET_CODE (pbdata->parm_data.entry_parm) == REG))
3526 continue;
3527 /* Pass 2 => BT only. */
3528 else if (pass == 2
3529 && pbdata->parm_data.entry_parm)
3530 continue;
3531
3532 if (!pbdata->parm_data.entry_parm
3533 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3534 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3535 pbdata->ptr_entry, pbdata->bound_no);
3536
3537 set_decl_incoming_rtl (pbdata->bounds_parm,
3538 pbdata->parm_data.entry_parm, false);
3539
3540 if (assign_parm_setup_block_p (&pbdata->parm_data))
3541 assign_parm_setup_block (&all, pbdata->bounds_parm,
3542 &pbdata->parm_data);
3543 else if (pbdata->parm_data.passed_pointer
3544 || use_register_for_decl (pbdata->bounds_parm))
3545 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3546 &pbdata->parm_data);
3547 else
3548 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3549 &pbdata->parm_data);
3550
3551 /* Count handled bounds to make sure we miss nothing. */
3552 handled++;
3553 }
3554
3555 gcc_assert (handled == bndargs.length ());
3556
3557 bndargs.release ();
3558}
3559
6071dc7f
RH
3560/* Assign RTL expressions to the function's parameters. This may involve
3561 copying them into registers and using those registers as the DECL_RTL. */
3562
6fe79279 3563static void
6071dc7f
RH
3564assign_parms (tree fndecl)
3565{
3566 struct assign_parm_data_all all;
3b3f318a 3567 tree parm;
9771b263 3568 vec<tree> fnargs;
d5e254e1
IE
3569 unsigned i, bound_no = 0;
3570 tree last_arg = NULL;
3571 rtx last_arg_entry = NULL;
3572 vec<bounds_parm_data> bndargs = vNULL;
3573 bounds_parm_data bdata;
6f086dfc 3574
38173d38 3575 crtl->args.internal_arg_pointer
150cdc9e 3576 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
3577
3578 assign_parms_initialize_all (&all);
3579 fnargs = assign_parms_augmented_arg_list (&all);
3580
9771b263 3581 FOR_EACH_VEC_ELT (fnargs, i, parm)
ded9bf77 3582 {
6071dc7f
RH
3583 struct assign_parm_data_one data;
3584
3585 /* Extract the type of PARM; adjust it according to ABI. */
3586 assign_parm_find_data_types (&all, parm, &data);
3587
3588 /* Early out for errors and void parameters. */
3589 if (data.passed_mode == VOIDmode)
ded9bf77 3590 {
6071dc7f
RH
3591 SET_DECL_RTL (parm, const0_rtx);
3592 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3593 continue;
3594 }
196c42cd 3595
2e3f842f
L
3596 /* Estimate stack alignment from parameter alignment. */
3597 if (SUPPORTS_STACK_ALIGNMENT)
3598 {
c2ed6cf8
NF
3599 unsigned int align
3600 = targetm.calls.function_arg_boundary (data.promoted_mode,
3601 data.passed_type);
ae58e548
JJ
3602 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3603 align);
2e3f842f 3604 if (TYPE_ALIGN (data.nominal_type) > align)
ae58e548
JJ
3605 align = MINIMUM_ALIGNMENT (data.nominal_type,
3606 TYPE_MODE (data.nominal_type),
3607 TYPE_ALIGN (data.nominal_type));
2e3f842f
L
3608 if (crtl->stack_alignment_estimated < align)
3609 {
3610 gcc_assert (!crtl->stack_realign_processed);
3611 crtl->stack_alignment_estimated = align;
3612 }
3613 }
b8698a0f 3614
6071dc7f
RH
3615 /* Find out where the parameter arrives in this function. */
3616 assign_parm_find_entry_rtl (&all, &data);
3617
3618 /* Find out where stack space for this parameter might be. */
3619 if (assign_parm_is_stack_parm (&all, &data))
3620 {
3621 assign_parm_find_stack_rtl (parm, &data);
3622 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3623 }
d5e254e1
IE
3624 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3625 {
3626 /* Remember where last non bounds arg was passed in case
3627 we have to load associated bounds for it from Bounds
3628 Table. */
3629 last_arg = parm;
3630 last_arg_entry = data.entry_parm;
3631 bound_no = 0;
3632 }
6071dc7f 3633 /* Record permanently how this parm was passed. */
a82ff31f
JJ
3634 if (data.passed_pointer)
3635 {
3636 rtx incoming_rtl
3637 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3638 data.entry_parm);
3639 set_decl_incoming_rtl (parm, incoming_rtl, true);
3640 }
3641 else
3642 set_decl_incoming_rtl (parm, data.entry_parm, false);
6071dc7f 3643
d5e254e1
IE
3644 /* Boudns should be loaded in the particular order to
3645 have registers allocated correctly. Collect info about
3646 input bounds and load them later. */
3647 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3648 {
3649 /* Expect bounds in instrumented functions only. */
3650 gcc_assert (chkp_function_instrumented_p (fndecl));
3651
3652 bdata.parm_data = data;
3653 bdata.bounds_parm = parm;
3654 bdata.ptr_parm = last_arg;
3655 bdata.ptr_entry = last_arg_entry;
3656 bdata.bound_no = bound_no;
3657 bndargs.safe_push (bdata);
3658 }
3659 else
3660 {
3661 assign_parm_adjust_stack_rtl (&data);
3662
3663 if (assign_parm_setup_block_p (&data))
3664 assign_parm_setup_block (&all, parm, &data);
3665 else if (data.passed_pointer || use_register_for_decl (parm))
3666 assign_parm_setup_reg (&all, parm, &data);
3667 else
3668 assign_parm_setup_stack (&all, parm, &data);
3669 }
3670
3671 if (cfun->stdarg && !DECL_CHAIN (parm))
3672 {
3673 int pretend_bytes = 0;
3674
3675 assign_parms_setup_varargs (&all, &data, false);
3676
3677 if (chkp_function_instrumented_p (fndecl))
3678 {
3679 /* We expect this is the last parm. Otherwise it is wrong
3680 to assign bounds right now. */
3681 gcc_assert (i == (fnargs.length () - 1));
3682 assign_bounds (bndargs, all);
3683 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3684 data.promoted_mode,
3685 data.passed_type,
3686 &pretend_bytes,
3687 false);
3688 }
3689 }
3690
6071dc7f 3691 /* Update info on where next arg arrives in registers. */
d5cc9181 3692 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3693 data.passed_type, data.named_arg);
6071dc7f 3694
d5e254e1
IE
3695 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3696 bound_no++;
ded9bf77
AH
3697 }
3698
d5e254e1
IE
3699 assign_bounds (bndargs, all);
3700
3b3f318a 3701 if (targetm.calls.split_complex_arg)
6ccd356e 3702 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3703
9771b263 3704 fnargs.release ();
3b3f318a 3705
3412b298
JW
3706 /* Output all parameter conversion instructions (possibly including calls)
3707 now that all parameters have been copied out of hard registers. */
bb27eeda 3708 emit_insn (all.first_conversion_insn);
3412b298 3709
2e3f842f
L
3710 /* Estimate reload stack alignment from scalar return mode. */
3711 if (SUPPORTS_STACK_ALIGNMENT)
3712 {
3713 if (DECL_RESULT (fndecl))
3714 {
3715 tree type = TREE_TYPE (DECL_RESULT (fndecl));
ef4bddc2 3716 machine_mode mode = TYPE_MODE (type);
2e3f842f
L
3717
3718 if (mode != BLKmode
3719 && mode != VOIDmode
3720 && !AGGREGATE_TYPE_P (type))
3721 {
3722 unsigned int align = GET_MODE_ALIGNMENT (mode);
3723 if (crtl->stack_alignment_estimated < align)
3724 {
3725 gcc_assert (!crtl->stack_realign_processed);
3726 crtl->stack_alignment_estimated = align;
3727 }
3728 }
b8698a0f 3729 }
2e3f842f
L
3730 }
3731
b36a8cc2
OH
3732 /* If we are receiving a struct value address as the first argument, set up
3733 the RTL for the function result. As this might require code to convert
3734 the transmitted address to Pmode, we do this here to ensure that possible
3735 preliminary conversions of the address have been emitted already. */
6071dc7f 3736 if (all.function_result_decl)
b36a8cc2 3737 {
6071dc7f
RH
3738 tree result = DECL_RESULT (current_function_decl);
3739 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3740 rtx x;
fa8db1f7 3741
cc77ae10 3742 if (DECL_BY_REFERENCE (result))
8dcfef8f
AO
3743 {
3744 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3745 x = addr;
3746 }
cc77ae10
JM
3747 else
3748 {
8dcfef8f
AO
3749 SET_DECL_VALUE_EXPR (result,
3750 build1 (INDIRECT_REF, TREE_TYPE (result),
3751 all.function_result_decl));
cc77ae10
JM
3752 addr = convert_memory_address (Pmode, addr);
3753 x = gen_rtx_MEM (DECL_MODE (result), addr);
3754 set_mem_attributes (x, result, 1);
3755 }
8dcfef8f
AO
3756
3757 DECL_HAS_VALUE_EXPR_P (result) = 1;
3758
b36a8cc2
OH
3759 SET_DECL_RTL (result, x);
3760 }
3761
53c428d0 3762 /* We have aligned all the args, so add space for the pretend args. */
38173d38 3763 crtl->args.pretend_args_size = all.pretend_args_size;
6071dc7f 3764 all.stack_args_size.constant += all.extra_pretend_bytes;
38173d38 3765 crtl->args.size = all.stack_args_size.constant;
6f086dfc
RS
3766
3767 /* Adjust function incoming argument size for alignment and
3768 minimum length. */
3769
2e4ceca5 3770 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
38173d38 3771 crtl->args.size = CEIL_ROUND (crtl->args.size,
53366450 3772 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3773
6f086dfc 3774#ifdef ARGS_GROW_DOWNWARD
38173d38 3775 crtl->args.arg_offset_rtx
477eff96 3776 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3777 : expand_expr (size_diffop (all.stack_args_size.var,
3778 size_int (-all.stack_args_size.constant)),
bbbbb16a 3779 NULL_RTX, VOIDmode, EXPAND_NORMAL));
6f086dfc 3780#else
38173d38 3781 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3782#endif
3783
3784 /* See how many bytes, if any, of its args a function should try to pop
3785 on return. */
3786
079e7538
NF
3787 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3788 TREE_TYPE (fndecl),
3789 crtl->args.size);
6f086dfc 3790
3b69d50e
RK
3791 /* For stdarg.h function, save info about
3792 regs and stack space used by the named args. */
6f086dfc 3793
d5cc9181 3794 crtl->args.info = all.args_so_far_v;
6f086dfc
RS
3795
3796 /* Set the rtx used for the function return value. Put this in its
3797 own variable so any optimizers that need this information don't have
3798 to include tree.h. Do this here so it gets done when an inlined
3799 function gets output. */
3800
38173d38 3801 crtl->return_rtx
19e7881c
MM
3802 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3803 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3804
3805 /* If scalar return value was computed in a pseudo-reg, or was a named
3806 return value that got dumped to the stack, copy that to the hard
3807 return register. */
3808 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3809 {
3810 tree decl_result = DECL_RESULT (fndecl);
3811 rtx decl_rtl = DECL_RTL (decl_result);
3812
3813 if (REG_P (decl_rtl)
3814 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3815 : DECL_REGISTER (decl_result))
3816 {
3817 rtx real_decl_rtl;
3818
1d636cc6
RG
3819 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3820 fndecl, true);
d5e254e1
IE
3821 if (chkp_function_instrumented_p (fndecl))
3822 crtl->return_bnd
3823 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3824 fndecl, true);
ce5e43d0 3825 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
38173d38 3826 /* The delay slot scheduler assumes that crtl->return_rtx
ce5e43d0
JJ
3827 holds the hard register containing the return value, not a
3828 temporary pseudo. */
38173d38 3829 crtl->return_rtx = real_decl_rtl;
ce5e43d0
JJ
3830 }
3831 }
6f086dfc 3832}
4744afba
RH
3833
3834/* A subroutine of gimplify_parameters, invoked via walk_tree.
3835 For all seen types, gimplify their sizes. */
3836
3837static tree
3838gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3839{
3840 tree t = *tp;
3841
3842 *walk_subtrees = 0;
3843 if (TYPE_P (t))
3844 {
3845 if (POINTER_TYPE_P (t))
3846 *walk_subtrees = 1;
ad50bc8d
RH
3847 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3848 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba 3849 {
726a989a 3850 gimplify_type_sizes (t, (gimple_seq *) data);
4744afba
RH
3851 *walk_subtrees = 1;
3852 }
3853 }
3854
3855 return NULL;
3856}
3857
3858/* Gimplify the parameter list for current_function_decl. This involves
3859 evaluating SAVE_EXPRs of variable sized parameters and generating code
726a989a
RB
3860 to implement callee-copies reference parameters. Returns a sequence of
3861 statements to add to the beginning of the function. */
4744afba 3862
726a989a 3863gimple_seq
4744afba
RH
3864gimplify_parameters (void)
3865{
3866 struct assign_parm_data_all all;
3b3f318a 3867 tree parm;
726a989a 3868 gimple_seq stmts = NULL;
9771b263 3869 vec<tree> fnargs;
3b3f318a 3870 unsigned i;
4744afba
RH
3871
3872 assign_parms_initialize_all (&all);
3873 fnargs = assign_parms_augmented_arg_list (&all);
3874
9771b263 3875 FOR_EACH_VEC_ELT (fnargs, i, parm)
4744afba
RH
3876 {
3877 struct assign_parm_data_one data;
3878
3879 /* Extract the type of PARM; adjust it according to ABI. */
3880 assign_parm_find_data_types (&all, parm, &data);
3881
3882 /* Early out for errors and void parameters. */
3883 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3884 continue;
3885
3886 /* Update info on where next arg arrives in registers. */
d5cc9181 3887 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3888 data.passed_type, data.named_arg);
4744afba
RH
3889
3890 /* ??? Once upon a time variable_size stuffed parameter list
3891 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3892 turned out to be less than manageable in the gimple world.
3893 Now we have to hunt them down ourselves. */
3894 walk_tree_without_duplicates (&data.passed_type,
3895 gimplify_parm_type, &stmts);
3896
b38f3813 3897 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4744afba
RH
3898 {
3899 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3900 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3901 }
3902
3903 if (data.passed_pointer)
3904 {
3905 tree type = TREE_TYPE (data.passed_type);
d5cc9181 3906 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4744afba
RH
3907 type, data.named_arg))
3908 {
3909 tree local, t;
3910
b38f3813 3911 /* For constant-sized objects, this is trivial; for
4744afba 3912 variable-sized objects, we have to play games. */
b38f3813
EB
3913 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3914 && !(flag_stack_check == GENERIC_STACK_CHECK
3915 && compare_tree_int (DECL_SIZE_UNIT (parm),
3916 STACK_CHECK_MAX_VAR_SIZE) > 0))
4744afba 3917 {
5dac1dae 3918 local = create_tmp_var (type, get_name (parm));
4744afba 3919 DECL_IGNORED_P (local) = 0;
04487a2f
JJ
3920 /* If PARM was addressable, move that flag over
3921 to the local copy, as its address will be taken,
37609bf0
RG
3922 not the PARMs. Keep the parms address taken
3923 as we'll query that flag during gimplification. */
04487a2f 3924 if (TREE_ADDRESSABLE (parm))
37609bf0 3925 TREE_ADDRESSABLE (local) = 1;
5dac1dae
JJ
3926 else if (TREE_CODE (type) == COMPLEX_TYPE
3927 || TREE_CODE (type) == VECTOR_TYPE)
3928 DECL_GIMPLE_REG_P (local) = 1;
4744afba
RH
3929 }
3930 else
3931 {
5039610b 3932 tree ptr_type, addr;
4744afba
RH
3933
3934 ptr_type = build_pointer_type (type);
c98b08ff 3935 addr = create_tmp_reg (ptr_type, get_name (parm));
4744afba
RH
3936 DECL_IGNORED_P (addr) = 0;
3937 local = build_fold_indirect_ref (addr);
3938
e79983f4 3939 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
c28f4b5c 3940 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
13e49da9
TV
3941 size_int (DECL_ALIGN (parm)));
3942
d3c12306 3943 /* The call has been built for a variable-sized object. */
63d2a353 3944 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4744afba 3945 t = fold_convert (ptr_type, t);
726a989a 3946 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4744afba
RH
3947 gimplify_and_add (t, &stmts);
3948 }
3949
726a989a 3950 gimplify_assign (local, parm, &stmts);
4744afba 3951
833b3afe
DB
3952 SET_DECL_VALUE_EXPR (parm, local);
3953 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3954 }
3955 }
3956 }
3957
9771b263 3958 fnargs.release ();
3b3f318a 3959
4744afba
RH
3960 return stmts;
3961}
75dc3319 3962\f
6f086dfc
RS
3963/* Compute the size and offset from the start of the stacked arguments for a
3964 parm passed in mode PASSED_MODE and with type TYPE.
3965
3966 INITIAL_OFFSET_PTR points to the current offset into the stacked
3967 arguments.
3968
e7949876
AM
3969 The starting offset and size for this parm are returned in
3970 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3971 nonzero, the offset is that of stack slot, which is returned in
3972 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3973 padding required from the initial offset ptr to the stack slot.
6f086dfc 3974
cc2902df 3975 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3976 never be set if REG_PARM_STACK_SPACE is not defined.
3977
2e4ceca5
UW
3978 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3979 for arguments which are passed in registers.
3980
6f086dfc
RS
3981 FNDECL is the function in which the argument was defined.
3982
3983 There are two types of rounding that are done. The first, controlled by
c2ed6cf8
NF
3984 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3985 argument list to be aligned to the specific boundary (in bits). This
3986 rounding affects the initial and starting offsets, but not the argument
3987 size.
6f086dfc
RS
3988
3989 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3990 optionally rounds the size of the parm to PARM_BOUNDARY. The
3991 initial offset is not affected by this rounding, while the size always
3992 is and the starting offset may be. */
3993
e7949876
AM
3994/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3995 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3996 callers pass in the total size of args so far as
e7949876 3997 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3998
6f086dfc 3999void
ef4bddc2 4000locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
2e4ceca5
UW
4001 int reg_parm_stack_space, int partial,
4002 tree fndecl ATTRIBUTE_UNUSED,
fa8db1f7
AJ
4003 struct args_size *initial_offset_ptr,
4004 struct locate_and_pad_arg_data *locate)
6f086dfc 4005{
e7949876
AM
4006 tree sizetree;
4007 enum direction where_pad;
123148b5 4008 unsigned int boundary, round_boundary;
e7949876 4009 int part_size_in_regs;
6f086dfc 4010
6f086dfc
RS
4011 /* If we have found a stack parm before we reach the end of the
4012 area reserved for registers, skip that area. */
4013 if (! in_regs)
4014 {
6f086dfc
RS
4015 if (reg_parm_stack_space > 0)
4016 {
4017 if (initial_offset_ptr->var)
4018 {
4019 initial_offset_ptr->var
4020 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 4021 ssize_int (reg_parm_stack_space));
6f086dfc
RS
4022 initial_offset_ptr->constant = 0;
4023 }
4024 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4025 initial_offset_ptr->constant = reg_parm_stack_space;
4026 }
4027 }
6f086dfc 4028
78a52f11 4029 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
4030
4031 sizetree
4032 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4033 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
c2ed6cf8 4034 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
123148b5
BS
4035 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4036 type);
6e985040 4037 locate->where_pad = where_pad;
2e3f842f
L
4038
4039 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4040 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4041 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4042
bfc45551 4043 locate->boundary = boundary;
6f086dfc 4044
2e3f842f
L
4045 if (SUPPORTS_STACK_ALIGNMENT)
4046 {
4047 /* stack_alignment_estimated can't change after stack has been
4048 realigned. */
4049 if (crtl->stack_alignment_estimated < boundary)
4050 {
4051 if (!crtl->stack_realign_processed)
4052 crtl->stack_alignment_estimated = boundary;
4053 else
4054 {
4055 /* If stack is realigned and stack alignment value
4056 hasn't been finalized, it is OK not to increase
4057 stack_alignment_estimated. The bigger alignment
4058 requirement is recorded in stack_alignment_needed
4059 below. */
4060 gcc_assert (!crtl->stack_realign_finalized
4061 && crtl->stack_realign_needed);
4062 }
4063 }
4064 }
4065
c7e777b5
RH
4066 /* Remember if the outgoing parameter requires extra alignment on the
4067 calling function side. */
cb91fab0
JH
4068 if (crtl->stack_alignment_needed < boundary)
4069 crtl->stack_alignment_needed = boundary;
2e3f842f
L
4070 if (crtl->preferred_stack_boundary < boundary)
4071 crtl->preferred_stack_boundary = boundary;
c7e777b5 4072
6f086dfc 4073#ifdef ARGS_GROW_DOWNWARD
e7949876 4074 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 4075 if (initial_offset_ptr->var)
e7949876
AM
4076 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4077 initial_offset_ptr->var);
9dff28ab 4078
e7949876
AM
4079 {
4080 tree s2 = sizetree;
4081 if (where_pad != none
cc269bb6 4082 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 4083 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 4084 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
e7949876
AM
4085 SUB_PARM_SIZE (locate->slot_offset, s2);
4086 }
4087
4088 locate->slot_offset.constant += part_size_in_regs;
9dff28ab 4089
2e4ceca5 4090 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
4091 pad_to_arg_alignment (&locate->slot_offset, boundary,
4092 &locate->alignment_pad);
9dff28ab 4093
e7949876
AM
4094 locate->size.constant = (-initial_offset_ptr->constant
4095 - locate->slot_offset.constant);
6f086dfc 4096 if (initial_offset_ptr->var)
e7949876
AM
4097 locate->size.var = size_binop (MINUS_EXPR,
4098 size_binop (MINUS_EXPR,
4099 ssize_int (0),
4100 initial_offset_ptr->var),
4101 locate->slot_offset.var);
4102
4103 /* Pad_below needs the pre-rounded size to know how much to pad
4104 below. */
4105 locate->offset = locate->slot_offset;
4106 if (where_pad == downward)
4107 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 4108
6f086dfc 4109#else /* !ARGS_GROW_DOWNWARD */
2e4ceca5 4110 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
4111 pad_to_arg_alignment (initial_offset_ptr, boundary,
4112 &locate->alignment_pad);
4113 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
4114
4115#ifdef PUSH_ROUNDING
4116 if (passed_mode != BLKmode)
4117 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4118#endif
4119
d4b0a7a0
DE
4120 /* Pad_below needs the pre-rounded size to know how much to pad below
4121 so this must be done before rounding up. */
e7949876
AM
4122 locate->offset = locate->slot_offset;
4123 if (where_pad == downward)
4124 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 4125
6f086dfc 4126 if (where_pad != none
cc269bb6 4127 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 4128 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 4129 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
6f086dfc 4130
e7949876
AM
4131 ADD_PARM_SIZE (locate->size, sizetree);
4132
4133 locate->size.constant -= part_size_in_regs;
6f086dfc 4134#endif /* ARGS_GROW_DOWNWARD */
099590dc
MM
4135
4136#ifdef FUNCTION_ARG_OFFSET
4137 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4138#endif
6f086dfc
RS
4139}
4140
e16c591a
RS
4141/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4142 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4143
6f086dfc 4144static void
fa8db1f7
AJ
4145pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4146 struct args_size *alignment_pad)
6f086dfc 4147{
a544cfd2
KG
4148 tree save_var = NULL_TREE;
4149 HOST_WIDE_INT save_constant = 0;
a751cd5b 4150 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
4151 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4152
4153#ifdef SPARC_STACK_BOUNDARY_HACK
2358ff91
EB
4154 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4155 the real alignment of %sp. However, when it does this, the
4156 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
a594a19c
GK
4157 if (SPARC_STACK_BOUNDARY_HACK)
4158 sp_offset = 0;
4159#endif
4fc026cd 4160
6f6b8f81 4161 if (boundary > PARM_BOUNDARY)
4fc026cd
CM
4162 {
4163 save_var = offset_ptr->var;
4164 save_constant = offset_ptr->constant;
4165 }
4166
4167 alignment_pad->var = NULL_TREE;
4168 alignment_pad->constant = 0;
4fc026cd 4169
6f086dfc
RS
4170 if (boundary > BITS_PER_UNIT)
4171 {
4172 if (offset_ptr->var)
4173 {
a594a19c
GK
4174 tree sp_offset_tree = ssize_int (sp_offset);
4175 tree offset = size_binop (PLUS_EXPR,
4176 ARGS_SIZE_TREE (*offset_ptr),
4177 sp_offset_tree);
6f086dfc 4178#ifdef ARGS_GROW_DOWNWARD
a594a19c 4179 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 4180#else
a594a19c 4181 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 4182#endif
a594a19c
GK
4183
4184 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
4185 /* ARGS_SIZE_TREE includes constant term. */
4186 offset_ptr->constant = 0;
6f6b8f81 4187 if (boundary > PARM_BOUNDARY)
dd3f0101 4188 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 4189 save_var);
6f086dfc
RS
4190 }
4191 else
718fe406 4192 {
a594a19c 4193 offset_ptr->constant = -sp_offset +
6f086dfc 4194#ifdef ARGS_GROW_DOWNWARD
a594a19c 4195 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 4196#else
a594a19c 4197 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 4198#endif
6f6b8f81 4199 if (boundary > PARM_BOUNDARY)
718fe406
KH
4200 alignment_pad->constant = offset_ptr->constant - save_constant;
4201 }
6f086dfc
RS
4202 }
4203}
4204
4205static void
ef4bddc2 4206pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
6f086dfc
RS
4207{
4208 if (passed_mode != BLKmode)
4209 {
4210 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4211 offset_ptr->constant
4212 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4213 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4214 - GET_MODE_SIZE (passed_mode));
4215 }
4216 else
4217 {
4218 if (TREE_CODE (sizetree) != INTEGER_CST
4219 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4220 {
4221 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4222 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4223 /* Add it in. */
4224 ADD_PARM_SIZE (*offset_ptr, s2);
4225 SUB_PARM_SIZE (*offset_ptr, sizetree);
4226 }
4227 }
4228}
6f086dfc 4229\f
6f086dfc 4230
6fb5fa3c
DB
4231/* True if register REGNO was alive at a place where `setjmp' was
4232 called and was set more than once or is an argument. Such regs may
4233 be clobbered by `longjmp'. */
4234
4235static bool
4236regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4237{
4238 /* There appear to be cases where some local vars never reach the
4239 backend but have bogus regnos. */
4240 if (regno >= max_reg_num ())
4241 return false;
4242
4243 return ((REG_N_SETS (regno) > 1
fefa31b5
DM
4244 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4245 regno))
6fb5fa3c
DB
4246 && REGNO_REG_SET_P (setjmp_crosses, regno));
4247}
4248
4249/* Walk the tree of blocks describing the binding levels within a
4250 function and warn about variables the might be killed by setjmp or
4251 vfork. This is done after calling flow_analysis before register
4252 allocation since that will clobber the pseudo-regs to hard
4253 regs. */
4254
4255static void
4256setjmp_vars_warning (bitmap setjmp_crosses, tree block)
6f086dfc 4257{
b3694847 4258 tree decl, sub;
6de9cd9a 4259
910ad8de 4260 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
6f086dfc 4261 {
6de9cd9a 4262 if (TREE_CODE (decl) == VAR_DECL
bc41842b 4263 && DECL_RTL_SET_P (decl)
f8cfc6aa 4264 && REG_P (DECL_RTL (decl))
6fb5fa3c 4265 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4266 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
2b001724 4267 " %<longjmp%> or %<vfork%>", decl);
6f086dfc 4268 }
6de9cd9a 4269
87caf699 4270 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
6fb5fa3c 4271 setjmp_vars_warning (setjmp_crosses, sub);
6f086dfc
RS
4272}
4273
6de9cd9a 4274/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
4275 but for arguments instead of local variables. */
4276
6fb5fa3c
DB
4277static void
4278setjmp_args_warning (bitmap setjmp_crosses)
6f086dfc 4279{
b3694847 4280 tree decl;
6f086dfc 4281 for (decl = DECL_ARGUMENTS (current_function_decl);
910ad8de 4282 decl; decl = DECL_CHAIN (decl))
6f086dfc 4283 if (DECL_RTL (decl) != 0
f8cfc6aa 4284 && REG_P (DECL_RTL (decl))
6fb5fa3c 4285 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4286 warning (OPT_Wclobbered,
2b001724 4287 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
dee15844 4288 decl);
6f086dfc
RS
4289}
4290
6fb5fa3c
DB
4291/* Generate warning messages for variables live across setjmp. */
4292
b8698a0f 4293void
6fb5fa3c
DB
4294generate_setjmp_warnings (void)
4295{
4296 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4297
0cae8d31 4298 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
6fb5fa3c
DB
4299 || bitmap_empty_p (setjmp_crosses))
4300 return;
4301
4302 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4303 setjmp_args_warning (setjmp_crosses);
4304}
4305
6f086dfc 4306\f
3373692b 4307/* Reverse the order of elements in the fragment chain T of blocks,
1e3c1d95
JJ
4308 and return the new head of the chain (old last element).
4309 In addition to that clear BLOCK_SAME_RANGE flags when needed
4310 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4311 its super fragment origin. */
3373692b
JJ
4312
4313static tree
4314block_fragments_nreverse (tree t)
4315{
1e3c1d95
JJ
4316 tree prev = 0, block, next, prev_super = 0;
4317 tree super = BLOCK_SUPERCONTEXT (t);
4318 if (BLOCK_FRAGMENT_ORIGIN (super))
4319 super = BLOCK_FRAGMENT_ORIGIN (super);
3373692b
JJ
4320 for (block = t; block; block = next)
4321 {
4322 next = BLOCK_FRAGMENT_CHAIN (block);
4323 BLOCK_FRAGMENT_CHAIN (block) = prev;
1e3c1d95
JJ
4324 if ((prev && !BLOCK_SAME_RANGE (prev))
4325 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4326 != prev_super))
4327 BLOCK_SAME_RANGE (block) = 0;
4328 prev_super = BLOCK_SUPERCONTEXT (block);
4329 BLOCK_SUPERCONTEXT (block) = super;
3373692b
JJ
4330 prev = block;
4331 }
1e3c1d95
JJ
4332 t = BLOCK_FRAGMENT_ORIGIN (t);
4333 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4334 != prev_super)
4335 BLOCK_SAME_RANGE (t) = 0;
4336 BLOCK_SUPERCONTEXT (t) = super;
3373692b
JJ
4337 return prev;
4338}
4339
4340/* Reverse the order of elements in the chain T of blocks,
4341 and return the new head of the chain (old last element).
4342 Also do the same on subblocks and reverse the order of elements
4343 in BLOCK_FRAGMENT_CHAIN as well. */
4344
4345static tree
4346blocks_nreverse_all (tree t)
4347{
4348 tree prev = 0, block, next;
4349 for (block = t; block; block = next)
4350 {
4351 next = BLOCK_CHAIN (block);
4352 BLOCK_CHAIN (block) = prev;
3373692b
JJ
4353 if (BLOCK_FRAGMENT_CHAIN (block)
4354 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
1e3c1d95
JJ
4355 {
4356 BLOCK_FRAGMENT_CHAIN (block)
4357 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4358 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4359 BLOCK_SAME_RANGE (block) = 0;
4360 }
4361 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
3373692b
JJ
4362 prev = block;
4363 }
4364 return prev;
4365}
4366
4367
a20612aa
RH
4368/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4369 and create duplicate blocks. */
4370/* ??? Need an option to either create block fragments or to create
4371 abstract origin duplicates of a source block. It really depends
4372 on what optimization has been performed. */
467456d0 4373
116eebd6 4374void
fa8db1f7 4375reorder_blocks (void)
467456d0 4376{
116eebd6 4377 tree block = DECL_INITIAL (current_function_decl);
467456d0 4378
1a4450c7 4379 if (block == NULL_TREE)
116eebd6 4380 return;
fc289cd1 4381
00f96dc9 4382 auto_vec<tree, 10> block_stack;
18c038b9 4383
a20612aa 4384 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 4385 clear_block_marks (block);
a20612aa 4386
116eebd6
MM
4387 /* Prune the old trees away, so that they don't get in the way. */
4388 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4389 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 4390
a20612aa 4391 /* Recreate the block tree from the note nesting. */
116eebd6 4392 reorder_blocks_1 (get_insns (), block, &block_stack);
3373692b 4393 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
467456d0
RS
4394}
4395
a20612aa 4396/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 4397
6de9cd9a
DN
4398void
4399clear_block_marks (tree block)
cc1fe44f 4400{
a20612aa 4401 while (block)
cc1fe44f 4402 {
a20612aa 4403 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 4404 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 4405 block = BLOCK_CHAIN (block);
cc1fe44f
DD
4406 }
4407}
4408
0a1c58a2 4409static void
691fe203
DM
4410reorder_blocks_1 (rtx_insn *insns, tree current_block,
4411 vec<tree> *p_block_stack)
0a1c58a2 4412{
691fe203 4413 rtx_insn *insn;
1e3c1d95 4414 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
0a1c58a2
JL
4415
4416 for (insn = insns; insn; insn = NEXT_INSN (insn))
4417 {
4b4bf941 4418 if (NOTE_P (insn))
0a1c58a2 4419 {
a38e7aa5 4420 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
0a1c58a2
JL
4421 {
4422 tree block = NOTE_BLOCK (insn);
51b7d006
DJ
4423 tree origin;
4424
3373692b
JJ
4425 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4426 origin = block;
a20612aa 4427
1e3c1d95
JJ
4428 if (prev_end)
4429 BLOCK_SAME_RANGE (prev_end) = 0;
4430 prev_end = NULL_TREE;
4431
a20612aa
RH
4432 /* If we have seen this block before, that means it now
4433 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
4434 if (TREE_ASM_WRITTEN (block))
4435 {
a20612aa 4436 tree new_block = copy_node (block);
a20612aa 4437
1e3c1d95 4438 BLOCK_SAME_RANGE (new_block) = 0;
a20612aa
RH
4439 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4440 BLOCK_FRAGMENT_CHAIN (new_block)
4441 = BLOCK_FRAGMENT_CHAIN (origin);
4442 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4443
4444 NOTE_BLOCK (insn) = new_block;
4445 block = new_block;
0a1c58a2 4446 }
a20612aa 4447
1e3c1d95
JJ
4448 if (prev_beg == current_block && prev_beg)
4449 BLOCK_SAME_RANGE (block) = 1;
4450
4451 prev_beg = origin;
4452
0a1c58a2
JL
4453 BLOCK_SUBBLOCKS (block) = 0;
4454 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
4455 /* When there's only one block for the entire function,
4456 current_block == block and we mustn't do this, it
4457 will cause infinite recursion. */
4458 if (block != current_block)
4459 {
1e3c1d95 4460 tree super;
51b7d006 4461 if (block != origin)
1e3c1d95
JJ
4462 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4463 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4464 (origin))
4465 == current_block);
9771b263 4466 if (p_block_stack->is_empty ())
1e3c1d95
JJ
4467 super = current_block;
4468 else
4469 {
9771b263 4470 super = p_block_stack->last ();
1e3c1d95
JJ
4471 gcc_assert (super == current_block
4472 || BLOCK_FRAGMENT_ORIGIN (super)
4473 == current_block);
4474 }
4475 BLOCK_SUPERCONTEXT (block) = super;
339a28b9
ZW
4476 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4477 BLOCK_SUBBLOCKS (current_block) = block;
51b7d006 4478 current_block = origin;
339a28b9 4479 }
9771b263 4480 p_block_stack->safe_push (block);
0a1c58a2 4481 }
a38e7aa5 4482 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
0a1c58a2 4483 {
9771b263 4484 NOTE_BLOCK (insn) = p_block_stack->pop ();
0a1c58a2 4485 current_block = BLOCK_SUPERCONTEXT (current_block);
1e3c1d95
JJ
4486 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4487 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4488 prev_beg = NULL_TREE;
4489 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4490 ? NOTE_BLOCK (insn) : NULL_TREE;
0a1c58a2
JL
4491 }
4492 }
1e3c1d95
JJ
4493 else
4494 {
4495 prev_beg = NULL_TREE;
4496 if (prev_end)
4497 BLOCK_SAME_RANGE (prev_end) = 0;
4498 prev_end = NULL_TREE;
4499 }
0a1c58a2
JL
4500 }
4501}
4502
467456d0
RS
4503/* Reverse the order of elements in the chain T of blocks,
4504 and return the new head of the chain (old last element). */
4505
6de9cd9a 4506tree
fa8db1f7 4507blocks_nreverse (tree t)
467456d0 4508{
3373692b
JJ
4509 tree prev = 0, block, next;
4510 for (block = t; block; block = next)
467456d0 4511 {
3373692b
JJ
4512 next = BLOCK_CHAIN (block);
4513 BLOCK_CHAIN (block) = prev;
4514 prev = block;
467456d0
RS
4515 }
4516 return prev;
4517}
4518
61e46a7d
NF
4519/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4520 by modifying the last node in chain 1 to point to chain 2. */
4521
4522tree
4523block_chainon (tree op1, tree op2)
4524{
4525 tree t1;
4526
4527 if (!op1)
4528 return op2;
4529 if (!op2)
4530 return op1;
4531
4532 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4533 continue;
4534 BLOCK_CHAIN (t1) = op2;
4535
4536#ifdef ENABLE_TREE_CHECKING
4537 {
4538 tree t2;
4539 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4540 gcc_assert (t2 != t1);
4541 }
4542#endif
4543
4544 return op1;
4545}
4546
18c038b9
MM
4547/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4548 non-NULL, list them all into VECTOR, in a depth-first preorder
4549 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 4550 blocks. */
467456d0
RS
4551
4552static int
fa8db1f7 4553all_blocks (tree block, tree *vector)
467456d0 4554{
b2a59b15
MS
4555 int n_blocks = 0;
4556
a84efb51
JO
4557 while (block)
4558 {
4559 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 4560
a84efb51
JO
4561 /* Record this block. */
4562 if (vector)
4563 vector[n_blocks] = block;
b2a59b15 4564
a84efb51 4565 ++n_blocks;
718fe406 4566
a84efb51
JO
4567 /* Record the subblocks, and their subblocks... */
4568 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4569 vector ? vector + n_blocks : 0);
4570 block = BLOCK_CHAIN (block);
4571 }
467456d0
RS
4572
4573 return n_blocks;
4574}
18c038b9
MM
4575
4576/* Return a vector containing all the blocks rooted at BLOCK. The
4577 number of elements in the vector is stored in N_BLOCKS_P. The
4578 vector is dynamically allocated; it is the caller's responsibility
4579 to call `free' on the pointer returned. */
718fe406 4580
18c038b9 4581static tree *
fa8db1f7 4582get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
4583{
4584 tree *block_vector;
4585
4586 *n_blocks_p = all_blocks (block, NULL);
5ed6ace5 4587 block_vector = XNEWVEC (tree, *n_blocks_p);
18c038b9
MM
4588 all_blocks (block, block_vector);
4589
4590 return block_vector;
4591}
4592
f83b236e 4593static GTY(()) int next_block_index = 2;
18c038b9
MM
4594
4595/* Set BLOCK_NUMBER for all the blocks in FN. */
4596
4597void
fa8db1f7 4598number_blocks (tree fn)
18c038b9
MM
4599{
4600 int i;
4601 int n_blocks;
4602 tree *block_vector;
4603
4604 /* For SDB and XCOFF debugging output, we start numbering the blocks
4605 from 1 within each function, rather than keeping a running
4606 count. */
4607#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
4608 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4609 next_block_index = 1;
18c038b9
MM
4610#endif
4611
4612 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4613
4614 /* The top-level BLOCK isn't numbered at all. */
4615 for (i = 1; i < n_blocks; ++i)
4616 /* We number the blocks from two. */
4617 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4618
4619 free (block_vector);
4620
4621 return;
4622}
df8992f8
RH
4623
4624/* If VAR is present in a subblock of BLOCK, return the subblock. */
4625
24e47c76 4626DEBUG_FUNCTION tree
fa8db1f7 4627debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
4628{
4629 tree t;
4630
4631 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4632 if (t == var)
4633 return block;
4634
4635 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4636 {
4637 tree ret = debug_find_var_in_block_tree (var, t);
4638 if (ret)
4639 return ret;
4640 }
4641
4642 return NULL_TREE;
4643}
467456d0 4644\f
db2960f4
SL
4645/* Keep track of whether we're in a dummy function context. If we are,
4646 we don't want to invoke the set_current_function hook, because we'll
4647 get into trouble if the hook calls target_reinit () recursively or
4648 when the initial initialization is not yet complete. */
4649
4650static bool in_dummy_function;
4651
ab442df7
MM
4652/* Invoke the target hook when setting cfun. Update the optimization options
4653 if the function uses different options than the default. */
db2960f4
SL
4654
4655static void
4656invoke_set_current_function_hook (tree fndecl)
4657{
4658 if (!in_dummy_function)
ab442df7
MM
4659 {
4660 tree opts = ((fndecl)
4661 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4662 : optimization_default_node);
4663
4664 if (!opts)
4665 opts = optimization_default_node;
4666
4667 /* Change optimization options if needed. */
4668 if (optimization_current_node != opts)
4669 {
4670 optimization_current_node = opts;
46625112 4671 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
ab442df7
MM
4672 }
4673
892c4745 4674 targetm.set_current_function (fndecl);
4b1baac8 4675 this_fn_optabs = this_target_optabs;
135204dd 4676
4b1baac8 4677 if (opts != optimization_default_node)
135204dd 4678 {
4b1baac8
RS
4679 init_tree_optimization_optabs (opts);
4680 if (TREE_OPTIMIZATION_OPTABS (opts))
4681 this_fn_optabs = (struct target_optabs *)
4682 TREE_OPTIMIZATION_OPTABS (opts);
135204dd 4683 }
ab442df7 4684 }
db2960f4
SL
4685}
4686
4687/* cfun should never be set directly; use this function. */
4688
4689void
4690set_cfun (struct function *new_cfun)
4691{
4692 if (cfun != new_cfun)
4693 {
4694 cfun = new_cfun;
4695 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4696 }
4697}
4698
db2960f4
SL
4699/* Initialized with NOGC, making this poisonous to the garbage collector. */
4700
9771b263 4701static vec<function_p> cfun_stack;
db2960f4 4702
af16bc76
MJ
4703/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4704 current_function_decl accordingly. */
db2960f4
SL
4705
4706void
4707push_cfun (struct function *new_cfun)
4708{
af16bc76
MJ
4709 gcc_assert ((!cfun && !current_function_decl)
4710 || (cfun && current_function_decl == cfun->decl));
9771b263 4711 cfun_stack.safe_push (cfun);
af16bc76 4712 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4
SL
4713 set_cfun (new_cfun);
4714}
4715
af16bc76 4716/* Pop cfun from the stack. Also set current_function_decl accordingly. */
db2960f4
SL
4717
4718void
4719pop_cfun (void)
4720{
9771b263 4721 struct function *new_cfun = cfun_stack.pop ();
af16bc76
MJ
4722 /* When in_dummy_function, we do have a cfun but current_function_decl is
4723 NULL. We also allow pushing NULL cfun and subsequently changing
4724 current_function_decl to something else and have both restored by
4725 pop_cfun. */
4726 gcc_checking_assert (in_dummy_function
4727 || !cfun
4728 || current_function_decl == cfun->decl);
38d34676 4729 set_cfun (new_cfun);
af16bc76 4730 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4 4731}
3e87758a
RL
4732
4733/* Return value of funcdef and increase it. */
4734int
b8698a0f 4735get_next_funcdef_no (void)
3e87758a
RL
4736{
4737 return funcdef_no++;
4738}
4739
903d1e67
XDL
4740/* Return value of funcdef. */
4741int
4742get_last_funcdef_no (void)
4743{
4744 return funcdef_no;
4745}
4746
3a70d621 4747/* Allocate a function structure for FNDECL and set its contents
db2960f4
SL
4748 to the defaults. Set cfun to the newly-allocated object.
4749 Some of the helper functions invoked during initialization assume
4750 that cfun has already been set. Therefore, assign the new object
4751 directly into cfun and invoke the back end hook explicitly at the
4752 very end, rather than initializing a temporary and calling set_cfun
4753 on it.
182e0d71
AK
4754
4755 ABSTRACT_P is true if this is a function that will never be seen by
4756 the middle-end. Such functions are front-end concepts (like C++
4757 function templates) that do not correspond directly to functions
4758 placed in object files. */
7a80cf9a 4759
3a70d621 4760void
182e0d71 4761allocate_struct_function (tree fndecl, bool abstract_p)
6f086dfc 4762{
6de9cd9a 4763 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 4764
766090c2 4765 cfun = ggc_cleared_alloc<function> ();
b384405b 4766
3a70d621 4767 init_eh_for_function ();
6f086dfc 4768
3a70d621
RH
4769 if (init_machine_status)
4770 cfun->machine = (*init_machine_status) ();
e2ecd91c 4771
7c800926
KT
4772#ifdef OVERRIDE_ABI_FORMAT
4773 OVERRIDE_ABI_FORMAT (fndecl);
4774#endif
4775
81464b2c 4776 if (fndecl != NULL_TREE)
3a70d621 4777 {
db2960f4
SL
4778 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4779 cfun->decl = fndecl;
70cf5bc1 4780 current_function_funcdef_no = get_next_funcdef_no ();
5b9db1bc
MJ
4781 }
4782
4783 invoke_set_current_function_hook (fndecl);
db2960f4 4784
5b9db1bc
MJ
4785 if (fndecl != NULL_TREE)
4786 {
4787 tree result = DECL_RESULT (fndecl);
182e0d71 4788 if (!abstract_p && aggregate_value_p (result, fndecl))
db2960f4 4789 {
3a70d621 4790#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4791 cfun->returns_pcc_struct = 1;
3a70d621 4792#endif
e3b5732b 4793 cfun->returns_struct = 1;
db2960f4
SL
4794 }
4795
f38958e8 4796 cfun->stdarg = stdarg_p (fntype);
b8698a0f 4797
db2960f4
SL
4798 /* Assume all registers in stdarg functions need to be saved. */
4799 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4800 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
8f4f502f
EB
4801
4802 /* ??? This could be set on a per-function basis by the front-end
4803 but is this worth the hassle? */
4804 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
d764963b 4805 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
0b37ba8a
AK
4806
4807 if (!profile_flag && !flag_instrument_function_entry_exit)
4808 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
3a70d621 4809 }
db2960f4
SL
4810}
4811
4812/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4813 instead of just setting it. */
9d30f3c1 4814
db2960f4
SL
4815void
4816push_struct_function (tree fndecl)
4817{
af16bc76
MJ
4818 /* When in_dummy_function we might be in the middle of a pop_cfun and
4819 current_function_decl and cfun may not match. */
4820 gcc_assert (in_dummy_function
4821 || (!cfun && !current_function_decl)
4822 || (cfun && current_function_decl == cfun->decl));
9771b263 4823 cfun_stack.safe_push (cfun);
af16bc76 4824 current_function_decl = fndecl;
182e0d71 4825 allocate_struct_function (fndecl, false);
3a70d621 4826}
6f086dfc 4827
8f4f502f 4828/* Reset crtl and other non-struct-function variables to defaults as
2067c116 4829 appropriate for emitting rtl at the start of a function. */
6f086dfc 4830
3a70d621 4831static void
db2960f4 4832prepare_function_start (void)
3a70d621 4833{
614d5bd8 4834 gcc_assert (!get_last_insn ());
fb0703f7 4835 init_temp_slots ();
0de456a5 4836 init_emit ();
bd60bab2 4837 init_varasm_status ();
0de456a5 4838 init_expr ();
bf08ebeb 4839 default_rtl_profile ();
6f086dfc 4840
a11e0df4 4841 if (flag_stack_usage_info)
d3c12306 4842 {
766090c2 4843 cfun->su = ggc_cleared_alloc<stack_usage> ();
d3c12306
EB
4844 cfun->su->static_stack_size = -1;
4845 }
4846
3a70d621 4847 cse_not_expected = ! optimize;
6f086dfc 4848
3a70d621
RH
4849 /* Caller save not needed yet. */
4850 caller_save_needed = 0;
6f086dfc 4851
3a70d621
RH
4852 /* We haven't done register allocation yet. */
4853 reg_renumber = 0;
6f086dfc 4854
b384405b
BS
4855 /* Indicate that we have not instantiated virtual registers yet. */
4856 virtuals_instantiated = 0;
4857
1b3d8f8a
GK
4858 /* Indicate that we want CONCATs now. */
4859 generating_concat_p = 1;
4860
b384405b
BS
4861 /* Indicate we have no need of a frame pointer yet. */
4862 frame_pointer_needed = 0;
b384405b
BS
4863}
4864
4865/* Initialize the rtl expansion mechanism so that we can do simple things
4866 like generate sequences. This is used to provide a context during global
db2960f4
SL
4867 initialization of some passes. You must call expand_dummy_function_end
4868 to exit this context. */
4869
b384405b 4870void
fa8db1f7 4871init_dummy_function_start (void)
b384405b 4872{
db2960f4
SL
4873 gcc_assert (!in_dummy_function);
4874 in_dummy_function = true;
4875 push_struct_function (NULL_TREE);
4876 prepare_function_start ();
b384405b
BS
4877}
4878
4879/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4880 and initialize static variables for generating RTL for the statements
4881 of the function. */
4882
4883void
fa8db1f7 4884init_function_start (tree subr)
b384405b 4885{
db2960f4
SL
4886 if (subr && DECL_STRUCT_FUNCTION (subr))
4887 set_cfun (DECL_STRUCT_FUNCTION (subr));
4888 else
182e0d71 4889 allocate_struct_function (subr, false);
b9b5f433
JH
4890
4891 /* Initialize backend, if needed. */
4892 initialize_rtl ();
4893
db2960f4 4894 prepare_function_start ();
2c7eebae 4895 decide_function_section (subr);
b384405b 4896
6f086dfc
RS
4897 /* Warn if this value is an aggregate type,
4898 regardless of which calling convention we are using for it. */
ccf08a6e
DD
4899 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4900 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 4901}
5c7675e9 4902
7d69de61
RH
4903/* Expand code to verify the stack_protect_guard. This is invoked at
4904 the end of a function to be protected. */
4905
4906#ifndef HAVE_stack_protect_test
b76be05e
JJ
4907# define HAVE_stack_protect_test 0
4908# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
7d69de61
RH
4909#endif
4910
b755446c 4911void
7d69de61
RH
4912stack_protect_epilogue (void)
4913{
4914 tree guard_decl = targetm.stack_protect_guard ();
19f8b229 4915 rtx_code_label *label = gen_label_rtx ();
7d69de61
RH
4916 rtx x, y, tmp;
4917
08d4cc33
RH
4918 x = expand_normal (crtl->stack_protect_guard);
4919 y = expand_normal (guard_decl);
7d69de61
RH
4920
4921 /* Allow the target to compare Y with X without leaking either into
4922 a register. */
fedfecef 4923 switch ((int) (HAVE_stack_protect_test != 0))
7d69de61
RH
4924 {
4925 case 1:
3aebbe5f 4926 tmp = gen_stack_protect_test (x, y, label);
7d69de61
RH
4927 if (tmp)
4928 {
4929 emit_insn (tmp);
7d69de61
RH
4930 break;
4931 }
4932 /* FALLTHRU */
4933
4934 default:
4935 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4936 break;
4937 }
4938
4939 /* The noreturn predictor has been moved to the tree level. The rtl-level
4940 predictors estimate this branch about 20%, which isn't enough to get
4941 things moved out of line. Since this is the only extant case of adding
4942 a noreturn function at the rtl level, it doesn't seem worth doing ought
4943 except adding the prediction by hand. */
4944 tmp = get_last_insn ();
4945 if (JUMP_P (tmp))
9f215bf5 4946 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
7d69de61 4947
b3c144a3
SB
4948 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4949 free_temp_slots ();
7d69de61
RH
4950 emit_label (label);
4951}
4952\f
6f086dfc
RS
4953/* Start the RTL for a new function, and set variables used for
4954 emitting RTL.
4955 SUBR is the FUNCTION_DECL node.
4956 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4957 the function's parameters, which must be run at any return statement. */
4958
4959void
b79c5284 4960expand_function_start (tree subr)
6f086dfc 4961{
6f086dfc
RS
4962 /* Make sure volatile mem refs aren't considered
4963 valid operands of arithmetic insns. */
4964 init_recog_no_volatile ();
4965
e3b5732b 4966 crtl->profile
70f4f91c
WC
4967 = (profile_flag
4968 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4969
e3b5732b 4970 crtl->limit_stack
a157febd
GK
4971 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4972
52a11cbf
RH
4973 /* Make the label for return statements to jump to. Do not special
4974 case machines with special return instructions -- they will be
4975 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 4976 return_label = gen_label_rtx ();
6f086dfc
RS
4977
4978 /* Initialize rtx used to return the value. */
4979 /* Do this before assign_parms so that we copy the struct value address
4980 before any library calls that assign parms might generate. */
4981
4982 /* Decide whether to return the value in memory or in a register. */
61f71b34 4983 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
4984 {
4985 /* Returning something that won't go in a register. */
b3694847 4986 rtx value_address = 0;
6f086dfc
RS
4987
4988#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4989 if (cfun->returns_pcc_struct)
6f086dfc
RS
4990 {
4991 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4992 value_address = assemble_static_space (size);
4993 }
4994 else
4995#endif
4996 {
2225b57c 4997 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
6f086dfc
RS
4998 /* Expect to be passed the address of a place to store the value.
4999 If it is passed as an argument, assign_parms will take care of
5000 it. */
61f71b34 5001 if (sv)
6f086dfc
RS
5002 {
5003 value_address = gen_reg_rtx (Pmode);
61f71b34 5004 emit_move_insn (value_address, sv);
6f086dfc
RS
5005 }
5006 }
5007 if (value_address)
ccdecf58 5008 {
01c98570
JM
5009 rtx x = value_address;
5010 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
5011 {
5012 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
5013 set_mem_attributes (x, DECL_RESULT (subr), 1);
5014 }
abde42f7 5015 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 5016 }
6f086dfc
RS
5017 }
5018 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5019 /* If return mode is void, this decl rtl should not be used. */
19e7881c 5020 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 5021 else
a53e14c0 5022 {
d5bf1143
RH
5023 /* Compute the return values into a pseudo reg, which we will copy
5024 into the true return register after the cleanups are done. */
bef5d8b6
RS
5025 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5026 if (TYPE_MODE (return_type) != BLKmode
5027 && targetm.calls.return_in_msb (return_type))
5028 /* expand_function_end will insert the appropriate padding in
5029 this case. Use the return value's natural (unpadded) mode
5030 within the function proper. */
5031 SET_DECL_RTL (DECL_RESULT (subr),
5032 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 5033 else
0bccc606 5034 {
bef5d8b6
RS
5035 /* In order to figure out what mode to use for the pseudo, we
5036 figure out what the mode of the eventual return register will
5037 actually be, and use that. */
1d636cc6 5038 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
bef5d8b6
RS
5039
5040 /* Structures that are returned in registers are not
5041 aggregate_value_p, so we may see a PARALLEL or a REG. */
5042 if (REG_P (hard_reg))
5043 SET_DECL_RTL (DECL_RESULT (subr),
5044 gen_reg_rtx (GET_MODE (hard_reg)));
5045 else
5046 {
5047 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5048 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
5049 }
0bccc606 5050 }
a53e14c0 5051
084a1106
JDA
5052 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5053 result to the real return register(s). */
5054 DECL_REGISTER (DECL_RESULT (subr)) = 1;
d5e254e1
IE
5055
5056 if (chkp_function_instrumented_p (current_function_decl))
5057 {
5058 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5059 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5060 subr, 1);
5061 SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
5062 }
a53e14c0 5063 }
6f086dfc
RS
5064
5065 /* Initialize rtx for parameters and local variables.
5066 In some cases this requires emitting insns. */
0d1416c6 5067 assign_parms (subr);
6f086dfc 5068
6de9cd9a
DN
5069 /* If function gets a static chain arg, store it. */
5070 if (cfun->static_chain_decl)
5071 {
7e140280 5072 tree parm = cfun->static_chain_decl;
531ca746 5073 rtx local, chain, insn;
7e140280 5074
531ca746
RH
5075 local = gen_reg_rtx (Pmode);
5076 chain = targetm.calls.static_chain (current_function_decl, true);
5077
5078 set_decl_incoming_rtl (parm, chain, false);
7e140280 5079 SET_DECL_RTL (parm, local);
7e140280 5080 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 5081
531ca746
RH
5082 insn = emit_move_insn (local, chain);
5083
5084 /* Mark the register as eliminable, similar to parameters. */
5085 if (MEM_P (chain)
5086 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
7543f918 5087 set_dst_reg_note (insn, REG_EQUIV, chain, local);
3fd48b12
EB
5088
5089 /* If we aren't optimizing, save the static chain onto the stack. */
5090 if (!optimize)
5091 {
5092 tree saved_static_chain_decl
5093 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5094 DECL_NAME (parm), TREE_TYPE (parm));
5095 rtx saved_static_chain_rtx
5096 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5097 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5098 emit_move_insn (saved_static_chain_rtx, chain);
5099 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5100 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5101 }
6de9cd9a
DN
5102 }
5103
5104 /* If the function receives a non-local goto, then store the
5105 bits we need to restore the frame pointer. */
5106 if (cfun->nonlocal_goto_save_area)
5107 {
5108 tree t_save;
5109 rtx r_save;
5110
4846b435 5111 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
ca5f4331 5112 gcc_assert (DECL_RTL_SET_P (var));
6de9cd9a 5113
6bbec3e1
L
5114 t_save = build4 (ARRAY_REF,
5115 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
3244e67d
RS
5116 cfun->nonlocal_goto_save_area,
5117 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 5118 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
6bbec3e1 5119 gcc_assert (GET_MODE (r_save) == Pmode);
f0c51a1e 5120
88280cf9 5121 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
6de9cd9a
DN
5122 update_nonlocal_goto_save_area ();
5123 }
f0c51a1e 5124
6f086dfc
RS
5125 /* The following was moved from init_function_start.
5126 The move is supposed to make sdb output more accurate. */
5127 /* Indicate the beginning of the function body,
5128 as opposed to parm setup. */
2e040219 5129 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 5130
ede497cf
SB
5131 gcc_assert (NOTE_P (get_last_insn ()));
5132
6f086dfc
RS
5133 parm_birth_insn = get_last_insn ();
5134
e3b5732b 5135 if (crtl->profile)
f6f315fe 5136 {
f6f315fe 5137#ifdef PROFILE_HOOK
df696a75 5138 PROFILE_HOOK (current_function_funcdef_no);
411707f4 5139#endif
f6f315fe 5140 }
411707f4 5141
6d3cc8f0
EB
5142 /* If we are doing generic stack checking, the probe should go here. */
5143 if (flag_stack_check == GENERIC_STACK_CHECK)
ede497cf 5144 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
5145}
5146\f
49ad7cfa
BS
5147/* Undo the effects of init_dummy_function_start. */
5148void
fa8db1f7 5149expand_dummy_function_end (void)
49ad7cfa 5150{
db2960f4
SL
5151 gcc_assert (in_dummy_function);
5152
49ad7cfa
BS
5153 /* End any sequences that failed to be closed due to syntax errors. */
5154 while (in_sequence_p ())
5155 end_sequence ();
5156
5157 /* Outside function body, can't compute type's actual size
5158 until next function's body starts. */
fa51b01b 5159
01d939e8
BS
5160 free_after_parsing (cfun);
5161 free_after_compilation (cfun);
db2960f4
SL
5162 pop_cfun ();
5163 in_dummy_function = false;
49ad7cfa
BS
5164}
5165
d5e254e1 5166/* Helper for diddle_return_value. */
bd695e1e
RH
5167
5168void
d5e254e1 5169diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
bd695e1e 5170{
c13fde05
RH
5171 if (! outgoing)
5172 return;
bd695e1e 5173
f8cfc6aa 5174 if (REG_P (outgoing))
c13fde05
RH
5175 (*doit) (outgoing, arg);
5176 else if (GET_CODE (outgoing) == PARALLEL)
5177 {
5178 int i;
bd695e1e 5179
c13fde05
RH
5180 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5181 {
5182 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5183
f8cfc6aa 5184 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 5185 (*doit) (x, arg);
bd695e1e
RH
5186 }
5187 }
5188}
5189
d5e254e1
IE
5190/* Call DOIT for each hard register used as a return value from
5191 the current function. */
5192
5193void
5194diddle_return_value (void (*doit) (rtx, void *), void *arg)
5195{
5196 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5197 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5198}
5199
c13fde05 5200static void
fa8db1f7 5201do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 5202{
c41c1387 5203 emit_clobber (reg);
c13fde05
RH
5204}
5205
5206void
fa8db1f7 5207clobber_return_register (void)
c13fde05
RH
5208{
5209 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
5210
5211 /* In case we do use pseudo to return value, clobber it too. */
5212 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5213 {
5214 tree decl_result = DECL_RESULT (current_function_decl);
5215 rtx decl_rtl = DECL_RTL (decl_result);
5216 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5217 {
5218 do_clobber_return_reg (decl_rtl, NULL);
5219 }
5220 }
c13fde05
RH
5221}
5222
5223static void
fa8db1f7 5224do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 5225{
c41c1387 5226 emit_use (reg);
c13fde05
RH
5227}
5228
0bf8477d 5229static void
fa8db1f7 5230use_return_register (void)
c13fde05
RH
5231{
5232 diddle_return_value (do_use_return_reg, NULL);
5233}
5234
902edd36
JH
5235/* Possibly warn about unused parameters. */
5236void
5237do_warn_unused_parameter (tree fn)
5238{
5239 tree decl;
5240
5241 for (decl = DECL_ARGUMENTS (fn);
910ad8de 5242 decl; decl = DECL_CHAIN (decl))
902edd36 5243 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
534fd534
DF
5244 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
5245 && !TREE_NO_WARNING (decl))
b9b8dde3 5246 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
902edd36
JH
5247}
5248
862d0b35
DN
5249/* Set the location of the insn chain starting at INSN to LOC. */
5250
5251static void
dc01c3d1 5252set_insn_locations (rtx_insn *insn, int loc)
862d0b35 5253{
dc01c3d1 5254 while (insn != NULL)
862d0b35
DN
5255 {
5256 if (INSN_P (insn))
5257 INSN_LOCATION (insn) = loc;
5258 insn = NEXT_INSN (insn);
5259 }
5260}
5261
71c0e7fc 5262/* Generate RTL for the end of the current function. */
6f086dfc
RS
5263
5264void
fa8db1f7 5265expand_function_end (void)
6f086dfc 5266{
932f0847 5267 rtx clobber_after;
6f086dfc 5268
964be02f
RH
5269 /* If arg_pointer_save_area was referenced only from a nested
5270 function, we will not have initialized it yet. Do that now. */
e3b5732b 5271 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
bd60bab2 5272 get_arg_pointer_save_area ();
964be02f 5273
b38f3813 5274 /* If we are doing generic stack checking and this function makes calls,
11044f66
RK
5275 do a stack probe at the start of the function to ensure we have enough
5276 space for another stack frame. */
b38f3813 5277 if (flag_stack_check == GENERIC_STACK_CHECK)
11044f66 5278 {
691fe203 5279 rtx_insn *insn, *seq;
11044f66
RK
5280
5281 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 5282 if (CALL_P (insn))
11044f66 5283 {
c35af30f 5284 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
11044f66 5285 start_sequence ();
c35af30f
EB
5286 if (STACK_CHECK_MOVING_SP)
5287 anti_adjust_stack_and_probe (max_frame_size, true);
5288 else
5289 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
11044f66
RK
5290 seq = get_insns ();
5291 end_sequence ();
5368224f 5292 set_insn_locations (seq, prologue_location);
ede497cf 5293 emit_insn_before (seq, stack_check_probe_note);
11044f66
RK
5294 break;
5295 }
5296 }
5297
6f086dfc
RS
5298 /* End any sequences that failed to be closed due to syntax errors. */
5299 while (in_sequence_p ())
5f4f0e22 5300 end_sequence ();
6f086dfc 5301
6f086dfc
RS
5302 clear_pending_stack_adjust ();
5303 do_pending_stack_adjust ();
5304
6f086dfc
RS
5305 /* Output a linenumber for the end of the function.
5306 SDB depends on this. */
5368224f 5307 set_curr_insn_location (input_location);
6f086dfc 5308
fbffc70a 5309 /* Before the return label (if any), clobber the return
a1f300c0 5310 registers so that they are not propagated live to the rest of
fbffc70a
GK
5311 the function. This can only happen with functions that drop
5312 through; if there had been a return statement, there would
932f0847
JH
5313 have either been a return rtx, or a jump to the return label.
5314
5315 We delay actual code generation after the current_function_value_rtx
5316 is computed. */
5317 clobber_after = get_last_insn ();
fbffc70a 5318
526c334b
KH
5319 /* Output the label for the actual return from the function. */
5320 emit_label (return_label);
6f086dfc 5321
677f3fa8 5322 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
815eb8f0
AM
5323 {
5324 /* Let except.c know where it should emit the call to unregister
5325 the function context for sjlj exceptions. */
5326 if (flag_exceptions)
5327 sjlj_emit_function_exit_after (get_last_insn ());
5328 }
6fb5fa3c
DB
5329 else
5330 {
5331 /* We want to ensure that instructions that may trap are not
5332 moved into the epilogue by scheduling, because we don't
5333 always emit unwind information for the epilogue. */
8f4f502f 5334 if (cfun->can_throw_non_call_exceptions)
6fb5fa3c
DB
5335 emit_insn (gen_blockage ());
5336 }
0b59e81e 5337
652b0932
RH
5338 /* If this is an implementation of throw, do what's necessary to
5339 communicate between __builtin_eh_return and the epilogue. */
5340 expand_eh_return ();
5341
3e4eac3f
RH
5342 /* If scalar return value was computed in a pseudo-reg, or was a named
5343 return value that got dumped to the stack, copy that to the hard
5344 return register. */
19e7881c 5345 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 5346 {
3e4eac3f
RH
5347 tree decl_result = DECL_RESULT (current_function_decl);
5348 rtx decl_rtl = DECL_RTL (decl_result);
5349
5350 if (REG_P (decl_rtl)
5351 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5352 : DECL_REGISTER (decl_result))
5353 {
38173d38 5354 rtx real_decl_rtl = crtl->return_rtx;
6f086dfc 5355
ce5e43d0 5356 /* This should be set in assign_parms. */
0bccc606 5357 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
5358
5359 /* If this is a BLKmode structure being returned in registers,
5360 then use the mode computed in expand_return. Note that if
797a6ac1 5361 decl_rtl is memory, then its mode may have been changed,
38173d38 5362 but that crtl->return_rtx has not. */
3e4eac3f 5363 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 5364 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 5365
bef5d8b6
RS
5366 /* If a non-BLKmode return value should be padded at the least
5367 significant end of the register, shift it left by the appropriate
5368 amount. BLKmode results are handled using the group load/store
5369 machinery. */
5370 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
66de4d7c 5371 && REG_P (real_decl_rtl)
bef5d8b6
RS
5372 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5373 {
5374 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5375 REGNO (real_decl_rtl)),
5376 decl_rtl);
5377 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5378 }
3e4eac3f 5379 /* If a named return value dumped decl_return to memory, then
797a6ac1 5380 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 5381 extension. */
bef5d8b6 5382 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 5383 {
8df83eae 5384 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
cde0f3fd
PB
5385 promote_function_mode (TREE_TYPE (decl_result),
5386 GET_MODE (decl_rtl), &unsignedp,
5387 TREE_TYPE (current_function_decl), 1);
3e4eac3f
RH
5388
5389 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5390 }
aa570f54 5391 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
5392 {
5393 /* If expand_function_start has created a PARALLEL for decl_rtl,
5394 move the result to the real return registers. Otherwise, do
5395 a group load from decl_rtl for a named return. */
5396 if (GET_CODE (decl_rtl) == PARALLEL)
5397 emit_group_move (real_decl_rtl, decl_rtl);
5398 else
5399 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 5400 TREE_TYPE (decl_result),
084a1106
JDA
5401 int_size_in_bytes (TREE_TYPE (decl_result)));
5402 }
652b0932
RH
5403 /* In the case of complex integer modes smaller than a word, we'll
5404 need to generate some non-trivial bitfield insertions. Do that
5405 on a pseudo and not the hard register. */
5406 else if (GET_CODE (decl_rtl) == CONCAT
5407 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5408 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5409 {
5410 int old_generating_concat_p;
5411 rtx tmp;
5412
5413 old_generating_concat_p = generating_concat_p;
5414 generating_concat_p = 0;
5415 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5416 generating_concat_p = old_generating_concat_p;
5417
5418 emit_move_insn (tmp, decl_rtl);
5419 emit_move_insn (real_decl_rtl, tmp);
5420 }
3e4eac3f
RH
5421 else
5422 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 5423 }
6f086dfc
RS
5424 }
5425
5426 /* If returning a structure, arrange to return the address of the value
5427 in a place where debuggers expect to find it.
5428
5429 If returning a structure PCC style,
5430 the caller also depends on this value.
e3b5732b 5431 And cfun->returns_pcc_struct is not necessarily set. */
e0d14c39
BS
5432 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5433 && !targetm.calls.omit_struct_return_reg)
6f086dfc 5434 {
cc77ae10 5435 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 5436 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
5437 rtx outgoing;
5438
5439 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5440 type = TREE_TYPE (type);
5441 else
5442 value_address = XEXP (value_address, 0);
5443
1d636cc6
RG
5444 outgoing = targetm.calls.function_value (build_pointer_type (type),
5445 current_function_decl, true);
6f086dfc
RS
5446
5447 /* Mark this as a function return value so integrate will delete the
5448 assignment and USE below when inlining this function. */
5449 REG_FUNCTION_VALUE_P (outgoing) = 1;
5450
d1608933 5451 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
5452 value_address = convert_memory_address (GET_MODE (outgoing),
5453 value_address);
d1608933 5454
6f086dfc 5455 emit_move_insn (outgoing, value_address);
d1608933
RK
5456
5457 /* Show return register used to hold result (in this case the address
5458 of the result. */
38173d38 5459 crtl->return_rtx = outgoing;
6f086dfc
RS
5460 }
5461
79c7fda6
JJ
5462 /* Emit the actual code to clobber return register. Don't emit
5463 it if clobber_after is a barrier, then the previous basic block
5464 certainly doesn't fall thru into the exit block. */
5465 if (!BARRIER_P (clobber_after))
5466 {
5467 rtx seq;
797a6ac1 5468
79c7fda6
JJ
5469 start_sequence ();
5470 clobber_return_register ();
5471 seq = get_insns ();
5472 end_sequence ();
932f0847 5473
79c7fda6
JJ
5474 emit_insn_after (seq, clobber_after);
5475 }
932f0847 5476
609c3937 5477 /* Output the label for the naked return from the function. */
4c33221c
UW
5478 if (naked_return_label)
5479 emit_label (naked_return_label);
6e3077c6 5480
25108646
AH
5481 /* @@@ This is a kludge. We want to ensure that instructions that
5482 may trap are not moved into the epilogue by scheduling, because
56d17681 5483 we don't always emit unwind information for the epilogue. */
f0a0390e 5484 if (cfun->can_throw_non_call_exceptions
677f3fa8 5485 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
56d17681 5486 emit_insn (gen_blockage ());
25108646 5487
7d69de61 5488 /* If stack protection is enabled for this function, check the guard. */
cb91fab0 5489 if (crtl->stack_protect_guard)
7d69de61
RH
5490 stack_protect_epilogue ();
5491
40184445
BS
5492 /* If we had calls to alloca, and this machine needs
5493 an accurate stack pointer to exit the function,
5494 insert some code to save and restore the stack pointer. */
5495 if (! EXIT_IGNORE_STACK
e3b5732b 5496 && cfun->calls_alloca)
40184445 5497 {
9eac0f2a 5498 rtx tem = 0, seq;
40184445 5499
9eac0f2a
RH
5500 start_sequence ();
5501 emit_stack_save (SAVE_FUNCTION, &tem);
5502 seq = get_insns ();
5503 end_sequence ();
5504 emit_insn_before (seq, parm_birth_insn);
5505
5506 emit_stack_restore (SAVE_FUNCTION, tem);
40184445
BS
5507 }
5508
c13fde05
RH
5509 /* ??? This should no longer be necessary since stupid is no longer with
5510 us, but there are some parts of the compiler (eg reload_combine, and
5511 sh mach_dep_reorg) that still try and compute their own lifetime info
5512 instead of using the general framework. */
5513 use_return_register ();
6f086dfc 5514}
278ed218
RH
5515
5516rtx
bd60bab2 5517get_arg_pointer_save_area (void)
278ed218 5518{
bd60bab2 5519 rtx ret = arg_pointer_save_area;
278ed218
RH
5520
5521 if (! ret)
5522 {
bd60bab2
JH
5523 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5524 arg_pointer_save_area = ret;
964be02f
RH
5525 }
5526
e3b5732b 5527 if (! crtl->arg_pointer_save_area_init)
964be02f
RH
5528 {
5529 rtx seq;
278ed218 5530
797a6ac1 5531 /* Save the arg pointer at the beginning of the function. The
964be02f 5532 generated stack slot may not be a valid memory address, so we
278ed218
RH
5533 have to check it and fix it if necessary. */
5534 start_sequence ();
1a8cb155 5535 emit_move_insn (validize_mem (copy_rtx (ret)),
2e3f842f 5536 crtl->args.internal_arg_pointer);
2f937369 5537 seq = get_insns ();
278ed218
RH
5538 end_sequence ();
5539
964be02f 5540 push_topmost_sequence ();
1cb2fc7b 5541 emit_insn_after (seq, entry_of_function ());
964be02f 5542 pop_topmost_sequence ();
c1d9a70a
ILT
5543
5544 crtl->arg_pointer_save_area_init = true;
278ed218
RH
5545 }
5546
5547 return ret;
5548}
bdac5f58 5549\f
cd9c1ca8
RH
5550/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5551 for the first time. */
bdac5f58 5552
0a1c58a2 5553static void
d242408f 5554record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
bdac5f58 5555{
dc01c3d1 5556 rtx_insn *tmp;
d242408f 5557 hash_table<insn_cache_hasher> *hash = *hashp;
0a1c58a2 5558
cd9c1ca8 5559 if (hash == NULL)
d242408f 5560 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
cd9c1ca8
RH
5561
5562 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5563 {
d242408f 5564 rtx *slot = hash->find_slot (tmp, INSERT);
cd9c1ca8
RH
5565 gcc_assert (*slot == NULL);
5566 *slot = tmp;
5567 }
5568}
5569
cd400280
RH
5570/* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5571 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5572 insn, then record COPY as well. */
cd9c1ca8
RH
5573
5574void
cd400280 5575maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
cd9c1ca8 5576{
d242408f
TS
5577 hash_table<insn_cache_hasher> *hash;
5578 rtx *slot;
cd9c1ca8 5579
cd400280 5580 hash = epilogue_insn_hash;
d242408f 5581 if (!hash || !hash->find (insn))
cd400280
RH
5582 {
5583 hash = prologue_insn_hash;
d242408f 5584 if (!hash || !hash->find (insn))
cd400280
RH
5585 return;
5586 }
cd9c1ca8 5587
d242408f 5588 slot = hash->find_slot (copy, INSERT);
cd9c1ca8
RH
5589 gcc_assert (*slot == NULL);
5590 *slot = copy;
bdac5f58
TW
5591}
5592
cd9c1ca8
RH
5593/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5594 we can be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 5595
cd9c1ca8 5596static bool
d242408f 5597contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
bdac5f58 5598{
cd9c1ca8
RH
5599 if (hash == NULL)
5600 return false;
bdac5f58 5601
cd9c1ca8 5602 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
bdac5f58 5603 {
e0944870 5604 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
cd9c1ca8 5605 int i;
e0944870 5606 for (i = seq->len () - 1; i >= 0; i--)
d242408f 5607 if (hash->find (seq->element (i)))
cd9c1ca8
RH
5608 return true;
5609 return false;
bdac5f58 5610 }
cd9c1ca8 5611
d242408f 5612 return hash->find (const_cast<rtx> (insn)) != NULL;
bdac5f58 5613}
5c7675e9
RH
5614
5615int
4f588890 5616prologue_epilogue_contains (const_rtx insn)
5c7675e9 5617{
cd9c1ca8 5618 if (contains (insn, prologue_insn_hash))
5c7675e9 5619 return 1;
cd9c1ca8 5620 if (contains (insn, epilogue_insn_hash))
5c7675e9
RH
5621 return 1;
5622 return 0;
5623}
bdac5f58 5624
170d8157 5625#ifdef HAVE_return
4c029f40
TV
5626/* Insert use of return register before the end of BB. */
5627
5628static void
5629emit_use_return_register_into_block (basic_block bb)
5630{
1e1b18c1 5631 rtx seq, insn;
4c029f40
TV
5632 start_sequence ();
5633 use_return_register ();
5634 seq = get_insns ();
5635 end_sequence ();
1e1b18c1
EB
5636 insn = BB_END (bb);
5637#ifdef HAVE_cc0
5638 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5639 insn = prev_cc0_setter (insn);
5640#endif
5641 emit_insn_before (seq, insn);
4c029f40
TV
5642}
5643
484db665
BS
5644
5645/* Create a return pattern, either simple_return or return, depending on
5646 simple_p. */
5647
5648static rtx
5649gen_return_pattern (bool simple_p)
5650{
5651#ifdef HAVE_simple_return
5652 return simple_p ? gen_simple_return () : gen_return ();
5653#else
5654 gcc_assert (!simple_p);
5655 return gen_return ();
5656#endif
5657}
5658
5659/* Insert an appropriate return pattern at the end of block BB. This
5660 also means updating block_for_insn appropriately. SIMPLE_P is
5661 the same as in gen_return_pattern and passed to it. */
69732dcb 5662
f30e25a3 5663void
484db665 5664emit_return_into_block (bool simple_p, basic_block bb)
69732dcb 5665{
484db665
BS
5666 rtx jump, pat;
5667 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5668 pat = PATTERN (jump);
26898771
BS
5669 if (GET_CODE (pat) == PARALLEL)
5670 pat = XVECEXP (pat, 0, 0);
5671 gcc_assert (ANY_RETURN_P (pat));
5672 JUMP_LABEL (jump) = pat;
69732dcb 5673}
484db665 5674#endif
69732dcb 5675
387748de
AM
5676/* Set JUMP_LABEL for a return insn. */
5677
5678void
5679set_return_jump_label (rtx returnjump)
5680{
5681 rtx pat = PATTERN (returnjump);
5682 if (GET_CODE (pat) == PARALLEL)
5683 pat = XVECEXP (pat, 0, 0);
5684 if (ANY_RETURN_P (pat))
5685 JUMP_LABEL (returnjump) = pat;
5686 else
5687 JUMP_LABEL (returnjump) = ret_rtx;
5688}
5689
ffe14686
AM
5690#if defined (HAVE_return) || defined (HAVE_simple_return)
5691/* Return true if there are any active insns between HEAD and TAIL. */
f30e25a3 5692bool
ffd80b43 5693active_insn_between (rtx_insn *head, rtx_insn *tail)
39d52ae5 5694{
ffe14686
AM
5695 while (tail)
5696 {
5697 if (active_insn_p (tail))
5698 return true;
5699 if (tail == head)
5700 return false;
5701 tail = PREV_INSN (tail);
5702 }
5703 return false;
5704}
5705
5706/* LAST_BB is a block that exits, and empty of active instructions.
5707 Examine its predecessors for jumps that can be converted to
5708 (conditional) returns. */
f30e25a3 5709vec<edge>
ffe14686 5710convert_jumps_to_returns (basic_block last_bb, bool simple_p,
9771b263 5711 vec<edge> unconverted ATTRIBUTE_UNUSED)
ffe14686
AM
5712{
5713 int i;
5714 basic_block bb;
39d52ae5 5715 rtx label;
ffe14686
AM
5716 edge_iterator ei;
5717 edge e;
ef062b13 5718 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
39d52ae5 5719
ffe14686 5720 FOR_EACH_EDGE (e, ei, last_bb->preds)
fefa31b5 5721 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
9771b263 5722 src_bbs.quick_push (e->src);
ffe14686
AM
5723
5724 label = BB_HEAD (last_bb);
5725
9771b263 5726 FOR_EACH_VEC_ELT (src_bbs, i, bb)
39d52ae5 5727 {
68a1a6c0 5728 rtx_insn *jump = BB_END (bb);
ffe14686
AM
5729
5730 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5731 continue;
5732
5733 e = find_edge (bb, last_bb);
5734
5735 /* If we have an unconditional jump, we can replace that
5736 with a simple return instruction. */
5737 if (simplejump_p (jump))
5738 {
5739 /* The use of the return register might be present in the exit
5740 fallthru block. Either:
5741 - removing the use is safe, and we should remove the use in
5742 the exit fallthru block, or
5743 - removing the use is not safe, and we should add it here.
5744 For now, we conservatively choose the latter. Either of the
5745 2 helps in crossjumping. */
5746 emit_use_return_register_into_block (bb);
5747
5748 emit_return_into_block (simple_p, bb);
5749 delete_insn (jump);
5750 }
5751
5752 /* If we have a conditional jump branching to the last
5753 block, we can try to replace that with a conditional
5754 return instruction. */
5755 else if (condjump_p (jump))
5756 {
5757 rtx dest;
5758
5759 if (simple_p)
5760 dest = simple_return_rtx;
5761 else
5762 dest = ret_rtx;
5763 if (!redirect_jump (jump, dest, 0))
5764 {
5765#ifdef HAVE_simple_return
5766 if (simple_p)
5767 {
5768 if (dump_file)
5769 fprintf (dump_file,
5770 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5771 unconverted.safe_push (e);
ffe14686
AM
5772 }
5773#endif
5774 continue;
5775 }
5776
5777 /* See comment in simplejump_p case above. */
5778 emit_use_return_register_into_block (bb);
5779
5780 /* If this block has only one successor, it both jumps
5781 and falls through to the fallthru block, so we can't
5782 delete the edge. */
5783 if (single_succ_p (bb))
5784 continue;
5785 }
5786 else
5787 {
5788#ifdef HAVE_simple_return
5789 if (simple_p)
5790 {
5791 if (dump_file)
5792 fprintf (dump_file,
5793 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5794 unconverted.safe_push (e);
ffe14686
AM
5795 }
5796#endif
5797 continue;
5798 }
5799
5800 /* Fix up the CFG for the successful change we just made. */
fefa31b5 5801 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
d3b623c7 5802 e->flags &= ~EDGE_CROSSING;
39d52ae5 5803 }
9771b263 5804 src_bbs.release ();
ffe14686 5805 return unconverted;
39d52ae5
BS
5806}
5807
ffe14686 5808/* Emit a return insn for the exit fallthru block. */
f30e25a3 5809basic_block
ffe14686
AM
5810emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5811{
5812 basic_block last_bb = exit_fallthru_edge->src;
5813
5814 if (JUMP_P (BB_END (last_bb)))
5815 {
5816 last_bb = split_edge (exit_fallthru_edge);
5817 exit_fallthru_edge = single_succ_edge (last_bb);
5818 }
5819 emit_barrier_after (BB_END (last_bb));
5820 emit_return_into_block (simple_p, last_bb);
5821 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5822 return last_bb;
5823}
5824#endif
5825
5826
9faa82d8 5827/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58 5828 this into place with notes indicating where the prologue ends and where
484db665
BS
5829 the epilogue begins. Update the basic block information when possible.
5830
5831 Notes on epilogue placement:
5832 There are several kinds of edges to the exit block:
5833 * a single fallthru edge from LAST_BB
5834 * possibly, edges from blocks containing sibcalls
5835 * possibly, fake edges from infinite loops
5836
5837 The epilogue is always emitted on the fallthru edge from the last basic
5838 block in the function, LAST_BB, into the exit block.
5839
5840 If LAST_BB is empty except for a label, it is the target of every
5841 other basic block in the function that ends in a return. If a
5842 target has a return or simple_return pattern (possibly with
5843 conditional variants), these basic blocks can be changed so that a
5844 return insn is emitted into them, and their target is adjusted to
5845 the real exit block.
5846
5847 Notes on shrink wrapping: We implement a fairly conservative
5848 version of shrink-wrapping rather than the textbook one. We only
5849 generate a single prologue and a single epilogue. This is
5850 sufficient to catch a number of interesting cases involving early
5851 exits.
5852
5853 First, we identify the blocks that require the prologue to occur before
5854 them. These are the ones that modify a call-saved register, or reference
5855 any of the stack or frame pointer registers. To simplify things, we then
5856 mark everything reachable from these blocks as also requiring a prologue.
5857 This takes care of loops automatically, and avoids the need to examine
5858 whether MEMs reference the frame, since it is sufficient to check for
5859 occurrences of the stack or frame pointer.
5860
5861 We then compute the set of blocks for which the need for a prologue
5862 is anticipatable (borrowing terminology from the shrink-wrapping
5863 description in Muchnick's book). These are the blocks which either
5864 require a prologue themselves, or those that have only successors
5865 where the prologue is anticipatable. The prologue needs to be
5866 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5867 is not. For the moment, we ensure that only one such edge exists.
5868
5869 The epilogue is placed as described above, but we make a
5870 distinction between inserting return and simple_return patterns
5871 when modifying other blocks that end in a return. Blocks that end
5872 in a sibcall omit the sibcall_epilogue if the block is not in
5873 ANTIC. */
bdac5f58 5874
c81b4a0e 5875void
6fb5fa3c 5876thread_prologue_and_epilogue_insns (void)
bdac5f58 5877{
7458026b 5878 bool inserted;
484db665 5879#ifdef HAVE_simple_return
6e1aa848 5880 vec<edge> unconverted_simple_returns = vNULL;
ffe14686 5881 bitmap_head bb_flags;
484db665 5882#endif
9c8348cf 5883 rtx_insn *returnjump;
9c8348cf 5884 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
dc01c3d1 5885 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
484db665 5886 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
628f6a4e 5887 edge_iterator ei;
484db665
BS
5888
5889 df_analyze ();
e881bb1b 5890
fefa31b5 5891 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
7458026b
ILT
5892
5893 inserted = false;
9c8348cf
DM
5894 epilogue_end = NULL;
5895 returnjump = NULL;
7458026b
ILT
5896
5897 /* Can't deal with multiple successors of the entry block at the
5898 moment. Function should always have at least one entry
5899 point. */
fefa31b5
DM
5900 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5901 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
484db665
BS
5902 orig_entry_edge = entry_edge;
5903
dc01c3d1 5904 split_prologue_seq = NULL;
7458026b
ILT
5905 if (flag_split_stack
5906 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5907 == NULL))
5908 {
5909#ifndef HAVE_split_stack_prologue
5910 gcc_unreachable ();
5911#else
5912 gcc_assert (HAVE_split_stack_prologue);
5913
5914 start_sequence ();
5915 emit_insn (gen_split_stack_prologue ());
484db665 5916 split_prologue_seq = get_insns ();
7458026b
ILT
5917 end_sequence ();
5918
484db665 5919 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5368224f 5920 set_insn_locations (split_prologue_seq, prologue_location);
7458026b
ILT
5921#endif
5922 }
5923
dc01c3d1 5924 prologue_seq = NULL;
bdac5f58
TW
5925#ifdef HAVE_prologue
5926 if (HAVE_prologue)
5927 {
e881bb1b 5928 start_sequence ();
dc01c3d1 5929 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
e881bb1b 5930 emit_insn (seq);
bdac5f58 5931
b8698a0f 5932 /* Insert an explicit USE for the frame pointer
6fb5fa3c 5933 if the profiling is on and the frame pointer is required. */
e3b5732b 5934 if (crtl->profile && frame_pointer_needed)
c41c1387 5935 emit_use (hard_frame_pointer_rtx);
6fb5fa3c 5936
bdac5f58 5937 /* Retain a map of the prologue insns. */
cd9c1ca8 5938 record_insns (seq, NULL, &prologue_insn_hash);
56d17681 5939 emit_note (NOTE_INSN_PROLOGUE_END);
b8698a0f 5940
56d17681
UB
5941 /* Ensure that instructions are not moved into the prologue when
5942 profiling is on. The call to the profiling routine can be
5943 emitted within the live range of a call-clobbered register. */
3c5273a9 5944 if (!targetm.profile_before_prologue () && crtl->profile)
56d17681 5945 emit_insn (gen_blockage ());
9185a8d5 5946
484db665 5947 prologue_seq = get_insns ();
e881bb1b 5948 end_sequence ();
5368224f 5949 set_insn_locations (prologue_seq, prologue_location);
484db665
BS
5950 }
5951#endif
e881bb1b 5952
ffe14686 5953#ifdef HAVE_simple_return
484db665
BS
5954 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5955
484db665
BS
5956 /* Try to perform a kind of shrink-wrapping, making sure the
5957 prologue/epilogue is emitted only around those parts of the
5958 function that require it. */
5959
f30e25a3 5960 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
bdac5f58 5961#endif
bdac5f58 5962
484db665
BS
5963 if (split_prologue_seq != NULL_RTX)
5964 {
f4b31a33 5965 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
484db665
BS
5966 inserted = true;
5967 }
5968 if (prologue_seq != NULL_RTX)
5969 {
5970 insert_insn_on_edge (prologue_seq, entry_edge);
5971 inserted = true;
5972 }
5973
19d3c25c
RH
5974 /* If the exit block has no non-fake predecessors, we don't need
5975 an epilogue. */
fefa31b5 5976 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
19d3c25c
RH
5977 if ((e->flags & EDGE_FAKE) == 0)
5978 break;
5979 if (e == NULL)
5980 goto epilogue_done;
5981
fefa31b5 5982 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
484db665 5983
fefa31b5 5984 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
ffe14686 5985
ffe14686
AM
5986#ifdef HAVE_simple_return
5987 if (entry_edge != orig_entry_edge)
f30e25a3
ZC
5988 exit_fallthru_edge
5989 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5990 &unconverted_simple_returns,
5991 &returnjump);
484db665 5992#endif
ffe14686
AM
5993#ifdef HAVE_return
5994 if (HAVE_return)
5995 {
5996 if (exit_fallthru_edge == NULL)
5997 goto epilogue_done;
69732dcb 5998
ffe14686
AM
5999 if (optimize)
6000 {
6001 basic_block last_bb = exit_fallthru_edge->src;
484db665 6002
ffe14686
AM
6003 if (LABEL_P (BB_HEAD (last_bb))
6004 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6e1aa848 6005 convert_jumps_to_returns (last_bb, false, vNULL);
ffe14686 6006
1ff2fd21
AM
6007 if (EDGE_COUNT (last_bb->preds) != 0
6008 && single_succ_p (last_bb))
484db665 6009 {
ffe14686
AM
6010 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6011 epilogue_end = returnjump = BB_END (last_bb);
484db665 6012#ifdef HAVE_simple_return
ffe14686
AM
6013 /* Emitting the return may add a basic block.
6014 Fix bb_flags for the added block. */
6015 if (last_bb != exit_fallthru_edge->src)
6016 bitmap_set_bit (&bb_flags, last_bb->index);
484db665 6017#endif
ffe14686 6018 goto epilogue_done;
69732dcb 6019 }
2dd8bc01 6020 }
69732dcb
RH
6021 }
6022#endif
cd9c1ca8
RH
6023
6024 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6025 this marker for the splits of EH_RETURN patterns, and nothing else
6026 uses the flag in the meantime. */
6027 epilogue_completed = 1;
6028
6029#ifdef HAVE_eh_return
6030 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6031 some targets, these get split to a special version of the epilogue
6032 code. In order to be able to properly annotate these with unwind
6033 info, try to split them now. If we get a valid split, drop an
6034 EPILOGUE_BEG note and mark the insns as epilogue insns. */
fefa31b5 6035 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
cd9c1ca8 6036 {
691fe203 6037 rtx_insn *prev, *last, *trial;
cd9c1ca8
RH
6038
6039 if (e->flags & EDGE_FALLTHRU)
6040 continue;
6041 last = BB_END (e->src);
6042 if (!eh_returnjump_p (last))
6043 continue;
6044
6045 prev = PREV_INSN (last);
6046 trial = try_split (PATTERN (last), last, 1);
6047 if (trial == last)
6048 continue;
6049
6050 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6051 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6052 }
6053#endif
6054
484db665
BS
6055 /* If nothing falls through into the exit block, we don't need an
6056 epilogue. */
623a66fa 6057
484db665 6058 if (exit_fallthru_edge == NULL)
623a66fa
R
6059 goto epilogue_done;
6060
bdac5f58
TW
6061#ifdef HAVE_epilogue
6062 if (HAVE_epilogue)
6063 {
19d3c25c 6064 start_sequence ();
2e040219 6065 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
dc01c3d1 6066 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
55c623b5
UW
6067 if (seq)
6068 emit_jump_insn (seq);
bdac5f58 6069
19d3c25c 6070 /* Retain a map of the epilogue insns. */
cd9c1ca8 6071 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 6072 set_insn_locations (seq, epilogue_location);
bdac5f58 6073
2f937369 6074 seq = get_insns ();
484db665 6075 returnjump = get_last_insn ();
718fe406 6076 end_sequence ();
e881bb1b 6077
484db665 6078 insert_insn_on_edge (seq, exit_fallthru_edge);
7458026b 6079 inserted = true;
dc0ff1c8
BS
6080
6081 if (JUMP_P (returnjump))
387748de 6082 set_return_jump_label (returnjump);
bdac5f58 6083 }
623a66fa 6084 else
bdac5f58 6085#endif
623a66fa
R
6086 {
6087 basic_block cur_bb;
6088
484db665 6089 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
623a66fa
R
6090 goto epilogue_done;
6091 /* We have a fall-through edge to the exit block, the source is not
6092 at the end of the function, and there will be an assembler epilogue
6093 at the end of the function.
6094 We can't use force_nonfallthru here, because that would try to
484db665 6095 use return. Inserting a jump 'by hand' is extremely messy, so
623a66fa 6096 we take advantage of cfg_layout_finalize using
484db665 6097 fixup_fallthru_exit_predecessor. */
35b6b437 6098 cfg_layout_initialize (0);
11cd3bed 6099 FOR_EACH_BB_FN (cur_bb, cfun)
24bd1a0b
DB
6100 if (cur_bb->index >= NUM_FIXED_BLOCKS
6101 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 6102 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
6103 cfg_layout_finalize ();
6104 }
cf103ca4 6105
19d3c25c 6106epilogue_done:
484db665 6107
a8ba47cb 6108 default_rtl_profile ();
e881bb1b 6109
ca1117cc 6110 if (inserted)
30a873c3 6111 {
cf103ca4
EB
6112 sbitmap blocks;
6113
30a873c3
ZD
6114 commit_edge_insertions ();
6115
cf103ca4 6116 /* Look for basic blocks within the prologue insns. */
8b1c6fd7 6117 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 6118 bitmap_clear (blocks);
d7c028c0
LC
6119 bitmap_set_bit (blocks, entry_edge->dest->index);
6120 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
cf103ca4
EB
6121 find_many_sub_basic_blocks (blocks);
6122 sbitmap_free (blocks);
6123
30a873c3
ZD
6124 /* The epilogue insns we inserted may cause the exit edge to no longer
6125 be fallthru. */
fefa31b5 6126 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
30a873c3
ZD
6127 {
6128 if (((e->flags & EDGE_FALLTHRU) != 0)
6129 && returnjump_p (BB_END (e->src)))
6130 e->flags &= ~EDGE_FALLTHRU;
6131 }
6132 }
0a1c58a2 6133
484db665 6134#ifdef HAVE_simple_return
f30e25a3
ZC
6135 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
6136 unconverted_simple_returns);
484db665
BS
6137#endif
6138
0a1c58a2
JL
6139#ifdef HAVE_sibcall_epilogue
6140 /* Emit sibling epilogues before any sibling call sites. */
fefa31b5
DM
6141 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6142 ei_safe_edge (ei));
6143 )
0a1c58a2
JL
6144 {
6145 basic_block bb = e->src;
691fe203 6146 rtx_insn *insn = BB_END (bb);
484db665 6147 rtx ep_seq;
0a1c58a2 6148
4b4bf941 6149 if (!CALL_P (insn)
484db665 6150 || ! SIBLING_CALL_P (insn)
ffe14686 6151#ifdef HAVE_simple_return
484db665 6152 || (entry_edge != orig_entry_edge
ffe14686
AM
6153 && !bitmap_bit_p (&bb_flags, bb->index))
6154#endif
6155 )
628f6a4e
BE
6156 {
6157 ei_next (&ei);
6158 continue;
6159 }
0a1c58a2 6160
484db665
BS
6161 ep_seq = gen_sibcall_epilogue ();
6162 if (ep_seq)
6163 {
6164 start_sequence ();
6165 emit_note (NOTE_INSN_EPILOGUE_BEG);
6166 emit_insn (ep_seq);
dc01c3d1 6167 rtx_insn *seq = get_insns ();
484db665 6168 end_sequence ();
0a1c58a2 6169
484db665
BS
6170 /* Retain a map of the epilogue insns. Used in life analysis to
6171 avoid getting rid of sibcall epilogue insns. Do this before we
6172 actually emit the sequence. */
6173 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 6174 set_insn_locations (seq, epilogue_location);
2f937369 6175
484db665
BS
6176 emit_insn_before (seq, insn);
6177 }
628f6a4e 6178 ei_next (&ei);
0a1c58a2
JL
6179 }
6180#endif
ca1117cc 6181
86c82654
RH
6182#ifdef HAVE_epilogue
6183 if (epilogue_end)
6184 {
9c8348cf 6185 rtx_insn *insn, *next;
86c82654
RH
6186
6187 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 6188 There is no need, however, to be quite so anal about the existence
071a42f9 6189 of such a note. Also possibly move
84c1fa24
UW
6190 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6191 info generation. */
718fe406 6192 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
6193 {
6194 next = NEXT_INSN (insn);
b8698a0f 6195 if (NOTE_P (insn)
a38e7aa5 6196 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
86c82654
RH
6197 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6198 }
6199 }
6200#endif
6fb5fa3c 6201
ffe14686 6202#ifdef HAVE_simple_return
484db665 6203 bitmap_clear (&bb_flags);
ffe14686 6204#endif
484db665 6205
6fb5fa3c
DB
6206 /* Threading the prologue and epilogue changes the artificial refs
6207 in the entry and exit blocks. */
6208 epilogue_completed = 1;
6209 df_update_entry_exit_and_calls ();
bdac5f58
TW
6210}
6211
cd9c1ca8
RH
6212/* Reposition the prologue-end and epilogue-begin notes after
6213 instruction scheduling. */
bdac5f58
TW
6214
6215void
6fb5fa3c 6216reposition_prologue_and_epilogue_notes (void)
bdac5f58 6217{
cd9c1ca8
RH
6218#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6219 || defined (HAVE_sibcall_epilogue)
cd9c1ca8
RH
6220 /* Since the hash table is created on demand, the fact that it is
6221 non-null is a signal that it is non-empty. */
6222 if (prologue_insn_hash != NULL)
bdac5f58 6223 {
d242408f 6224 size_t len = prologue_insn_hash->elements ();
691fe203 6225 rtx_insn *insn, *last = NULL, *note = NULL;
bdac5f58 6226
cd9c1ca8
RH
6227 /* Scan from the beginning until we reach the last prologue insn. */
6228 /* ??? While we do have the CFG intact, there are two problems:
6229 (1) The prologue can contain loops (typically probing the stack),
6230 which means that the end of the prologue isn't in the first bb.
6231 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6fb5fa3c 6232 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
bdac5f58 6233 {
4b4bf941 6234 if (NOTE_P (insn))
9392c110 6235 {
a38e7aa5 6236 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
0a1c58a2
JL
6237 note = insn;
6238 }
cd9c1ca8 6239 else if (contains (insn, prologue_insn_hash))
0a1c58a2 6240 {
9f53e965
RH
6241 last = insn;
6242 if (--len == 0)
6243 break;
6244 }
6245 }
797a6ac1 6246
9f53e965
RH
6247 if (last)
6248 {
cd9c1ca8 6249 if (note == NULL)
9f53e965 6250 {
cd9c1ca8
RH
6251 /* Scan forward looking for the PROLOGUE_END note. It should
6252 be right at the beginning of the block, possibly with other
6253 insn notes that got moved there. */
6254 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6255 {
6256 if (NOTE_P (note)
6257 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6258 break;
6259 }
9f53e965 6260 }
c93b03c2 6261
9f53e965 6262 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 6263 if (LABEL_P (last))
9f53e965
RH
6264 last = NEXT_INSN (last);
6265 reorder_insns (note, note, last);
bdac5f58 6266 }
0a1c58a2
JL
6267 }
6268
cd9c1ca8 6269 if (epilogue_insn_hash != NULL)
0a1c58a2 6270 {
cd9c1ca8
RH
6271 edge_iterator ei;
6272 edge e;
bdac5f58 6273
fefa31b5 6274 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
bdac5f58 6275 {
691fe203 6276 rtx_insn *insn, *first = NULL, *note = NULL;
997704f1 6277 basic_block bb = e->src;
c93b03c2 6278
997704f1 6279 /* Scan from the beginning until we reach the first epilogue insn. */
cd9c1ca8 6280 FOR_BB_INSNS (bb, insn)
9f53e965 6281 {
cd9c1ca8
RH
6282 if (NOTE_P (insn))
6283 {
6284 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6285 {
6286 note = insn;
997704f1 6287 if (first != NULL)
cd9c1ca8
RH
6288 break;
6289 }
6290 }
997704f1 6291 else if (first == NULL && contains (insn, epilogue_insn_hash))
cd9c1ca8 6292 {
997704f1 6293 first = insn;
cd9c1ca8
RH
6294 if (note != NULL)
6295 break;
6296 }
9392c110 6297 }
997704f1
RH
6298
6299 if (note)
6300 {
6301 /* If the function has a single basic block, and no real
b8698a0f 6302 epilogue insns (e.g. sibcall with no cleanup), the
997704f1
RH
6303 epilogue note can get scheduled before the prologue
6304 note. If we have frame related prologue insns, having
6305 them scanned during the epilogue will result in a crash.
6306 In this case re-order the epilogue note to just before
6307 the last insn in the block. */
6308 if (first == NULL)
6309 first = BB_END (bb);
6310
6311 if (PREV_INSN (first) != note)
6312 reorder_insns (note, note, PREV_INSN (first));
6313 }
bdac5f58
TW
6314 }
6315 }
6316#endif /* HAVE_prologue or HAVE_epilogue */
6317}
87ff9c8e 6318
df92c640
SB
6319/* Returns the name of function declared by FNDECL. */
6320const char *
6321fndecl_name (tree fndecl)
6322{
6323 if (fndecl == NULL)
6324 return "(nofn)";
6325 return lang_hooks.decl_printable_name (fndecl, 2);
6326}
6327
532aafad
SB
6328/* Returns the name of function FN. */
6329const char *
6330function_name (struct function *fn)
6331{
df92c640
SB
6332 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6333 return fndecl_name (fndecl);
532aafad
SB
6334}
6335
faed5cc3
SB
6336/* Returns the name of the current function. */
6337const char *
6338current_function_name (void)
6339{
532aafad 6340 return function_name (cfun);
faed5cc3 6341}
ef330312
PB
6342\f
6343
c2924966 6344static unsigned int
ef330312
PB
6345rest_of_handle_check_leaf_regs (void)
6346{
6347#ifdef LEAF_REGISTERS
416ff32e 6348 crtl->uses_only_leaf_regs
ef330312
PB
6349 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6350#endif
c2924966 6351 return 0;
ef330312
PB
6352}
6353
8d8d1a28 6354/* Insert a TYPE into the used types hash table of CFUN. */
b646ba3f 6355
8d8d1a28
AH
6356static void
6357used_types_insert_helper (tree type, struct function *func)
33c9159e 6358{
8d8d1a28 6359 if (type != NULL && func != NULL)
33c9159e 6360 {
33c9159e 6361 if (func->used_types_hash == NULL)
b086d530
TS
6362 func->used_types_hash = hash_set<tree>::create_ggc (37);
6363
6364 func->used_types_hash->add (type);
33c9159e
AH
6365 }
6366}
6367
8d8d1a28
AH
6368/* Given a type, insert it into the used hash table in cfun. */
6369void
6370used_types_insert (tree t)
6371{
6372 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
095c7b3c
JJ
6373 if (TYPE_NAME (t))
6374 break;
6375 else
6376 t = TREE_TYPE (t);
29ce73cb
PB
6377 if (TREE_CODE (t) == ERROR_MARK)
6378 return;
095c7b3c
JJ
6379 if (TYPE_NAME (t) == NULL_TREE
6380 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6381 t = TYPE_MAIN_VARIANT (t);
8d8d1a28 6382 if (debug_info_level > DINFO_LEVEL_NONE)
b646ba3f
DS
6383 {
6384 if (cfun)
6385 used_types_insert_helper (t, cfun);
6386 else
9771b263
DN
6387 {
6388 /* So this might be a type referenced by a global variable.
6389 Record that type so that we can later decide to emit its
6390 debug information. */
6391 vec_safe_push (types_used_by_cur_var_decl, t);
6392 }
b646ba3f
DS
6393 }
6394}
6395
6396/* Helper to Hash a struct types_used_by_vars_entry. */
6397
6398static hashval_t
6399hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6400{
6401 gcc_assert (entry && entry->var_decl && entry->type);
6402
6403 return iterative_hash_object (entry->type,
6404 iterative_hash_object (entry->var_decl, 0));
6405}
6406
6407/* Hash function of the types_used_by_vars_entry hash table. */
6408
6409hashval_t
2a22f99c 6410used_type_hasher::hash (types_used_by_vars_entry *entry)
b646ba3f 6411{
b646ba3f
DS
6412 return hash_types_used_by_vars_entry (entry);
6413}
6414
6415/*Equality function of the types_used_by_vars_entry hash table. */
6416
2a22f99c
TS
6417bool
6418used_type_hasher::equal (types_used_by_vars_entry *e1,
6419 types_used_by_vars_entry *e2)
b646ba3f 6420{
b646ba3f
DS
6421 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6422}
6423
6424/* Inserts an entry into the types_used_by_vars_hash hash table. */
6425
6426void
6427types_used_by_var_decl_insert (tree type, tree var_decl)
6428{
6429 if (type != NULL && var_decl != NULL)
6430 {
2a22f99c 6431 types_used_by_vars_entry **slot;
b646ba3f
DS
6432 struct types_used_by_vars_entry e;
6433 e.var_decl = var_decl;
6434 e.type = type;
6435 if (types_used_by_vars_hash == NULL)
2a22f99c
TS
6436 types_used_by_vars_hash
6437 = hash_table<used_type_hasher>::create_ggc (37);
6438
6439 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
b646ba3f
DS
6440 if (*slot == NULL)
6441 {
6442 struct types_used_by_vars_entry *entry;
766090c2 6443 entry = ggc_alloc<types_used_by_vars_entry> ();
b646ba3f
DS
6444 entry->type = type;
6445 entry->var_decl = var_decl;
6446 *slot = entry;
6447 }
6448 }
8d8d1a28
AH
6449}
6450
27a4cd48
DM
6451namespace {
6452
6453const pass_data pass_data_leaf_regs =
6454{
6455 RTL_PASS, /* type */
6456 "*leaf_regs", /* name */
6457 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
6458 TV_NONE, /* tv_id */
6459 0, /* properties_required */
6460 0, /* properties_provided */
6461 0, /* properties_destroyed */
6462 0, /* todo_flags_start */
6463 0, /* todo_flags_finish */
ef330312
PB
6464};
6465
27a4cd48
DM
6466class pass_leaf_regs : public rtl_opt_pass
6467{
6468public:
c3284718
RS
6469 pass_leaf_regs (gcc::context *ctxt)
6470 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
27a4cd48
DM
6471 {}
6472
6473 /* opt_pass methods: */
be55bfe6
TS
6474 virtual unsigned int execute (function *)
6475 {
6476 return rest_of_handle_check_leaf_regs ();
6477 }
27a4cd48
DM
6478
6479}; // class pass_leaf_regs
6480
6481} // anon namespace
6482
6483rtl_opt_pass *
6484make_pass_leaf_regs (gcc::context *ctxt)
6485{
6486 return new pass_leaf_regs (ctxt);
6487}
6488
6fb5fa3c
DB
6489static unsigned int
6490rest_of_handle_thread_prologue_and_epilogue (void)
6491{
6492 if (optimize)
6493 cleanup_cfg (CLEANUP_EXPENSIVE);
d3c12306 6494
6fb5fa3c
DB
6495 /* On some machines, the prologue and epilogue code, or parts thereof,
6496 can be represented as RTL. Doing so lets us schedule insns between
6497 it and the rest of the code and also allows delayed branch
6498 scheduling to operate in the epilogue. */
6fb5fa3c 6499 thread_prologue_and_epilogue_insns ();
d3c12306 6500
bdc6e1ae
SB
6501 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6502 see PR57320. */
6503 cleanup_cfg (0);
6504
d3c12306 6505 /* The stack usage info is finalized during prologue expansion. */
a11e0df4 6506 if (flag_stack_usage_info)
d3c12306
EB
6507 output_stack_usage ();
6508
6fb5fa3c
DB
6509 return 0;
6510}
6511
27a4cd48
DM
6512namespace {
6513
6514const pass_data pass_data_thread_prologue_and_epilogue =
6515{
6516 RTL_PASS, /* type */
6517 "pro_and_epilogue", /* name */
6518 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
6519 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6520 0, /* properties_required */
6521 0, /* properties_provided */
6522 0, /* properties_destroyed */
3bea341f
RB
6523 0, /* todo_flags_start */
6524 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6fb5fa3c 6525};
27a4cd48
DM
6526
6527class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6528{
6529public:
c3284718
RS
6530 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6531 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
27a4cd48
DM
6532 {}
6533
6534 /* opt_pass methods: */
be55bfe6
TS
6535 virtual unsigned int execute (function *)
6536 {
6537 return rest_of_handle_thread_prologue_and_epilogue ();
6538 }
27a4cd48
DM
6539
6540}; // class pass_thread_prologue_and_epilogue
6541
6542} // anon namespace
6543
6544rtl_opt_pass *
6545make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6546{
6547 return new pass_thread_prologue_and_epilogue (ctxt);
6548}
d8d72314
PB
6549\f
6550
6551/* This mini-pass fixes fall-out from SSA in asm statements that have
b8698a0f 6552 in-out constraints. Say you start with
d8d72314
PB
6553
6554 orig = inout;
6555 asm ("": "+mr" (inout));
6556 use (orig);
6557
6558 which is transformed very early to use explicit output and match operands:
6559
6560 orig = inout;
6561 asm ("": "=mr" (inout) : "0" (inout));
6562 use (orig);
6563
6564 Or, after SSA and copyprop,
6565
6566 asm ("": "=mr" (inout_2) : "0" (inout_1));
6567 use (inout_1);
6568
6569 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6570 they represent two separate values, so they will get different pseudo
6571 registers during expansion. Then, since the two operands need to match
6572 per the constraints, but use different pseudo registers, reload can
6573 only register a reload for these operands. But reloads can only be
6574 satisfied by hardregs, not by memory, so we need a register for this
6575 reload, just because we are presented with non-matching operands.
6576 So, even though we allow memory for this operand, no memory can be
6577 used for it, just because the two operands don't match. This can
6578 cause reload failures on register-starved targets.
6579
6580 So it's a symptom of reload not being able to use memory for reloads
6581 or, alternatively it's also a symptom of both operands not coming into
6582 reload as matching (in which case the pseudo could go to memory just
6583 fine, as the alternative allows it, and no reload would be necessary).
6584 We fix the latter problem here, by transforming
6585
6586 asm ("": "=mr" (inout_2) : "0" (inout_1));
6587
6588 back to
6589
6590 inout_2 = inout_1;
6591 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6592
6593static void
691fe203 6594match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
d8d72314
PB
6595{
6596 int i;
6597 bool changed = false;
6598 rtx op = SET_SRC (p_sets[0]);
6599 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6600 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
1b4572a8 6601 bool *output_matched = XALLOCAVEC (bool, noutputs);
d8d72314 6602
d7b8033f 6603 memset (output_matched, 0, noutputs * sizeof (bool));
d8d72314
PB
6604 for (i = 0; i < ninputs; i++)
6605 {
691fe203
DM
6606 rtx input, output;
6607 rtx_insn *insns;
d8d72314
PB
6608 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6609 char *end;
53220215 6610 int match, j;
d8d72314 6611
70f16287
JJ
6612 if (*constraint == '%')
6613 constraint++;
6614
d8d72314
PB
6615 match = strtoul (constraint, &end, 10);
6616 if (end == constraint)
6617 continue;
6618
6619 gcc_assert (match < noutputs);
6620 output = SET_DEST (p_sets[match]);
6621 input = RTVEC_ELT (inputs, i);
53220215
MM
6622 /* Only do the transformation for pseudos. */
6623 if (! REG_P (output)
6624 || rtx_equal_p (output, input)
d8d72314
PB
6625 || (GET_MODE (input) != VOIDmode
6626 && GET_MODE (input) != GET_MODE (output)))
6627 continue;
6628
53220215
MM
6629 /* We can't do anything if the output is also used as input,
6630 as we're going to overwrite it. */
6631 for (j = 0; j < ninputs; j++)
6632 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6633 break;
6634 if (j != ninputs)
6635 continue;
6636
d7b8033f
JJ
6637 /* Avoid changing the same input several times. For
6638 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6639 only change in once (to out1), rather than changing it
6640 first to out1 and afterwards to out2. */
6641 if (i > 0)
6642 {
6643 for (j = 0; j < noutputs; j++)
6644 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6645 break;
6646 if (j != noutputs)
6647 continue;
6648 }
6649 output_matched[match] = true;
6650
d8d72314 6651 start_sequence ();
53220215 6652 emit_move_insn (output, input);
d8d72314
PB
6653 insns = get_insns ();
6654 end_sequence ();
d8d72314 6655 emit_insn_before (insns, insn);
53220215
MM
6656
6657 /* Now replace all mentions of the input with output. We can't
fa10beec 6658 just replace the occurrence in inputs[i], as the register might
53220215
MM
6659 also be used in some other input (or even in an address of an
6660 output), which would mean possibly increasing the number of
6661 inputs by one (namely 'output' in addition), which might pose
6662 a too complicated problem for reload to solve. E.g. this situation:
6663
6664 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6665
84fbffb2 6666 Here 'input' is used in two occurrences as input (once for the
53220215 6667 input operand, once for the address in the second output operand).
fa10beec 6668 If we would replace only the occurrence of the input operand (to
53220215
MM
6669 make the matching) we would be left with this:
6670
6671 output = input
6672 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6673
6674 Now we suddenly have two different input values (containing the same
6675 value, but different pseudos) where we formerly had only one.
6676 With more complicated asms this might lead to reload failures
6677 which wouldn't have happen without this pass. So, iterate over
84fbffb2 6678 all operands and replace all occurrences of the register used. */
53220215 6679 for (j = 0; j < noutputs; j++)
1596d61e 6680 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
53220215
MM
6681 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6682 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6683 input, output);
6684 for (j = 0; j < ninputs; j++)
6685 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6686 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6687 input, output);
6688
d8d72314
PB
6689 changed = true;
6690 }
6691
6692 if (changed)
6693 df_insn_rescan (insn);
6694}
6695
5cf18d25
ML
6696/* Add the decl D to the local_decls list of FUN. */
6697
6698void
6699add_local_decl (struct function *fun, tree d)
6700{
6701 gcc_assert (TREE_CODE (d) == VAR_DECL);
6702 vec_safe_push (fun->local_decls, d);
6703}
6704
be55bfe6
TS
6705namespace {
6706
6707const pass_data pass_data_match_asm_constraints =
6708{
6709 RTL_PASS, /* type */
6710 "asmcons", /* name */
6711 OPTGROUP_NONE, /* optinfo_flags */
be55bfe6
TS
6712 TV_NONE, /* tv_id */
6713 0, /* properties_required */
6714 0, /* properties_provided */
6715 0, /* properties_destroyed */
6716 0, /* todo_flags_start */
6717 0, /* todo_flags_finish */
6718};
6719
6720class pass_match_asm_constraints : public rtl_opt_pass
6721{
6722public:
6723 pass_match_asm_constraints (gcc::context *ctxt)
6724 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6725 {}
6726
6727 /* opt_pass methods: */
6728 virtual unsigned int execute (function *);
6729
6730}; // class pass_match_asm_constraints
6731
6732unsigned
6733pass_match_asm_constraints::execute (function *fun)
d8d72314
PB
6734{
6735 basic_block bb;
691fe203
DM
6736 rtx_insn *insn;
6737 rtx pat, *p_sets;
d8d72314
PB
6738 int noutputs;
6739
e3b5732b 6740 if (!crtl->has_asm_statement)
d8d72314
PB
6741 return 0;
6742
6743 df_set_flags (DF_DEFER_INSN_RESCAN);
be55bfe6 6744 FOR_EACH_BB_FN (bb, fun)
d8d72314
PB
6745 {
6746 FOR_BB_INSNS (bb, insn)
6747 {
6748 if (!INSN_P (insn))
6749 continue;
6750
6751 pat = PATTERN (insn);
6752 if (GET_CODE (pat) == PARALLEL)
6753 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6754 else if (GET_CODE (pat) == SET)
6755 p_sets = &PATTERN (insn), noutputs = 1;
6756 else
6757 continue;
6758
6759 if (GET_CODE (*p_sets) == SET
6760 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6761 match_asm_constraints_1 (insn, p_sets, noutputs);
6762 }
6763 }
6764
6765 return TODO_df_finish;
6766}
6767
27a4cd48
DM
6768} // anon namespace
6769
6770rtl_opt_pass *
6771make_pass_match_asm_constraints (gcc::context *ctxt)
6772{
6773 return new pass_match_asm_constraints (ctxt);
6774}
6775
faed5cc3 6776
e2500fed 6777#include "gt-function.h"