/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tree.h"
#include "stor-layout.h"
#include "varasm.h"
#include "stringpool.h"
#include "flags.h"
#include "except.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "df.h"
#include "params.h"
#include "bb-reorder.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

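/* A quick worked illustration of the two macros above (example values
   only, not used anywhere in this file): with ALIGN == 16,
   FLOOR_ROUND (37, 16) yields 32 and CEIL_ROUND (37, 16) yields 48,
   i.e. the nearest multiples of 16 at or below and at or above 37.  */
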
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;
\f

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
\f
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
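/* Illustration only (hypothetical target numbers): where Pmode is 32
   bits wide and UNITS_PER_WORD is 4, the check above rejects any frame
   whose local objects need more than 2^31 - 256 bytes, the 256 bytes
   (64 * UNITS_PER_WORD) being the room left for the fixed part of the
   frame.  */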

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

BS
264/* Determine whether it is possible to fit a stack slot of size SIZE and
265 alignment ALIGNMENT into an area in the stack frame that starts at
266 frame offset START and has a length of LENGTH. If so, store the frame
267 offset to be used for the stack slot in *POFFSET and return true;
268 return false otherwise. This function will extend the frame size when
269 given a start/length pair that lies at the end of the frame. */
270
271static bool
272try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
273 HOST_WIDE_INT size, unsigned int alignment,
274 HOST_WIDE_INT *poffset)
275{
276 HOST_WIDE_INT this_frame_offset;
277 int frame_off, frame_alignment, frame_phase;
278
279 /* Calculate how many bytes the start of local variables is off from
280 stack alignment. */
281 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
282 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
283 frame_phase = frame_off ? frame_alignment - frame_off : 0;
284
285 /* Round the frame offset to the specified alignment. */
286
287 /* We must be careful here, since FRAME_OFFSET might be negative and
288 division with a negative dividend isn't as well defined as we might
289 like. So we instead assume that ALIGNMENT is a power of two and
290 use logical operations which are unambiguous. */
291 if (FRAME_GROWS_DOWNWARD)
292 this_frame_offset
293 = (FLOOR_ROUND (start + length - size - frame_phase,
294 (unsigned HOST_WIDE_INT) alignment)
295 + frame_phase);
296 else
297 this_frame_offset
298 = (CEIL_ROUND (start - frame_phase,
299 (unsigned HOST_WIDE_INT) alignment)
300 + frame_phase);
301
302 /* See if it fits. If this space is at the edge of the frame,
303 consider extending the frame to make it fit. Our caller relies on
304 this when allocating a new slot. */
305 if (frame_offset == start && this_frame_offset < frame_offset)
306 frame_offset = this_frame_offset;
307 else if (this_frame_offset < start)
308 return false;
309 else if (start + length == frame_offset
310 && this_frame_offset + size > start + length)
311 frame_offset = this_frame_offset + size;
312 else if (this_frame_offset + size > start + length)
313 return false;
314
315 *poffset = this_frame_offset;
316 return true;
317}
318
319/* Create a new frame_space structure describing free space in the stack
320 frame beginning at START and ending at END, and chain it into the
321 function's frame_space_list. */
322
323static void
324add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
325{
766090c2 326 struct frame_space *space = ggc_alloc<frame_space> ();
56731d64
BS
327 space->next = crtl->frame_space_list;
328 crtl->frame_space_list = space;
329 space->start = start;
330 space->length = end - start;
331}
332
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with the KIND argument fixed to
   ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
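/* Illustration only (hypothetical call, not used in this file):
   assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0) hands back a
   DImode stack MEM aligned according to DImode, whereas passing -1 for
   ALIGN would instead force BIGGEST_ALIGNMENT and round the size up to
   a multiple of it, per the ALIGN conventions documented above.  */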
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

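/* A sketch of the typical calling pattern, for orientation only (the
   real callers live elsewhere, e.g. in expr.c and calls.c):

     push_temp_slots ();
     ... slot = assign_stack_temp (mode, size); ...
     preserve_temp_slots (ret);
     pop_temp_slots ();

   Each push_temp_slots/pop_temp_slots pair brackets one nesting level;
   preserve_temp_slots moves the slot RET points into up to the
   enclosing level, and pop_temp_slots then frees whatever is left at
   the inner level.  */
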
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless we are
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
\f
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
     in common between them; if so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
\f
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

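/* Illustrative use only (the real callers are in the back ends, not
   here): a target that needs the value its link register had on entry
   can call get_hard_reg_initial_val (Pmode, <link-reg number>) while
   expanding; emit_initial_value_sets below then emits the
   pseudo <- hard-reg copies at the entry of the function.  */
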
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}

/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif

\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          HOST_WIDE_INT offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                  break;
                }

              /* FIXME -- from old code */
              /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                 we can commute the PLUS and SUBREG because pointers into the
                 frame are well-behaved.  */
              break;

            default:
              break;
            }
        }
    }
  return changed;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}

bbf9b913
RH
1508/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1509 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1510
1511static void
691fe203 1512instantiate_virtual_regs_in_insn (rtx_insn *insn)
5a73491b 1513{
bbf9b913
RH
1514 HOST_WIDE_INT offset;
1515 int insn_code, i;
9325973e 1516 bool any_change = false;
691fe203
DM
1517 rtx set, new_rtx, x;
1518 rtx_insn *seq;
32e66afd 1519
bbf9b913
RH
1520 /* There are some special cases to be handled first. */
1521 set = single_set (insn);
1522 if (set)
32e66afd 1523 {
bbf9b913
RH
1524 /* We're allowed to assign to a virtual register. This is interpreted
1525 to mean that the underlying register gets assigned the inverse
1526 transformation. This is used, for example, in the handling of
1527 non-local gotos. */
82d6e6fc
KG
1528 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1529 if (new_rtx)
bbf9b913
RH
1530 {
1531 start_sequence ();
32e66afd 1532
b8704801 1533 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
82d6e6fc 1534 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
69a59f0f 1535 gen_int_mode (-offset, GET_MODE (new_rtx)));
82d6e6fc
KG
1536 x = force_operand (x, new_rtx);
1537 if (x != new_rtx)
1538 emit_move_insn (new_rtx, x);
5a73491b 1539
bbf9b913
RH
1540 seq = get_insns ();
1541 end_sequence ();
5a73491b 1542
bbf9b913
RH
1543 emit_insn_before (seq, insn);
1544 delete_insn (insn);
1545 return;
1546 }
5a73491b 1547
bbf9b913
RH
1548 /* Handle a straight copy from a virtual register by generating a
1549 new add insn. The difference between this and falling through
1550 to the generic case is avoiding a new pseudo and eliminating a
1551 move insn in the initial rtl stream. */
82d6e6fc
KG
1552 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1553 if (new_rtx && offset != 0
bbf9b913
RH
1554 && REG_P (SET_DEST (set))
1555 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1556 {
1557 start_sequence ();
5a73491b 1558
2f1cd2eb
RS
1559 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1560 gen_int_mode (offset,
1561 GET_MODE (SET_DEST (set))),
1562 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1563 if (x != SET_DEST (set))
1564 emit_move_insn (SET_DEST (set), x);
770ae6cc 1565
bbf9b913
RH
1566 seq = get_insns ();
1567 end_sequence ();
87ce34d6 1568
bbf9b913
RH
1569 emit_insn_before (seq, insn);
1570 delete_insn (insn);
87ce34d6 1571 return;
bbf9b913 1572 }
5a73491b 1573
bbf9b913 1574 extract_insn (insn);
9325973e 1575 insn_code = INSN_CODE (insn);
5a73491b 1576
bbf9b913
RH
1577 /* Handle a plus involving a virtual register by determining if the
1578 operands remain valid if they're modified in place. */
1579 if (GET_CODE (SET_SRC (set)) == PLUS
1580 && recog_data.n_operands >= 3
1581 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1582 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
481683e1 1583 && CONST_INT_P (recog_data.operand[2])
82d6e6fc 1584 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
bbf9b913
RH
1585 {
1586 offset += INTVAL (recog_data.operand[2]);
5a73491b 1587
bbf9b913 1588 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1589 if (offset == 0
1590 && REG_P (SET_DEST (set))
1591 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1592 {
1593 start_sequence ();
82d6e6fc 1594 emit_move_insn (SET_DEST (set), new_rtx);
bbf9b913
RH
1595 seq = get_insns ();
1596 end_sequence ();
d1405722 1597
bbf9b913
RH
1598 emit_insn_before (seq, insn);
1599 delete_insn (insn);
1600 return;
1601 }
d1405722 1602
bbf9b913 1603 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1604
1605 /* Using validate_change and apply_change_group here leaves
1606 recog_data in an invalid state. Since we know exactly what
1607 we want to check, do those two by hand. */
82d6e6fc 1608 if (safe_insn_predicate (insn_code, 1, new_rtx)
bbf9b913
RH
1609 && safe_insn_predicate (insn_code, 2, x))
1610 {
82d6e6fc 1611 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
bbf9b913
RH
1612 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1613 any_change = true;
9325973e
RH
1614
1615 /* Fall through into the regular operand fixup loop in
1616 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1617 }
1618 }
1619 }
d1405722 1620 else
9325973e
RH
1621 {
1622 extract_insn (insn);
1623 insn_code = INSN_CODE (insn);
1624 }
5dc96d60 1625
bbf9b913
RH
1626 /* In the general case, we expect virtual registers to appear only in
1627 operands, and then only as either bare registers or inside memories. */
1628 for (i = 0; i < recog_data.n_operands; ++i)
1629 {
1630 x = recog_data.operand[i];
1631 switch (GET_CODE (x))
1632 {
1633 case MEM:
1634 {
1635 rtx addr = XEXP (x, 0);
bbf9b913 1636
b8704801 1637 if (!instantiate_virtual_regs_in_rtx (&addr))
bbf9b913
RH
1638 continue;
1639
1640 start_sequence ();
23b33725 1641 x = replace_equiv_address (x, addr, true);
a5bfb13a
MM
1642 /* It may happen that the address with the virtual reg
1643 was valid (e.g. based on the virtual stack reg, which might
1644 be acceptable to the predicates with all offsets), whereas
1645 the address now isn't valid anymore, for instance when the address
1646 still has an offset but the base reg is no longer the virtual
1647 stack reg. Below we would do a force_reg on the whole operand,
1648 but this insn might actually only accept memory. Hence,
1649 before doing that last resort, try to reload the address into
1650 a register, so this operand stays a MEM. */
1651 if (!safe_insn_predicate (insn_code, i, x))
1652 {
1653 addr = force_reg (GET_MODE (addr), addr);
23b33725 1654 x = replace_equiv_address (x, addr, true);
a5bfb13a 1655 }
bbf9b913
RH
1656 seq = get_insns ();
1657 end_sequence ();
1658 if (seq)
1659 emit_insn_before (seq, insn);
1660 }
1661 break;
1662
1663 case REG:
82d6e6fc
KG
1664 new_rtx = instantiate_new_reg (x, &offset);
1665 if (new_rtx == NULL)
bbf9b913
RH
1666 continue;
1667 if (offset == 0)
82d6e6fc 1668 x = new_rtx;
bbf9b913
RH
1669 else
1670 {
1671 start_sequence ();
6f086dfc 1672
bbf9b913
RH
1673 /* Careful, special mode predicates may have stuff in
1674 insn_data[insn_code].operand[i].mode that isn't useful
1675 to us for computing a new value. */
1676 /* ??? Recognize address_operand and/or "p" constraints
1677 to see if (plus new offset) is a valid address before we put
1678 this through expand_simple_binop. */
82d6e6fc 1679 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
2f1cd2eb
RS
1680 gen_int_mode (offset, GET_MODE (x)),
1681 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1682 seq = get_insns ();
1683 end_sequence ();
1684 emit_insn_before (seq, insn);
1685 }
1686 break;
6f086dfc 1687
bbf9b913 1688 case SUBREG:
82d6e6fc
KG
1689 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1690 if (new_rtx == NULL)
bbf9b913
RH
1691 continue;
1692 if (offset != 0)
1693 {
1694 start_sequence ();
2f1cd2eb
RS
1695 new_rtx = expand_simple_binop
1696 (GET_MODE (new_rtx), PLUS, new_rtx,
1697 gen_int_mode (offset, GET_MODE (new_rtx)),
1698 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1699 seq = get_insns ();
1700 end_sequence ();
1701 emit_insn_before (seq, insn);
1702 }
82d6e6fc
KG
1703 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1704 GET_MODE (new_rtx), SUBREG_BYTE (x));
7314c7dd 1705 gcc_assert (x);
bbf9b913 1706 break;
6f086dfc 1707
bbf9b913
RH
1708 default:
1709 continue;
1710 }
6f086dfc 1711
bbf9b913
RH
1712 /* At this point, X contains the new value for the operand.
1713 Validate the new value vs the insn predicate. Note that
1714 asm insns will have insn_code -1 here. */
1715 if (!safe_insn_predicate (insn_code, i, x))
6ba1bd36
JM
1716 {
1717 start_sequence ();
f7ce0951
SE
1718 if (REG_P (x))
1719 {
1720 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1721 x = copy_to_reg (x);
1722 }
1723 else
1724 x = force_reg (insn_data[insn_code].operand[i].mode, x);
6ba1bd36
JM
1725 seq = get_insns ();
1726 end_sequence ();
1727 if (seq)
1728 emit_insn_before (seq, insn);
1729 }
6f086dfc 1730
bbf9b913
RH
1731 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1732 any_change = true;
1733 }
6f086dfc 1734
bbf9b913
RH
1735 if (any_change)
1736 {
1737 /* Propagate operand changes into the duplicates. */
1738 for (i = 0; i < recog_data.n_dups; ++i)
1739 *recog_data.dup_loc[i]
3e916873 1740 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
5dc96d60 1741
bbf9b913
RH
1742 /* Force re-recognition of the instruction for validation. */
1743 INSN_CODE (insn) = -1;
1744 }
6f086dfc 1745
bbf9b913 1746 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1747 {
bbf9b913 1748 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1749 {
bbf9b913 1750 error_for_asm (insn, "impossible constraint in %<asm%>");
5a860835
JJ
1751 /* For asm goto, instead of fixing up all the edges
1752 just clear the template and clear input operands
1753 (asm goto doesn't have any output operands). */
1754 if (JUMP_P (insn))
1755 {
1756 rtx asm_op = extract_asm_operands (PATTERN (insn));
1757 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1758 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1759 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1760 }
1761 else
1762 delete_insn (insn);
bbf9b913
RH
1763 }
1764 }
1765 else
1766 {
1767 if (recog_memoized (insn) < 0)
1768 fatal_insn_not_found (insn);
1769 }
1770}
14a774a9 1771
bbf9b913
RH
1772/* Subroutine of instantiate_decls. Given RTL representing a decl,
1773 do any instantiation required. */
14a774a9 1774
e41b2a33
PB
1775void
1776instantiate_decl_rtl (rtx x)
bbf9b913
RH
1777{
1778 rtx addr;
6f086dfc 1779
bbf9b913
RH
1780 if (x == 0)
1781 return;
6f086dfc 1782
bbf9b913
RH
1783 /* If this is a CONCAT, recurse for the pieces. */
1784 if (GET_CODE (x) == CONCAT)
1785 {
e41b2a33
PB
1786 instantiate_decl_rtl (XEXP (x, 0));
1787 instantiate_decl_rtl (XEXP (x, 1));
bbf9b913
RH
1788 return;
1789 }
6f086dfc 1790
bbf9b913
RH
1791 /* If this is not a MEM, no need to do anything. Similarly if the
1792 address is a constant or a register that is not a virtual register. */
1793 if (!MEM_P (x))
1794 return;
6f086dfc 1795
bbf9b913
RH
1796 addr = XEXP (x, 0);
1797 if (CONSTANT_P (addr)
1798 || (REG_P (addr)
1799 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1800 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1801 return;
6f086dfc 1802
b8704801 1803 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
bbf9b913 1804}
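/* For example, a local variable whose DECL_RTL is
      (mem (plus (reg VIRTUAL_STACK_VARS) (const_int -8)))
   has the virtual register in its address replaced with the eliminated
   hard register plus the corresponding offset, while DECL_RTLs whose
   addresses are constants or ordinary registers are left untouched.  */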
6f086dfc 1805
434eba35
JJ
1806/* Helper for instantiate_decls called via walk_tree: Process all decls
1807 in the given DECL_VALUE_EXPR. */
1808
1809static tree
1810instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1811{
1812 tree t = *tp;
726a989a 1813 if (! EXPR_P (t))
434eba35
JJ
1814 {
1815 *walk_subtrees = 0;
37d6a488
AO
1816 if (DECL_P (t))
1817 {
1818 if (DECL_RTL_SET_P (t))
1819 instantiate_decl_rtl (DECL_RTL (t));
1820 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1821 && DECL_INCOMING_RTL (t))
1822 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1823 if ((TREE_CODE (t) == VAR_DECL
1824 || TREE_CODE (t) == RESULT_DECL)
1825 && DECL_HAS_VALUE_EXPR_P (t))
1826 {
1827 tree v = DECL_VALUE_EXPR (t);
1828 walk_tree (&v, instantiate_expr, NULL, NULL);
1829 }
1830 }
434eba35
JJ
1831 }
1832 return NULL;
1833}
1834
bbf9b913
RH
1835/* Subroutine of instantiate_decls: Process all decls in the given
1836 BLOCK node and all its subblocks. */
6f086dfc 1837
bbf9b913
RH
1838static void
1839instantiate_decls_1 (tree let)
1840{
1841 tree t;
6f086dfc 1842
910ad8de 1843 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
434eba35
JJ
1844 {
1845 if (DECL_RTL_SET_P (t))
e41b2a33 1846 instantiate_decl_rtl (DECL_RTL (t));
434eba35
JJ
1847 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1848 {
1849 tree v = DECL_VALUE_EXPR (t);
1850 walk_tree (&v, instantiate_expr, NULL, NULL);
1851 }
1852 }
6f086dfc 1853
bbf9b913 1854 /* Process all subblocks. */
87caf699 1855 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
bbf9b913
RH
1856 instantiate_decls_1 (t);
1857}
6f086dfc 1858
bbf9b913
RH
1859/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1860 all virtual registers in their DECL_RTL's. */
6f086dfc 1861
bbf9b913
RH
1862static void
1863instantiate_decls (tree fndecl)
1864{
c021f10b
NF
1865 tree decl;
1866 unsigned ix;
6f086dfc 1867
bbf9b913 1868 /* Process all parameters of the function. */
910ad8de 1869 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
bbf9b913 1870 {
e41b2a33
PB
1871 instantiate_decl_rtl (DECL_RTL (decl));
1872 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1873 if (DECL_HAS_VALUE_EXPR_P (decl))
1874 {
1875 tree v = DECL_VALUE_EXPR (decl);
1876 walk_tree (&v, instantiate_expr, NULL, NULL);
1877 }
bbf9b913 1878 }
4fd796bb 1879
37d6a488
AO
1880 if ((decl = DECL_RESULT (fndecl))
1881 && TREE_CODE (decl) == RESULT_DECL)
1882 {
1883 if (DECL_RTL_SET_P (decl))
1884 instantiate_decl_rtl (DECL_RTL (decl));
1885 if (DECL_HAS_VALUE_EXPR_P (decl))
1886 {
1887 tree v = DECL_VALUE_EXPR (decl);
1888 walk_tree (&v, instantiate_expr, NULL, NULL);
1889 }
1890 }
1891
3fd48b12
EB
1892 /* Process the saved static chain if it exists. */
1893 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1894 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1895 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1896
bbf9b913
RH
1897 /* Now process all variables defined in the function or its subblocks. */
1898 instantiate_decls_1 (DECL_INITIAL (fndecl));
802e9f8e 1899
c021f10b
NF
1900 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1901 if (DECL_RTL_SET_P (decl))
1902 instantiate_decl_rtl (DECL_RTL (decl));
9771b263 1903 vec_free (cfun->local_decls);
bbf9b913 1904}
6f086dfc 1905
bbf9b913
RH
1906/* Pass through the INSNS of function FNDECL and convert virtual register
1907 references to hard register references. */
6f086dfc 1908
c2924966 1909static unsigned int
bbf9b913
RH
1910instantiate_virtual_regs (void)
1911{
691fe203 1912 rtx_insn *insn;
6f086dfc 1913
bbf9b913
RH
1914 /* Compute the offsets to use for this function. */
1915 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1916 var_offset = STARTING_FRAME_OFFSET;
1917 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1918 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1919#ifdef FRAME_POINTER_CFA_OFFSET
1920 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1921#else
bbf9b913 1922 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1923#endif
e9a25f70 1924
bbf9b913
RH
1925 /* Initialize recognition, indicating that volatile is OK. */
1926 init_recog ();
6f086dfc 1927
bbf9b913
RH
1928 /* Scan through all the insns, instantiating every virtual register still
1929 present. */
45dbce1b
NF
1930 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1931 if (INSN_P (insn))
1932 {
1933 /* These patterns in the instruction stream can never be recognized.
1934 Fortunately, they shouldn't contain virtual registers either. */
39718607 1935 if (GET_CODE (PATTERN (insn)) == USE
45dbce1b 1936 || GET_CODE (PATTERN (insn)) == CLOBBER
45dbce1b
NF
1937 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1938 continue;
1939 else if (DEBUG_INSN_P (insn))
b8704801 1940 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
45dbce1b
NF
1941 else
1942 instantiate_virtual_regs_in_insn (insn);
ba4807a0 1943
4654c0cf 1944 if (insn->deleted ())
45dbce1b 1945 continue;
7114321e 1946
b8704801 1947 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
ba4807a0 1948
45dbce1b
NF
1949 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1950 if (CALL_P (insn))
b8704801 1951 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
45dbce1b 1952 }
6f086dfc 1953
bbf9b913
RH
1954 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1955 instantiate_decls (current_function_decl);
1956
e41b2a33
PB
1957 targetm.instantiate_decls ();
1958
bbf9b913
RH
1959 /* Indicate that, from now on, assign_stack_local should use
1960 frame_pointer_rtx. */
1961 virtuals_instantiated = 1;
d3c12306 1962
c2924966 1963 return 0;
6f086dfc 1964}
ef330312 1965
27a4cd48
DM
1966namespace {
1967
1968const pass_data pass_data_instantiate_virtual_regs =
1969{
1970 RTL_PASS, /* type */
1971 "vregs", /* name */
1972 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
1973 TV_NONE, /* tv_id */
1974 0, /* properties_required */
1975 0, /* properties_provided */
1976 0, /* properties_destroyed */
1977 0, /* todo_flags_start */
1978 0, /* todo_flags_finish */
ef330312
PB
1979};
1980
27a4cd48
DM
1981class pass_instantiate_virtual_regs : public rtl_opt_pass
1982{
1983public:
c3284718
RS
1984 pass_instantiate_virtual_regs (gcc::context *ctxt)
1985 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
27a4cd48
DM
1986 {}
1987
1988 /* opt_pass methods: */
be55bfe6
TS
1989 virtual unsigned int execute (function *)
1990 {
1991 return instantiate_virtual_regs ();
1992 }
27a4cd48
DM
1993
1994}; // class pass_instantiate_virtual_regs
1995
1996} // anon namespace
1997
1998rtl_opt_pass *
1999make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2000{
2001 return new pass_instantiate_virtual_regs (ctxt);
2002}
2003
6f086dfc 2004\f
d181c154
RS
2005/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2006 This means a type for which function calls must pass an address to the
2007 function or get an address back from the function.
2008 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
2009
2010int
586de218 2011aggregate_value_p (const_tree exp, const_tree fntype)
6f086dfc 2012{
d47d0a8d 2013 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
9d790a4f
RS
2014 int i, regno, nregs;
2015 rtx reg;
2f939d94 2016
61f71b34
DD
2017 if (fntype)
2018 switch (TREE_CODE (fntype))
2019 {
2020 case CALL_EXPR:
d47d0a8d
EB
2021 {
2022 tree fndecl = get_callee_fndecl (fntype);
2023 fntype = (fndecl
2024 ? TREE_TYPE (fndecl)
2025 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2026 }
61f71b34
DD
2027 break;
2028 case FUNCTION_DECL:
d47d0a8d 2029 fntype = TREE_TYPE (fntype);
61f71b34
DD
2030 break;
2031 case FUNCTION_TYPE:
2032 case METHOD_TYPE:
2033 break;
2034 case IDENTIFIER_NODE:
d47d0a8d 2035 fntype = NULL_TREE;
61f71b34
DD
2036 break;
2037 default:
d47d0a8d 2038 /* We don't expect other tree types here. */
0bccc606 2039 gcc_unreachable ();
61f71b34
DD
2040 }
2041
d47d0a8d 2042 if (VOID_TYPE_P (type))
d7bf8ada 2043 return 0;
500c353d 2044
ebf0bf7f
JJ
2045 /* If a record should be passed the same as its first (and only) member,
2046 don't pass it as an aggregate. */
2047 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2048 return aggregate_value_p (first_field (type), fntype);
2049
cc77ae10
JM
2050 /* If the front end has decided that this needs to be passed by
2051 reference, do so. */
2052 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2053 && DECL_BY_REFERENCE (exp))
2054 return 1;
500c353d 2055
d47d0a8d
EB
2056 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2057 if (fntype && TREE_ADDRESSABLE (fntype))
500c353d 2058 return 1;
b8698a0f 2059
956d6950 2060 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
2061 and thus can't be returned in registers. */
2062 if (TREE_ADDRESSABLE (type))
2063 return 1;
d47d0a8d 2064
05e3bdb9 2065 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 2066 return 1;
d47d0a8d
EB
2067
2068 if (targetm.calls.return_in_memory (type, fntype))
2069 return 1;
2070
9d790a4f
RS
2071 /* Make sure we have suitable call-clobbered regs to return
2072 the value in; if not, we must return it in memory. */
1d636cc6 2073 reg = hard_function_value (type, 0, fntype, 0);
e71f7aa5
JW
2074
2075 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2076 it is OK. */
f8cfc6aa 2077 if (!REG_P (reg))
e71f7aa5
JW
2078 return 0;
2079
9d790a4f 2080 regno = REGNO (reg);
66fd46b6 2081 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
2082 for (i = 0; i < nregs; i++)
2083 if (! call_used_regs[regno + i])
2084 return 1;
d47d0a8d 2085
6f086dfc
RS
2086 return 0;
2087}
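/* As a rough illustration: a function returning a 32-byte struct on a
   target whose return_in_memory hook rejects aggregates larger than two
   words yields 1 here, so callers pass the address of a return slot;
   a function returning a plain int yields 0 and uses the ordinary value
   return register.  */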
2088\f
8fff4fc1
RH
2089/* Return true if we should assign DECL a pseudo register; false if it
2090 should live on the local stack. */
2091
2092bool
fa233e34 2093use_register_for_decl (const_tree decl)
8fff4fc1 2094{
c3284718 2095 if (!targetm.calls.allocate_stack_slots_for_args ())
007e61c2 2096 return true;
b8698a0f 2097
8fff4fc1
RH
2098 /* Honor volatile. */
2099 if (TREE_SIDE_EFFECTS (decl))
2100 return false;
2101
2102 /* Honor addressability. */
2103 if (TREE_ADDRESSABLE (decl))
2104 return false;
2105
d5e254e1
IE
2106 /* Decl is implicitly addressable by bound stores and loads
2107 if it is an aggregate holding bounds. */
2108 if (chkp_function_instrumented_p (current_function_decl)
2109 && TREE_TYPE (decl)
2110 && !BOUNDED_P (decl)
2111 && chkp_type_has_pointer (TREE_TYPE (decl)))
2112 return false;
2113
8fff4fc1
RH
2114 /* Only register-like things go in registers. */
2115 if (DECL_MODE (decl) == BLKmode)
2116 return false;
2117
2118 /* If -ffloat-store specified, don't put explicit float variables
2119 into registers. */
2120 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2121 propagates values across these stores, and it probably shouldn't. */
2122 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2123 return false;
2124
78e0d62b
RH
2125 /* If we're not interested in tracking debugging information for
2126 this decl, then we can certainly put it in a register. */
2127 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
2128 return true;
2129
d130d647
JJ
2130 if (optimize)
2131 return true;
2132
2133 if (!DECL_REGISTER (decl))
2134 return false;
2135
2136 switch (TREE_CODE (TREE_TYPE (decl)))
2137 {
2138 case RECORD_TYPE:
2139 case UNION_TYPE:
2140 case QUAL_UNION_TYPE:
2141 /* When not optimizing, disregard register keyword for variables with
2142 types containing methods, otherwise the methods won't be callable
2143 from the debugger. */
2144 if (TYPE_METHODS (TREE_TYPE (decl)))
2145 return false;
2146 break;
2147 default:
2148 break;
2149 }
2150
2151 return true;
8fff4fc1
RH
2152}
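/* For instance, a 'volatile int' local or one whose address is taken
   keeps its stack slot, whereas a compiler-generated DECL_IGNORED_P
   temporary of register-sized mode is given a pseudo even at -O0.  */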
2153
0976078c
RH
2154/* Return true if TYPE should be passed by invisible reference. */
2155
2156bool
ef4bddc2 2157pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
8cd5a4e0 2158 tree type, bool named_arg)
0976078c
RH
2159{
2160 if (type)
2161 {
2162 /* If this type contains non-trivial constructors, then it is
2163 forbidden for the middle-end to create any new copies. */
2164 if (TREE_ADDRESSABLE (type))
2165 return true;
2166
d58247a3
RH
2167 /* GCC post 3.4 passes *all* variable sized types by reference. */
2168 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c 2169 return true;
ebf0bf7f
JJ
2170
2171 /* If a record type should be passed the same as its first (and only)
2172 member, use the type and mode of that member. */
2173 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2174 {
2175 type = TREE_TYPE (first_field (type));
2176 mode = TYPE_MODE (type);
2177 }
0976078c
RH
2178 }
2179
d5cc9181
JR
2180 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2181 type, named_arg);
0976078c
RH
2182}
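/* For example, a parameter whose type has variable size (such as a C99
   VLA type) is always passed by invisible reference here, regardless of
   what the target hook would answer for fixed-size types.  */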
2183
6cdd5672
RH
2184/* Return true if TYPE, which is passed by reference, should be callee
2185 copied instead of caller copied. */
2186
2187bool
ef4bddc2 2188reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
6cdd5672
RH
2189 tree type, bool named_arg)
2190{
2191 if (type && TREE_ADDRESSABLE (type))
2192 return false;
d5cc9181
JR
2193 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2194 named_arg);
6cdd5672
RH
2195}
2196
6071dc7f
RH
2197/* Structures to communicate between the subroutines of assign_parms.
2198 The first holds data persistent across all parameters, the second
2199 is cleared out for each parameter. */
6f086dfc 2200
6071dc7f 2201struct assign_parm_data_all
6f086dfc 2202{
d5cc9181
JR
2203 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2204 should become a job of the target or otherwise encapsulated. */
2205 CUMULATIVE_ARGS args_so_far_v;
2206 cumulative_args_t args_so_far;
6f086dfc 2207 struct args_size stack_args_size;
6071dc7f
RH
2208 tree function_result_decl;
2209 tree orig_fnargs;
7a688d52
DM
2210 rtx_insn *first_conversion_insn;
2211 rtx_insn *last_conversion_insn;
6071dc7f
RH
2212 HOST_WIDE_INT pretend_args_size;
2213 HOST_WIDE_INT extra_pretend_bytes;
2214 int reg_parm_stack_space;
2215};
6f086dfc 2216
6071dc7f
RH
2217struct assign_parm_data_one
2218{
2219 tree nominal_type;
2220 tree passed_type;
2221 rtx entry_parm;
2222 rtx stack_parm;
ef4bddc2
RS
2223 machine_mode nominal_mode;
2224 machine_mode passed_mode;
2225 machine_mode promoted_mode;
6071dc7f
RH
2226 struct locate_and_pad_arg_data locate;
2227 int partial;
2228 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
2229 BOOL_BITFIELD passed_pointer : 1;
2230 BOOL_BITFIELD on_stack : 1;
2231 BOOL_BITFIELD loaded_in_reg : 1;
2232};
ebb904cb 2233
d5e254e1
IE
2234struct bounds_parm_data
2235{
2236 assign_parm_data_one parm_data;
2237 tree bounds_parm;
2238 tree ptr_parm;
2239 rtx ptr_entry;
2240 int bound_no;
2241};
2242
6071dc7f 2243/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 2244
6071dc7f
RH
2245static void
2246assign_parms_initialize_all (struct assign_parm_data_all *all)
2247{
fc2f1f53 2248 tree fntype ATTRIBUTE_UNUSED;
6f086dfc 2249
6071dc7f
RH
2250 memset (all, 0, sizeof (*all));
2251
2252 fntype = TREE_TYPE (current_function_decl);
2253
2254#ifdef INIT_CUMULATIVE_INCOMING_ARGS
d5cc9181 2255 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
6071dc7f 2256#else
d5cc9181 2257 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
6071dc7f
RH
2258 current_function_decl, -1);
2259#endif
d5cc9181 2260 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
6071dc7f 2261
ddbb449f
AM
2262#ifdef INCOMING_REG_PARM_STACK_SPACE
2263 all->reg_parm_stack_space
2264 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
6071dc7f
RH
2265#endif
2266}
6f086dfc 2267
6071dc7f
RH
2268/* If ARGS contains entries with complex types, split each such entry
2269 into two entries of the component type. ARGS is updated in place
2270 rather than returned. */
2271
3b3f318a 2272static void
9771b263 2273split_complex_args (vec<tree> *args)
6071dc7f 2274{
3b3f318a 2275 unsigned i;
6071dc7f
RH
2276 tree p;
2277
9771b263 2278 FOR_EACH_VEC_ELT (*args, i, p)
6071dc7f
RH
2279 {
2280 tree type = TREE_TYPE (p);
2281 if (TREE_CODE (type) == COMPLEX_TYPE
2282 && targetm.calls.split_complex_arg (type))
2283 {
2284 tree decl;
2285 tree subtype = TREE_TYPE (type);
6ccd356e 2286 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
2287
2288 /* Rewrite the PARM_DECL's type with its component. */
3b3f318a 2289 p = copy_node (p);
6071dc7f
RH
2290 TREE_TYPE (p) = subtype;
2291 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2292 DECL_MODE (p) = VOIDmode;
2293 DECL_SIZE (p) = NULL;
2294 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
2295 /* If this arg must go in memory, put it in a pseudo here.
2296 We can't allow it to go in memory as per normal parms,
2297 because the usual place might not have the imag part
2298 adjacent to the real part. */
2299 DECL_ARTIFICIAL (p) = addressable;
2300 DECL_IGNORED_P (p) = addressable;
2301 TREE_ADDRESSABLE (p) = 0;
6071dc7f 2302 layout_decl (p, 0);
9771b263 2303 (*args)[i] = p;
6071dc7f
RH
2304
2305 /* Build a second synthetic decl. */
c2255bc4
AH
2306 decl = build_decl (EXPR_LOCATION (p),
2307 PARM_DECL, NULL_TREE, subtype);
6071dc7f 2308 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
2309 DECL_ARTIFICIAL (decl) = addressable;
2310 DECL_IGNORED_P (decl) = addressable;
6071dc7f 2311 layout_decl (decl, 0);
9771b263 2312 args->safe_insert (++i, decl);
6071dc7f
RH
2313 }
2314 }
6071dc7f
RH
2315}
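/* For instance, when the target splits complex arguments, a single
      _Complex double x
   parameter is replaced in ARGS by two double PARM_DECLs, one for each
   component of the value.  */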
2316
2317/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2318 the hidden struct return argument, and (abi willing) complex args.
2319 Return the new parameter list. */
2320
9771b263 2321static vec<tree>
6071dc7f
RH
2322assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2323{
2324 tree fndecl = current_function_decl;
2325 tree fntype = TREE_TYPE (fndecl);
6e1aa848 2326 vec<tree> fnargs = vNULL;
3b3f318a
RG
2327 tree arg;
2328
910ad8de 2329 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
9771b263 2330 fnargs.safe_push (arg);
3b3f318a
RG
2331
2332 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2333
2334 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2335 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
e3b5732b 2336 && ! cfun->returns_pcc_struct
61f71b34 2337 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2338 {
f9f29478 2339 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2340 tree decl;
6f086dfc 2341
c2255bc4 2342 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
8dcfef8f 2343 PARM_DECL, get_identifier (".result_ptr"), type);
6071dc7f
RH
2344 DECL_ARG_TYPE (decl) = type;
2345 DECL_ARTIFICIAL (decl) = 1;
8dcfef8f
AO
2346 DECL_NAMELESS (decl) = 1;
2347 TREE_CONSTANT (decl) = 1;
6f086dfc 2348
910ad8de 2349 DECL_CHAIN (decl) = all->orig_fnargs;
3b3f318a 2350 all->orig_fnargs = decl;
9771b263 2351 fnargs.safe_insert (0, decl);
3b3f318a 2352
6071dc7f 2353 all->function_result_decl = decl;
d5e254e1
IE
2354
2355 /* If the function is instrumented, the bounds of the
2356 passed structure address are the second argument. */
2357 if (chkp_function_instrumented_p (fndecl))
2358 {
2359 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2360 PARM_DECL, get_identifier (".result_bnd"),
2361 pointer_bounds_type_node);
2362 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2363 DECL_ARTIFICIAL (decl) = 1;
2364 DECL_NAMELESS (decl) = 1;
2365 TREE_CONSTANT (decl) = 1;
2366
2367 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2368 DECL_CHAIN (all->orig_fnargs) = decl;
2369 fnargs.safe_insert (1, decl);
2370 }
6f086dfc 2371 }
718fe406 2372
42ba5130
RH
2373 /* If the target wants to split complex arguments into scalars, do so. */
2374 if (targetm.calls.split_complex_arg)
3b3f318a 2375 split_complex_args (&fnargs);
ded9bf77 2376
6071dc7f
RH
2377 return fnargs;
2378}
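/* E.g. for
      struct big f (int a);
   where 'struct big' is returned in memory and the struct-value rtx is
   null, the augmented list becomes (.result_ptr, a); if the function is
   chkp-instrumented, a .result_bnd entry follows the pointer.  */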
e7949876 2379
6071dc7f
RH
2380/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2381 data for the parameter. Incorporate ABI specifics such as pass-by-
2382 reference and type promotion. */
6f086dfc 2383
6071dc7f
RH
2384static void
2385assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2386 struct assign_parm_data_one *data)
2387{
2388 tree nominal_type, passed_type;
ef4bddc2 2389 machine_mode nominal_mode, passed_mode, promoted_mode;
cde0f3fd 2390 int unsignedp;
6f086dfc 2391
6071dc7f
RH
2392 memset (data, 0, sizeof (*data));
2393
fa10beec 2394 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
e3b5732b 2395 if (!cfun->stdarg)
fa10beec 2396 data->named_arg = 1; /* No variadic parms. */
910ad8de 2397 else if (DECL_CHAIN (parm))
fa10beec 2398 data->named_arg = 1; /* Not the last non-variadic parm. */
d5cc9181 2399 else if (targetm.calls.strict_argument_naming (all->args_so_far))
fa10beec 2400 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 2401 else
fa10beec 2402 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
2403
2404 nominal_type = TREE_TYPE (parm);
2405 passed_type = DECL_ARG_TYPE (parm);
2406
2407 /* Look out for errors propagating this far. Also, if the parameter's
2408 type is void then its value doesn't matter. */
2409 if (TREE_TYPE (parm) == error_mark_node
2410 /* This can happen after weird syntax errors
2411 or if an enum type is defined among the parms. */
2412 || TREE_CODE (parm) != PARM_DECL
2413 || passed_type == NULL
2414 || VOID_TYPE_P (nominal_type))
2415 {
2416 nominal_type = passed_type = void_type_node;
2417 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2418 goto egress;
2419 }
108b7d3d 2420
6071dc7f
RH
2421 /* Find mode of arg as it is passed, and mode of arg as it should be
2422 during execution of this function. */
2423 passed_mode = TYPE_MODE (passed_type);
2424 nominal_mode = TYPE_MODE (nominal_type);
2425
ebf0bf7f
JJ
2426 /* If the parm is to be passed as a transparent union or record, use the
2427 type of the first field for the tests below. We have already verified
2428 that the modes are the same. */
2429 if ((TREE_CODE (passed_type) == UNION_TYPE
2430 || TREE_CODE (passed_type) == RECORD_TYPE)
2431 && TYPE_TRANSPARENT_AGGR (passed_type))
2432 passed_type = TREE_TYPE (first_field (passed_type));
6071dc7f 2433
0976078c 2434 /* See if this arg was passed by invisible reference. */
d5cc9181 2435 if (pass_by_reference (&all->args_so_far_v, passed_mode,
0976078c 2436 passed_type, data->named_arg))
6071dc7f
RH
2437 {
2438 passed_type = nominal_type = build_pointer_type (passed_type);
2439 data->passed_pointer = true;
fd91cfe3 2440 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
6071dc7f 2441 }
6f086dfc 2442
6071dc7f 2443 /* Find mode as it is passed by the ABI. */
cde0f3fd
PB
2444 unsignedp = TYPE_UNSIGNED (passed_type);
2445 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2446 TREE_TYPE (current_function_decl), 0);
6f086dfc 2447
6071dc7f
RH
2448 egress:
2449 data->nominal_type = nominal_type;
2450 data->passed_type = passed_type;
2451 data->nominal_mode = nominal_mode;
2452 data->passed_mode = passed_mode;
2453 data->promoted_mode = promoted_mode;
2454}
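/* On a target that promotes sub-word scalar arguments to word_mode, a
   'short' parameter typically ends up with passed_mode == HImode,
   nominal_mode == HImode and promoted_mode == SImode, while a large
   struct passed by invisible reference gets a pointer type and mode
   and data->passed_pointer set.  */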
16bae307 2455
6071dc7f 2456/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2457
6071dc7f
RH
2458static void
2459assign_parms_setup_varargs (struct assign_parm_data_all *all,
2460 struct assign_parm_data_one *data, bool no_rtl)
2461{
2462 int varargs_pretend_bytes = 0;
2463
d5cc9181 2464 targetm.calls.setup_incoming_varargs (all->args_so_far,
6071dc7f
RH
2465 data->promoted_mode,
2466 data->passed_type,
2467 &varargs_pretend_bytes, no_rtl);
2468
2469 /* If the back-end has requested extra stack space, record how much is
2470 needed. Do not change pretend_args_size otherwise since it may be
2471 nonzero from an earlier partial argument. */
2472 if (varargs_pretend_bytes > 0)
2473 all->pretend_args_size = varargs_pretend_bytes;
2474}
a53e14c0 2475
6071dc7f
RH
2476/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2477 the incoming location of the current parameter. */
2478
2479static void
2480assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2481 struct assign_parm_data_one *data)
2482{
2483 HOST_WIDE_INT pretend_bytes = 0;
2484 rtx entry_parm;
2485 bool in_regs;
2486
2487 if (data->promoted_mode == VOIDmode)
2488 {
2489 data->entry_parm = data->stack_parm = const0_rtx;
2490 return;
2491 }
a53e14c0 2492
d5cc9181 2493 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2494 data->promoted_mode,
2495 data->passed_type,
2496 data->named_arg);
6f086dfc 2497
6071dc7f
RH
2498 if (entry_parm == 0)
2499 data->promoted_mode = data->passed_mode;
6f086dfc 2500
6071dc7f
RH
2501 /* Determine parm's home in the stack, in case it arrives in the stack
2502 or we should pretend it did. Compute the stack position and rtx where
2503 the argument arrives and its size.
6f086dfc 2504
6071dc7f
RH
2505 There is one complexity here: If this was a parameter that would
2506 have been passed in registers, but wasn't only because it is
2507 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2508 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2509 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2510 as it was the previous time. */
d5e254e1 2511 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
6f086dfc 2512#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2513 in_regs = true;
e7949876 2514#endif
6071dc7f
RH
2515 if (!in_regs && !data->named_arg)
2516 {
d5cc9181 2517 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
e7949876 2518 {
6071dc7f 2519 rtx tem;
d5cc9181 2520 tem = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2521 data->promoted_mode,
2522 data->passed_type, true);
6071dc7f 2523 in_regs = tem != NULL;
e7949876 2524 }
6071dc7f 2525 }
e7949876 2526
6071dc7f
RH
2527 /* If this parameter was passed both in registers and in the stack, use
2528 the copy on the stack. */
fe984136
RH
2529 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2530 data->passed_type))
6071dc7f 2531 entry_parm = 0;
e7949876 2532
6071dc7f
RH
2533 if (entry_parm)
2534 {
2535 int partial;
2536
d5cc9181 2537 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
78a52f11
RH
2538 data->promoted_mode,
2539 data->passed_type,
2540 data->named_arg);
6071dc7f
RH
2541 data->partial = partial;
2542
2543 /* The caller might already have allocated stack space for the
2544 register parameters. */
2545 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2546 {
6071dc7f
RH
2547 /* Part of this argument is passed in registers and part
2548 is passed on the stack. Ask the prologue code to extend
2549 the stack part so that we can recreate the full value.
2550
2551 PRETEND_BYTES is the size of the registers we need to store.
2552 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2553 stack space that the prologue should allocate.
2554
2555 Internally, gcc assumes that the argument pointer is aligned
2556 to STACK_BOUNDARY bits. This is used both for alignment
2557 optimizations (see init_emit) and to locate arguments that are
2558 aligned to more than PARM_BOUNDARY bits. We must preserve this
2559 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2560 a stack boundary. */
2561
2562 /* We assume at most one partial arg, and it must be the first
2563 argument on the stack. */
0bccc606 2564 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2565
78a52f11 2566 pretend_bytes = partial;
6071dc7f
RH
2567 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
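	  /* For example, if 12 bytes of this argument arrived in registers
	     and STACK_BYTES is 16, PRETEND_BYTES is 12 and pretend_args_size
	     is rounded up to 16, keeping the argument pointer aligned to
	     STACK_BOUNDARY.  */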
2568
2569 /* We want to align relative to the actual stack pointer, so
2570 don't include this in the stack size until later. */
2571 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2572 }
6071dc7f 2573 }
e7949876 2574
6071dc7f 2575 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2e4ceca5 2576 all->reg_parm_stack_space,
6071dc7f
RH
2577 entry_parm ? data->partial : 0, current_function_decl,
2578 &all->stack_args_size, &data->locate);
6f086dfc 2579
e94a448f
L
2580 /* Update parm_stack_boundary if this parameter is passed in the
2581 stack. */
2582 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2583 crtl->parm_stack_boundary = data->locate.boundary;
2584
6071dc7f
RH
2585 /* Adjust offsets to include the pretend args. */
2586 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2587 data->locate.slot_offset.constant += pretend_bytes;
2588 data->locate.offset.constant += pretend_bytes;
ebca59c3 2589
6071dc7f
RH
2590 data->entry_parm = entry_parm;
2591}
6f086dfc 2592
6071dc7f
RH
2593/* A subroutine of assign_parms. If there is actually space on the stack
2594 for this parm, count it in stack_args_size and return true. */
6f086dfc 2595
6071dc7f
RH
2596static bool
2597assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2598 struct assign_parm_data_one *data)
2599{
d5e254e1
IE
2600 /* Bounds are never passed on the stack to keep compatibility
2601 with non-instrumented code. */
2602 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2603 return false;
2e6ae27f 2604 /* Trivially true if we've no incoming register. */
d5e254e1 2605 else if (data->entry_parm == NULL)
6071dc7f
RH
2606 ;
2607 /* Also true if we're partially in registers and partially not,
2608 since we've arranged to drop the entire argument on the stack. */
2609 else if (data->partial != 0)
2610 ;
2611 /* Also true if the target says that it's passed in both registers
2612 and on the stack. */
2613 else if (GET_CODE (data->entry_parm) == PARALLEL
2614 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2615 ;
2616 /* Also true if the target says that there's stack allocated for
2617 all register parameters. */
2618 else if (all->reg_parm_stack_space > 0)
2619 ;
2620 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2621 else
2622 return false;
6f086dfc 2623
6071dc7f
RH
2624 all->stack_args_size.constant += data->locate.size.constant;
2625 if (data->locate.size.var)
2626 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2627
6071dc7f
RH
2628 return true;
2629}
0d1416c6 2630
6071dc7f
RH
2631/* A subroutine of assign_parms. Given that this parameter is allocated
2632 stack space by the ABI, find it. */
6f086dfc 2633
6071dc7f
RH
2634static void
2635assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2636{
2637 rtx offset_rtx, stack_parm;
2638 unsigned int align, boundary;
6f086dfc 2639
6071dc7f
RH
2640 /* If we're passing this arg using a reg, make its stack home the
2641 aligned stack slot. */
2642 if (data->entry_parm)
2643 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2644 else
2645 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2646
38173d38 2647 stack_parm = crtl->args.internal_arg_pointer;
6071dc7f
RH
2648 if (offset_rtx != const0_rtx)
2649 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2650 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2651
08ab0acf 2652 if (!data->passed_pointer)
997f78fb 2653 {
08ab0acf
JJ
2654 set_mem_attributes (stack_parm, parm, 1);
2655 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2656 while promoted mode's size is needed. */
2657 if (data->promoted_mode != BLKmode
2658 && data->promoted_mode != DECL_MODE (parm))
997f78fb 2659 {
f5541398 2660 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
527210c4 2661 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
08ab0acf
JJ
2662 {
2663 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2664 data->promoted_mode);
2665 if (offset)
527210c4 2666 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
08ab0acf 2667 }
997f78fb
JJ
2668 }
2669 }
6071dc7f 2670
bfc45551
AM
2671 boundary = data->locate.boundary;
2672 align = BITS_PER_UNIT;
6071dc7f
RH
2673
2674 /* If we're padding upward, we know that the alignment of the slot
c2ed6cf8 2675 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
6071dc7f
RH
2676 intentionally forcing upward padding. Otherwise we have to come
2677 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2678 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f 2679 align = boundary;
481683e1 2680 else if (CONST_INT_P (offset_rtx))
6071dc7f
RH
2681 {
2682 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2683 align = align & -align;
2684 }
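      /* E.g. a 20-byte offset with a 64-bit boundary gives
	 160 | 64 == 224, whose lowest set bit is 32, so the slot is only
	 assumed to be 32-bit aligned.  */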
bfc45551 2685 set_mem_align (stack_parm, align);
6071dc7f
RH
2686
2687 if (data->entry_parm)
2688 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2689
2690 data->stack_parm = stack_parm;
2691}
2692
2693/* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2694 always valid and contiguous. */
2695
2696static void
2697assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2698{
2699 rtx entry_parm = data->entry_parm;
2700 rtx stack_parm = data->stack_parm;
2701
2702 /* If this parm was passed part in regs and part in memory, pretend it
2703 arrived entirely in memory by pushing the register-part onto the stack.
2704 In the special case of a DImode or DFmode that is split, we could put
2705 it together in a pseudoreg directly, but for now that's not worth
2706 bothering with. */
2707 if (data->partial != 0)
2708 {
2709 /* Handle calls that pass values in multiple non-contiguous
2710 locations. The Irix 6 ABI has examples of this. */
2711 if (GET_CODE (entry_parm) == PARALLEL)
1a8cb155 2712 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
b8698a0f 2713 data->passed_type,
6071dc7f 2714 int_size_in_bytes (data->passed_type));
6f086dfc 2715 else
78a52f11
RH
2716 {
2717 gcc_assert (data->partial % UNITS_PER_WORD == 0);
1a8cb155
RS
2718 move_block_from_reg (REGNO (entry_parm),
2719 validize_mem (copy_rtx (stack_parm)),
78a52f11
RH
2720 data->partial / UNITS_PER_WORD);
2721 }
6f086dfc 2722
6071dc7f
RH
2723 entry_parm = stack_parm;
2724 }
6f086dfc 2725
6071dc7f
RH
2726 /* If we didn't decide this parm came in a register, by default it came
2727 on the stack. */
2728 else if (entry_parm == NULL)
2729 entry_parm = stack_parm;
2730
2731 /* When an argument is passed in multiple locations, we can't make use
2732 of this information, but we can save some copying if the whole argument
2733 is passed in a single register. */
2734 else if (GET_CODE (entry_parm) == PARALLEL
2735 && data->nominal_mode != BLKmode
2736 && data->passed_mode != BLKmode)
2737 {
2738 size_t i, len = XVECLEN (entry_parm, 0);
2739
2740 for (i = 0; i < len; i++)
2741 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2742 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2743 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2744 == data->passed_mode)
2745 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2746 {
2747 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2748 break;
2749 }
2750 }
e68a6ce1 2751
6071dc7f
RH
2752 data->entry_parm = entry_parm;
2753}
6f086dfc 2754
4d2a9850
DJ
2755/* A subroutine of assign_parms. Reconstitute any values which were
2756 passed in multiple registers and would fit in a single register. */
2757
2758static void
2759assign_parm_remove_parallels (struct assign_parm_data_one *data)
2760{
2761 rtx entry_parm = data->entry_parm;
2762
2763 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2764 This can be done with register operations rather than on the
2765 stack, even if we will store the reconstituted parameter on the
2766 stack later. */
85776d60 2767 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
4d2a9850
DJ
2768 {
2769 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
bbd46fd5 2770 emit_group_store (parmreg, entry_parm, data->passed_type,
4d2a9850
DJ
2771 GET_MODE_SIZE (GET_MODE (entry_parm)));
2772 entry_parm = parmreg;
2773 }
2774
2775 data->entry_parm = entry_parm;
2776}
2777
6071dc7f
RH
2778/* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2779 always valid and properly aligned. */
6f086dfc 2780
6071dc7f
RH
2781static void
2782assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2783{
2784 rtx stack_parm = data->stack_parm;
2785
2786 /* If we can't trust the parm stack slot to be aligned enough for its
2787 ultimate type, don't use that slot after entry. We'll make another
2788 stack slot, if we need one. */
bfc45551
AM
2789 if (stack_parm
2790 && ((STRICT_ALIGNMENT
2791 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2792 || (data->nominal_type
2793 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2794 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2795 stack_parm = NULL;
2796
2797 /* If parm was passed in memory, and we need to convert it on entry,
2798 don't store it back in that same slot. */
2799 else if (data->entry_parm == stack_parm
2800 && data->nominal_mode != BLKmode
2801 && data->nominal_mode != data->passed_mode)
2802 stack_parm = NULL;
2803
7d69de61
RH
2804 /* If stack protection is in effect for this function, don't leave any
2805 pointers in their passed stack slots. */
cb91fab0 2806 else if (crtl->stack_protect_guard
7d69de61
RH
2807 && (flag_stack_protect == 2
2808 || data->passed_pointer
2809 || POINTER_TYPE_P (data->nominal_type)))
2810 stack_parm = NULL;
2811
6071dc7f
RH
2812 data->stack_parm = stack_parm;
2813}
a0506b54 2814
6071dc7f
RH
2815/* A subroutine of assign_parms. Return true if the current parameter
2816 should be stored as a BLKmode in the current frame. */
2817
2818static bool
2819assign_parm_setup_block_p (struct assign_parm_data_one *data)
2820{
2821 if (data->nominal_mode == BLKmode)
2822 return true;
85776d60
DJ
2823 if (GET_MODE (data->entry_parm) == BLKmode)
2824 return true;
531547e9 2825
6e985040 2826#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2827 /* Only assign_parm_setup_block knows how to deal with register arguments
2828 that are padded at the least significant end. */
2829 if (REG_P (data->entry_parm)
2830 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2831 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2832 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2833 return true;
6e985040 2834#endif
6071dc7f
RH
2835
2836 return false;
2837}
2838
b8698a0f 2839/* A subroutine of assign_parms. Arrange for the parameter to be
6071dc7f
RH
2840 present and valid in DATA->STACK_PARM. */
2841
2842static void
27e29549
RH
2843assign_parm_setup_block (struct assign_parm_data_all *all,
2844 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2845{
2846 rtx entry_parm = data->entry_parm;
2847 rtx stack_parm = data->stack_parm;
bfc45551
AM
2848 HOST_WIDE_INT size;
2849 HOST_WIDE_INT size_stored;
6071dc7f 2850
27e29549
RH
2851 if (GET_CODE (entry_parm) == PARALLEL)
2852 entry_parm = emit_group_move_into_temps (entry_parm);
2853
bfc45551
AM
2854 size = int_size_in_bytes (data->passed_type);
2855 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2856 if (stack_parm == 0)
2857 {
a561d88b 2858 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2859 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2860 DECL_ALIGN (parm));
bfc45551
AM
2861 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2862 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2863 set_mem_attributes (stack_parm, parm, 1);
2864 }
2865
6071dc7f
RH
2866 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2867 calls that pass values in multiple non-contiguous locations. */
2868 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2869 {
6071dc7f
RH
2870 rtx mem;
2871
2872 /* Note that we will be storing an integral number of words.
2873 So we have to be careful to ensure that we allocate an
bfc45551 2874 integral number of words. We do this above when we call
6071dc7f
RH
2875 assign_stack_local if space was not allocated in the argument
2876 list. If it was, this will not work if PARM_BOUNDARY is not
2877 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2878 if it becomes a problem. Exception is when BLKmode arrives
2879 with arguments not conforming to word_mode. */
2880
bfc45551
AM
2881 if (data->stack_parm == 0)
2882 ;
6071dc7f
RH
2883 else if (GET_CODE (entry_parm) == PARALLEL)
2884 ;
0bccc606
NS
2885 else
2886 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2887
1a8cb155 2888 mem = validize_mem (copy_rtx (stack_parm));
c6b97fac 2889
6071dc7f
RH
2890 /* Handle values in multiple non-contiguous locations. */
2891 if (GET_CODE (entry_parm) == PARALLEL)
27e29549 2892 {
bb27eeda
SE
2893 push_to_sequence2 (all->first_conversion_insn,
2894 all->last_conversion_insn);
27e29549 2895 emit_group_store (mem, entry_parm, data->passed_type, size);
bb27eeda
SE
2896 all->first_conversion_insn = get_insns ();
2897 all->last_conversion_insn = get_last_insn ();
27e29549
RH
2898 end_sequence ();
2899 }
c6b97fac 2900
6071dc7f
RH
2901 else if (size == 0)
2902 ;
5c07bd7a 2903
6071dc7f
RH
2904 /* If SIZE is that of a mode no bigger than a word, just use
2905 that mode's store operation. */
2906 else if (size <= UNITS_PER_WORD)
2907 {
ef4bddc2 2908 machine_mode mode
6071dc7f 2909 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2910
6071dc7f 2911 if (mode != BLKmode
6e985040 2912#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2913 && (size == UNITS_PER_WORD
2914 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2915 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2916#endif
6071dc7f
RH
2917 )
2918 {
208996c7
RS
2919 rtx reg;
2920
2921 /* We are really truncating a word_mode value containing
2922 SIZE bytes into a value of mode MODE. If such an
2923 operation requires no actual instructions, we can refer
2924 to the value directly in mode MODE, otherwise we must
2925 start with the register in word_mode and explicitly
2926 convert it. */
2927 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2928 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2929 else
2930 {
2931 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2932 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2933 }
6071dc7f
RH
2934 emit_move_insn (change_address (mem, mode, 0), reg);
2935 }
c6b97fac 2936
6071dc7f
RH
2937 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2938 machine must be aligned to the left before storing
2939 to memory. Note that the previous test doesn't
2940 handle all cases (e.g. SIZE == 3). */
2941 else if (size != UNITS_PER_WORD
6e985040 2942#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2943 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2944 == downward)
6e985040 2945#else
6071dc7f 2946 && BYTES_BIG_ENDIAN
6e985040 2947#endif
6071dc7f
RH
2948 )
2949 {
2950 rtx tem, x;
2951 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2952 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2953
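	      /* E.g. a 3-byte value with 4-byte words gives BY == 8; the
		 value is shifted up one byte so that, when the full word is
		 stored big-endian, its bytes land at the start of the
		 slot.  */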
eb6c3df1 2954 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
6071dc7f
RH
2955 tem = change_address (mem, word_mode, 0);
2956 emit_move_insn (tem, x);
6f086dfc 2957 }
6071dc7f 2958 else
27e29549 2959 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2960 size_stored / UNITS_PER_WORD);
6f086dfc 2961 }
6071dc7f 2962 else
27e29549 2963 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2964 size_stored / UNITS_PER_WORD);
2965 }
bfc45551
AM
2966 else if (data->stack_parm == 0)
2967 {
bb27eeda 2968 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2969 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2970 BLOCK_OP_NORMAL);
bb27eeda
SE
2971 all->first_conversion_insn = get_insns ();
2972 all->last_conversion_insn = get_last_insn ();
bfc45551
AM
2973 end_sequence ();
2974 }
6071dc7f 2975
bfc45551 2976 data->stack_parm = stack_parm;
6071dc7f
RH
2977 SET_DECL_RTL (parm, stack_parm);
2978}
2979
2980/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2981 parameter. Get it there. Perform all ABI specified conversions. */
2982
2983static void
2984assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2985 struct assign_parm_data_one *data)
2986{
71008de4
BS
2987 rtx parmreg, validated_mem;
2988 rtx equiv_stack_parm;
ef4bddc2 2989 machine_mode promoted_nominal_mode;
6071dc7f
RH
2990 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2991 bool did_conversion = false;
71008de4 2992 bool need_conversion, moved;
6071dc7f
RH
2993
2994 /* Store the parm in a pseudoregister during the function, but we may
666e3ceb
PB
2995 need to do it in a wider mode. Using 2 here makes the result
2996 consistent with promote_decl_mode and thus expand_expr_real_1. */
6071dc7f 2997 promoted_nominal_mode
cde0f3fd 2998 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
666e3ceb 2999 TREE_TYPE (current_function_decl), 2);
6071dc7f
RH
3000
3001 parmreg = gen_reg_rtx (promoted_nominal_mode);
3002
3003 if (!DECL_ARTIFICIAL (parm))
3004 mark_user_reg (parmreg);
3005
3006 /* If this was an item that we received a pointer to,
3007 set DECL_RTL appropriately. */
3008 if (data->passed_pointer)
3009 {
3010 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3011 set_mem_attributes (x, parm, 1);
3012 SET_DECL_RTL (parm, x);
3013 }
3014 else
389fdba0 3015 SET_DECL_RTL (parm, parmreg);
6071dc7f 3016
4d2a9850
DJ
3017 assign_parm_remove_parallels (data);
3018
666e3ceb
PB
3019 /* Copy the value into the register, thus bridging between
3020 assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 3021
71008de4 3022 equiv_stack_parm = data->stack_parm;
1a8cb155 3023 validated_mem = validize_mem (copy_rtx (data->entry_parm));
71008de4
BS
3024
3025 need_conversion = (data->nominal_mode != data->passed_mode
3026 || promoted_nominal_mode != data->promoted_mode);
3027 moved = false;
3028
dbb94435
BS
3029 if (need_conversion
3030 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3031 && data->nominal_mode == data->passed_mode
3032 && data->nominal_mode == GET_MODE (data->entry_parm))
71008de4 3033 {
6071dc7f
RH
3034 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3035 mode, by the caller. We now have to convert it to
3036 NOMINAL_MODE, if different. However, PARMREG may be in
3037 a different mode than NOMINAL_MODE if it is being stored
3038 promoted.
3039
3040 If ENTRY_PARM is a hard register, it might be in a register
3041 not valid for operating in its mode (e.g., an odd-numbered
3042 register for a DFmode). In that case, moves are the only
3043 thing valid, so we can't do a convert from there. This
3044 occurs when the calling sequence allow such misaligned
3045 usages.
3046
3047 In addition, the conversion may involve a call, which could
3048 clobber parameters which haven't been copied to pseudo
71008de4
BS
3049 registers yet.
3050
3051 First, we try to emit an insn which performs the necessary
3052 conversion. We verify that this insn does not clobber any
3053 hard registers. */
3054
3055 enum insn_code icode;
3056 rtx op0, op1;
3057
3058 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3059 unsignedp);
3060
3061 op0 = parmreg;
3062 op1 = validated_mem;
3063 if (icode != CODE_FOR_nothing
2ef6ce06
RS
3064 && insn_operand_matches (icode, 0, op0)
3065 && insn_operand_matches (icode, 1, op1))
71008de4
BS
3066 {
3067 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
b32d5189
DM
3068 rtx_insn *insn, *insns;
3069 rtx t = op1;
71008de4
BS
3070 HARD_REG_SET hardregs;
3071
3072 start_sequence ();
f9fef349
JJ
3073 /* If op1 is a hard register that is likely spilled, first
3074 force it into a pseudo, otherwise combiner might extend
3075 its lifetime too much. */
3076 if (GET_CODE (t) == SUBREG)
3077 t = SUBREG_REG (t);
3078 if (REG_P (t)
3079 && HARD_REGISTER_P (t)
3080 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3081 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3082 {
3083 t = gen_reg_rtx (GET_MODE (op1));
3084 emit_move_insn (t, op1);
3085 }
3086 else
3087 t = op1;
a11899b2
DM
3088 rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3089 data->passed_mode, unsignedp);
3090 emit_insn (pat);
71008de4
BS
3091 insns = get_insns ();
3092
3093 moved = true;
3094 CLEAR_HARD_REG_SET (hardregs);
3095 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3096 {
3097 if (INSN_P (insn))
3098 note_stores (PATTERN (insn), record_hard_reg_sets,
3099 &hardregs);
3100 if (!hard_reg_set_empty_p (hardregs))
3101 moved = false;
3102 }
3103
3104 end_sequence ();
3105
3106 if (moved)
3107 {
3108 emit_insn (insns);
dbb94435
BS
3109 if (equiv_stack_parm != NULL_RTX)
3110 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3111 equiv_stack_parm);
71008de4
BS
3112 }
3113 }
3114 }
3115
3116 if (moved)
3117 /* Nothing to do. */
3118 ;
3119 else if (need_conversion)
3120 {
3121 /* We did not have an insn to convert directly, or the sequence
3122 generated appeared unsafe. We must first copy the parm to a
3123 pseudo reg, and save the conversion until after all
6071dc7f
RH
3124 parameters have been moved. */
3125
71008de4 3126 int save_tree_used;
6071dc7f
RH
3127 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3128
71008de4 3129 emit_move_insn (tempreg, validated_mem);
6071dc7f 3130
bb27eeda 3131 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
3132 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3133
3134 if (GET_CODE (tempreg) == SUBREG
3135 && GET_MODE (tempreg) == data->nominal_mode
3136 && REG_P (SUBREG_REG (tempreg))
3137 && data->nominal_mode == data->passed_mode
3138 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3139 && GET_MODE_SIZE (GET_MODE (tempreg))
3140 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 3141 {
6071dc7f
RH
3142 /* The argument is already sign/zero extended, so note it
3143 into the subreg. */
3144 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
362d42dc 3145 SUBREG_PROMOTED_SET (tempreg, unsignedp);
6071dc7f 3146 }
00d8a4c1 3147
6071dc7f
RH
3148 /* TREE_USED gets set erroneously during expand_assignment. */
3149 save_tree_used = TREE_USED (parm);
79f5e442 3150 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 3151 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
3152 all->first_conversion_insn = get_insns ();
3153 all->last_conversion_insn = get_last_insn ();
6071dc7f 3154 end_sequence ();
00d8a4c1 3155
6071dc7f
RH
3156 did_conversion = true;
3157 }
3158 else
71008de4 3159 emit_move_insn (parmreg, validated_mem);
6071dc7f
RH
3160
3161 /* If we were passed a pointer but the actual value can safely live
f7e088e7
EB
3162 in a register, retrieve it and use it directly. */
3163 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
6071dc7f
RH
3164 {
3165 /* We can't use nominal_mode, because it will have been set to
3166 Pmode above. We must use the actual mode of the parm. */
f7e088e7
EB
3167 if (use_register_for_decl (parm))
3168 {
3169 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3170 mark_user_reg (parmreg);
3171 }
3172 else
3173 {
3174 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3175 TYPE_MODE (TREE_TYPE (parm)),
3176 TYPE_ALIGN (TREE_TYPE (parm)));
3177 parmreg
3178 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3179 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3180 align);
3181 set_mem_attributes (parmreg, parm, 1);
3182 }
cd5b3469 3183
6071dc7f
RH
3184 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3185 {
3186 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3187 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3188
bb27eeda
SE
3189 push_to_sequence2 (all->first_conversion_insn,
3190 all->last_conversion_insn);
6071dc7f
RH
3191 emit_move_insn (tempreg, DECL_RTL (parm));
3192 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3193 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
3194 all->first_conversion_insn = get_insns ();
3195 all->last_conversion_insn = get_last_insn ();
6071dc7f 3196 end_sequence ();
6f086dfc 3197
6071dc7f
RH
3198 did_conversion = true;
3199 }
3200 else
3201 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 3202
6071dc7f 3203 SET_DECL_RTL (parm, parmreg);
797a6ac1 3204
6071dc7f
RH
3205 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3206 now the parm. */
3207 data->stack_parm = NULL;
3208 }
ddef6bc7 3209
6071dc7f
RH
3210 /* Mark the register as eliminable if we did no conversion and it was
3211 copied from memory at a fixed offset, and the arg pointer was not
3212 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3213 offset formed an invalid address, such memory-equivalences as we
3214 make here would screw up life analysis for it. */
3215 if (data->nominal_mode == data->passed_mode
3216 && !did_conversion
3217 && data->stack_parm != 0
3218 && MEM_P (data->stack_parm)
3219 && data->locate.offset.var == 0
3220 && reg_mentioned_p (virtual_incoming_args_rtx,
3221 XEXP (data->stack_parm, 0)))
3222 {
691fe203
DM
3223 rtx_insn *linsn = get_last_insn ();
3224 rtx_insn *sinsn;
3225 rtx set;
a03caf76 3226
6071dc7f
RH
3227 /* Mark complex types separately. */
3228 if (GET_CODE (parmreg) == CONCAT)
3229 {
ef4bddc2 3230 machine_mode submode
6071dc7f 3231 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
3232 int regnor = REGNO (XEXP (parmreg, 0));
3233 int regnoi = REGNO (XEXP (parmreg, 1));
3234 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3235 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3236 GET_MODE_SIZE (submode));
6071dc7f
RH
3237
3238 /* Scan backwards for the set of the real and
3239 imaginary parts. */
3240 for (sinsn = linsn; sinsn != 0;
3241 sinsn = prev_nonnote_insn (sinsn))
3242 {
3243 set = single_set (sinsn);
3244 if (set == 0)
3245 continue;
3246
3247 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 3248 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 3249 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 3250 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 3251 }
6071dc7f 3252 }
7543f918
JR
3253 else
3254 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
6071dc7f
RH
3255 }
3256
3257 /* For pointer data type, suggest pointer register. */
3258 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3259 mark_reg_pointer (parmreg,
3260 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3261}
3262
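/* Illustrative aside: the conversion path in assign_parm_setup_reg above
   uses a common pattern for vetting generated RTL before committing to
   it -- emit the candidate insns into a detached sequence, scan them
   (here with note_stores looking for hard register clobbers), and only
   emit the sequence into the insn stream if the scan found nothing
   objectionable.  In outline:

       start_sequence ();
       ...generate candidate insns...
       insns = get_insns ();
       ...walk INSNS; clear MOVED if any hard register is set...
       end_sequence ();
       if (moved)
         emit_insn (insns);

   Anything rejected falls back to the slower copy-to-pseudo-then-convert
   path handled just below the sequence check.  */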
3263/* A subroutine of assign_parms. Allocate stack space to hold the current
3264 parameter. Get it there. Perform all ABI specified conversions. */
3265
3266static void
3267assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3268 struct assign_parm_data_one *data)
3269{
3270 /* Value must be stored in the stack slot STACK_PARM during function
3271 execution. */
bfc45551 3272 bool to_conversion = false;
6071dc7f 3273
4d2a9850
DJ
3274 assign_parm_remove_parallels (data);
3275
6071dc7f
RH
3276 if (data->promoted_mode != data->nominal_mode)
3277 {
3278 /* Conversion is required. */
3279 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 3280
1a8cb155 3281 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
6071dc7f 3282
bb27eeda 3283 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
3284 to_conversion = true;
3285
6071dc7f
RH
3286 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3287 TYPE_UNSIGNED (TREE_TYPE (parm)));
3288
3289 if (data->stack_parm)
dd67163f
JJ
3290 {
3291 int offset = subreg_lowpart_offset (data->nominal_mode,
3292 GET_MODE (data->stack_parm));
3293 /* ??? This may need a big-endian conversion on sparc64. */
3294 data->stack_parm
3295 = adjust_address (data->stack_parm, data->nominal_mode, 0);
527210c4 3296 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
dd67163f 3297 set_mem_offset (data->stack_parm,
527210c4 3298 MEM_OFFSET (data->stack_parm) + offset);
dd67163f 3299 }
6071dc7f
RH
3300 }
3301
3302 if (data->entry_parm != data->stack_parm)
3303 {
bfc45551
AM
3304 rtx src, dest;
3305
6071dc7f
RH
3306 if (data->stack_parm == 0)
3307 {
3a695389
UW
3308 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3309 GET_MODE (data->entry_parm),
3310 TYPE_ALIGN (data->passed_type));
6071dc7f
RH
3311 data->stack_parm
3312 = assign_stack_local (GET_MODE (data->entry_parm),
3313 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3a695389 3314 align);
6071dc7f 3315 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 3316 }
6071dc7f 3317
1a8cb155
RS
3318 dest = validize_mem (copy_rtx (data->stack_parm));
3319 src = validize_mem (copy_rtx (data->entry_parm));
bfc45551
AM
3320
3321 if (MEM_P (src))
6f086dfc 3322 {
bfc45551
AM
3323 /* Use a block move to handle potentially misaligned entry_parm. */
3324 if (!to_conversion)
bb27eeda
SE
3325 push_to_sequence2 (all->first_conversion_insn,
3326 all->last_conversion_insn);
bfc45551
AM
3327 to_conversion = true;
3328
3329 emit_block_move (dest, src,
3330 GEN_INT (int_size_in_bytes (data->passed_type)),
3331 BLOCK_OP_NORMAL);
6071dc7f
RH
3332 }
3333 else
bfc45551
AM
3334 emit_move_insn (dest, src);
3335 }
3336
3337 if (to_conversion)
3338 {
bb27eeda
SE
3339 all->first_conversion_insn = get_insns ();
3340 all->last_conversion_insn = get_last_insn ();
bfc45551 3341 end_sequence ();
6071dc7f 3342 }
6f086dfc 3343
6071dc7f
RH
3344 SET_DECL_RTL (parm, data->stack_parm);
3345}
3412b298 3346
6071dc7f
RH
3347/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3348 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 3349
6071dc7f 3350static void
3b3f318a 3351assign_parms_unsplit_complex (struct assign_parm_data_all *all,
9771b263 3352 vec<tree> fnargs)
6071dc7f
RH
3353{
3354 tree parm;
6ccd356e 3355 tree orig_fnargs = all->orig_fnargs;
3b3f318a 3356 unsigned i = 0;
f4ef873c 3357
3b3f318a 3358 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
6071dc7f
RH
3359 {
3360 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3361 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3362 {
3363 rtx tmp, real, imag;
ef4bddc2 3364 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 3365
9771b263
DN
3366 real = DECL_RTL (fnargs[i]);
3367 imag = DECL_RTL (fnargs[i + 1]);
6071dc7f 3368 if (inner != GET_MODE (real))
6f086dfc 3369 {
6071dc7f
RH
3370 real = gen_lowpart_SUBREG (inner, real);
3371 imag = gen_lowpart_SUBREG (inner, imag);
3372 }
6ccd356e
AM
3373
3374 if (TREE_ADDRESSABLE (parm))
3375 {
3376 rtx rmem, imem;
3377 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3a695389
UW
3378 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3379 DECL_MODE (parm),
3380 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
3381
3382 /* split_complex_arg put the real and imag parts in
3383 pseudos. Move them to memory. */
3a695389 3384 tmp = assign_stack_local (DECL_MODE (parm), size, align);
6ccd356e
AM
3385 set_mem_attributes (tmp, parm, 1);
3386 rmem = adjust_address_nv (tmp, inner, 0);
3387 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
3388 push_to_sequence2 (all->first_conversion_insn,
3389 all->last_conversion_insn);
6ccd356e
AM
3390 emit_move_insn (rmem, real);
3391 emit_move_insn (imem, imag);
bb27eeda
SE
3392 all->first_conversion_insn = get_insns ();
3393 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
3394 end_sequence ();
3395 }
3396 else
3397 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 3398 SET_DECL_RTL (parm, tmp);
7e41ffa2 3399
9771b263
DN
3400 real = DECL_INCOMING_RTL (fnargs[i]);
3401 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
6071dc7f
RH
3402 if (inner != GET_MODE (real))
3403 {
3404 real = gen_lowpart_SUBREG (inner, real);
3405 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 3406 }
6071dc7f 3407 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5141868d 3408 set_decl_incoming_rtl (parm, tmp, false);
3b3f318a 3409 i++;
6f086dfc 3410 }
6f086dfc 3411 }
6071dc7f
RH
3412}
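/* Illustrative note: after the loop above, a split complex parameter that
   is not TREE_ADDRESSABLE ends up with DECL_RTL of the form
   (concat:M real imag), where M is the complex mode of the parm and the
   operands are the rtl of the two scalar halves created by
   split_complex_arg; an addressable parm instead gets a freshly allocated
   stack slot with the two halves stored into it.  */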
3413
d5e254e1
IE
3414/* Load bounds of PARM from bounds table. */
3415static void
3416assign_parm_load_bounds (struct assign_parm_data_one *data,
3417 tree parm,
3418 rtx entry,
3419 unsigned bound_no)
3420{
3421 bitmap_iterator bi;
3422 unsigned i, offs = 0;
3423 int bnd_no = -1;
3424 rtx slot = NULL, ptr = NULL;
3425
3426 if (parm)
3427 {
3428 bitmap slots;
3429 bitmap_obstack_initialize (NULL);
3430 slots = BITMAP_ALLOC (NULL);
3431 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3432 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3433 {
3434 if (bound_no)
3435 bound_no--;
3436 else
3437 {
3438 bnd_no = i;
3439 break;
3440 }
3441 }
3442 BITMAP_FREE (slots);
3443 bitmap_obstack_release (NULL);
3444 }
3445
3446 /* We may have bounds not associated with any pointer. */
3447 if (bnd_no != -1)
3448 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3449
3450 /* Find associated pointer. */
3451 if (bnd_no == -1)
3452 {
3453 /* If bounds are not associated with any pointer,
3454 then they are passed in a register or special slot. */
3455 gcc_assert (data->entry_parm);
3456 ptr = const0_rtx;
3457 }
3458 else if (MEM_P (entry))
3459 slot = adjust_address (entry, Pmode, offs);
3460 else if (REG_P (entry))
3461 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3462 else if (GET_CODE (entry) == PARALLEL)
3463 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3464 else
3465 gcc_unreachable ();
3466 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3467 data->entry_parm);
3468}
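/* Illustrative note: the slot offset computed above is a plain
   multiplication.  For example, on a target with 64-bit pointers
   POINTER_SIZE / BITS_PER_UNIT is 8, so the bound slot with bnd_no == 2
   starts at byte offset 2 * 8 == 16 within the incoming bounds area.  */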
3469
3470/* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3471
3472static void
3473assign_bounds (vec<bounds_parm_data> &bndargs,
3474 struct assign_parm_data_all &all)
3475{
3476 unsigned i, pass, handled = 0;
3477 bounds_parm_data *pbdata;
3478
3479 if (!bndargs.exists ())
3480 return;
3481
3482 /* We make a few passes to store input bounds. First we handle bounds
3483 passed in registers. After that we load bounds passed in special
3484 slots. Finally we load bounds from the Bounds Table. */
3485 for (pass = 0; pass < 3; pass++)
3486 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3487 {
3488 /* Pass 0 => regs only. */
3489 if (pass == 0
3490 && (!pbdata->parm_data.entry_parm
3491 || GET_CODE (pbdata->parm_data.entry_parm) != REG))
3492 continue;
3493 /* Pass 1 => slots only. */
3494 else if (pass == 1
3495 && (!pbdata->parm_data.entry_parm
3496 || GET_CODE (pbdata->parm_data.entry_parm) == REG))
3497 continue;
3498 /* Pass 2 => BT only. */
3499 else if (pass == 2
3500 && pbdata->parm_data.entry_parm)
3501 continue;
3502
3503 if (!pbdata->parm_data.entry_parm
3504 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3505 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3506 pbdata->ptr_entry, pbdata->bound_no);
3507
3508 set_decl_incoming_rtl (pbdata->bounds_parm,
3509 pbdata->parm_data.entry_parm, false);
3510
3511 if (assign_parm_setup_block_p (&pbdata->parm_data))
3512 assign_parm_setup_block (&all, pbdata->bounds_parm,
3513 &pbdata->parm_data);
3514 else if (pbdata->parm_data.passed_pointer
3515 || use_register_for_decl (pbdata->bounds_parm))
3516 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3517 &pbdata->parm_data);
3518 else
3519 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3520 &pbdata->parm_data);
3521
3522 /* Count handled bounds to make sure we miss nothing. */
3523 handled++;
3524 }
3525
3526 gcc_assert (handled == bndargs.length ());
3527
3528 bndargs.release ();
3529}
3530
6071dc7f
RH
3531/* Assign RTL expressions to the function's parameters. This may involve
3532 copying them into registers and using those registers as the DECL_RTL. */
3533
6fe79279 3534static void
6071dc7f
RH
3535assign_parms (tree fndecl)
3536{
3537 struct assign_parm_data_all all;
3b3f318a 3538 tree parm;
9771b263 3539 vec<tree> fnargs;
d5e254e1
IE
3540 unsigned i, bound_no = 0;
3541 tree last_arg = NULL;
3542 rtx last_arg_entry = NULL;
3543 vec<bounds_parm_data> bndargs = vNULL;
3544 bounds_parm_data bdata;
6f086dfc 3545
38173d38 3546 crtl->args.internal_arg_pointer
150cdc9e 3547 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
3548
3549 assign_parms_initialize_all (&all);
3550 fnargs = assign_parms_augmented_arg_list (&all);
3551
9771b263 3552 FOR_EACH_VEC_ELT (fnargs, i, parm)
ded9bf77 3553 {
6071dc7f
RH
3554 struct assign_parm_data_one data;
3555
3556 /* Extract the type of PARM; adjust it according to ABI. */
3557 assign_parm_find_data_types (&all, parm, &data);
3558
3559 /* Early out for errors and void parameters. */
3560 if (data.passed_mode == VOIDmode)
ded9bf77 3561 {
6071dc7f
RH
3562 SET_DECL_RTL (parm, const0_rtx);
3563 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3564 continue;
3565 }
196c42cd 3566
2e3f842f
L
3567 /* Estimate stack alignment from parameter alignment. */
3568 if (SUPPORTS_STACK_ALIGNMENT)
3569 {
c2ed6cf8
NF
3570 unsigned int align
3571 = targetm.calls.function_arg_boundary (data.promoted_mode,
3572 data.passed_type);
ae58e548
JJ
3573 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3574 align);
2e3f842f 3575 if (TYPE_ALIGN (data.nominal_type) > align)
ae58e548
JJ
3576 align = MINIMUM_ALIGNMENT (data.nominal_type,
3577 TYPE_MODE (data.nominal_type),
3578 TYPE_ALIGN (data.nominal_type));
2e3f842f
L
3579 if (crtl->stack_alignment_estimated < align)
3580 {
3581 gcc_assert (!crtl->stack_realign_processed);
3582 crtl->stack_alignment_estimated = align;
3583 }
3584 }
b8698a0f 3585
6071dc7f
RH
3586 /* Find out where the parameter arrives in this function. */
3587 assign_parm_find_entry_rtl (&all, &data);
3588
3589 /* Find out where stack space for this parameter might be. */
3590 if (assign_parm_is_stack_parm (&all, &data))
3591 {
3592 assign_parm_find_stack_rtl (parm, &data);
3593 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3594 }
d5e254e1
IE
3595 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3596 {
3597 /* Remember where the last non-bounds arg was passed in case
3598 we have to load associated bounds for it from the Bounds
3599 Table. */
3600 last_arg = parm;
3601 last_arg_entry = data.entry_parm;
3602 bound_no = 0;
3603 }
6071dc7f 3604 /* Record permanently how this parm was passed. */
a82ff31f
JJ
3605 if (data.passed_pointer)
3606 {
3607 rtx incoming_rtl
3608 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3609 data.entry_parm);
3610 set_decl_incoming_rtl (parm, incoming_rtl, true);
3611 }
3612 else
3613 set_decl_incoming_rtl (parm, data.entry_parm, false);
6071dc7f 3614
d5e254e1
IE
3615 /* Bounds should be loaded in a particular order to
3616 have registers allocated correctly. Collect info about
3617 input bounds and load them later. */
3618 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3619 {
3620 /* Expect bounds in instrumented functions only. */
3621 gcc_assert (chkp_function_instrumented_p (fndecl));
3622
3623 bdata.parm_data = data;
3624 bdata.bounds_parm = parm;
3625 bdata.ptr_parm = last_arg;
3626 bdata.ptr_entry = last_arg_entry;
3627 bdata.bound_no = bound_no;
3628 bndargs.safe_push (bdata);
3629 }
3630 else
3631 {
3632 assign_parm_adjust_stack_rtl (&data);
3633
3634 if (assign_parm_setup_block_p (&data))
3635 assign_parm_setup_block (&all, parm, &data);
3636 else if (data.passed_pointer || use_register_for_decl (parm))
3637 assign_parm_setup_reg (&all, parm, &data);
3638 else
3639 assign_parm_setup_stack (&all, parm, &data);
3640 }
3641
3642 if (cfun->stdarg && !DECL_CHAIN (parm))
3643 {
3644 int pretend_bytes = 0;
3645
3646 assign_parms_setup_varargs (&all, &data, false);
3647
3648 if (chkp_function_instrumented_p (fndecl))
3649 {
3650 /* We expect this to be the last parm. Otherwise it is wrong
3651 to assign bounds right now. */
3652 gcc_assert (i == (fnargs.length () - 1));
3653 assign_bounds (bndargs, all);
3654 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3655 data.promoted_mode,
3656 data.passed_type,
3657 &pretend_bytes,
3658 false);
3659 }
3660 }
3661
6071dc7f 3662 /* Update info on where next arg arrives in registers. */
d5cc9181 3663 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3664 data.passed_type, data.named_arg);
6071dc7f 3665
d5e254e1
IE
3666 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3667 bound_no++;
ded9bf77
AH
3668 }
3669
d5e254e1
IE
3670 assign_bounds (bndargs, all);
3671
3b3f318a 3672 if (targetm.calls.split_complex_arg)
6ccd356e 3673 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3674
9771b263 3675 fnargs.release ();
3b3f318a 3676
bcb21886
KY
3677 /* Initialize pic_offset_table_rtx with a pseudo register
3678 if required. */
3679 if (targetm.use_pseudo_pic_reg ())
3680 pic_offset_table_rtx = gen_reg_rtx (Pmode);
3681
3412b298
JW
3682 /* Output all parameter conversion instructions (possibly including calls)
3683 now that all parameters have been copied out of hard registers. */
bb27eeda 3684 emit_insn (all.first_conversion_insn);
3412b298 3685
2e3f842f
L
3686 /* Estimate reload stack alignment from scalar return mode. */
3687 if (SUPPORTS_STACK_ALIGNMENT)
3688 {
3689 if (DECL_RESULT (fndecl))
3690 {
3691 tree type = TREE_TYPE (DECL_RESULT (fndecl));
ef4bddc2 3692 machine_mode mode = TYPE_MODE (type);
2e3f842f
L
3693
3694 if (mode != BLKmode
3695 && mode != VOIDmode
3696 && !AGGREGATE_TYPE_P (type))
3697 {
3698 unsigned int align = GET_MODE_ALIGNMENT (mode);
3699 if (crtl->stack_alignment_estimated < align)
3700 {
3701 gcc_assert (!crtl->stack_realign_processed);
3702 crtl->stack_alignment_estimated = align;
3703 }
3704 }
b8698a0f 3705 }
2e3f842f
L
3706 }
3707
b36a8cc2
OH
3708 /* If we are receiving a struct value address as the first argument, set up
3709 the RTL for the function result. As this might require code to convert
3710 the transmitted address to Pmode, we do this here to ensure that possible
3711 preliminary conversions of the address have been emitted already. */
6071dc7f 3712 if (all.function_result_decl)
b36a8cc2 3713 {
6071dc7f
RH
3714 tree result = DECL_RESULT (current_function_decl);
3715 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3716 rtx x;
fa8db1f7 3717
cc77ae10 3718 if (DECL_BY_REFERENCE (result))
8dcfef8f
AO
3719 {
3720 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3721 x = addr;
3722 }
cc77ae10
JM
3723 else
3724 {
8dcfef8f
AO
3725 SET_DECL_VALUE_EXPR (result,
3726 build1 (INDIRECT_REF, TREE_TYPE (result),
3727 all.function_result_decl));
cc77ae10
JM
3728 addr = convert_memory_address (Pmode, addr);
3729 x = gen_rtx_MEM (DECL_MODE (result), addr);
3730 set_mem_attributes (x, result, 1);
3731 }
8dcfef8f
AO
3732
3733 DECL_HAS_VALUE_EXPR_P (result) = 1;
3734
b36a8cc2
OH
3735 SET_DECL_RTL (result, x);
3736 }
3737
53c428d0 3738 /* We have aligned all the args, so add space for the pretend args. */
38173d38 3739 crtl->args.pretend_args_size = all.pretend_args_size;
6071dc7f 3740 all.stack_args_size.constant += all.extra_pretend_bytes;
38173d38 3741 crtl->args.size = all.stack_args_size.constant;
6f086dfc
RS
3742
3743 /* Adjust function incoming argument size for alignment and
3744 minimum length. */
3745
2e4ceca5 3746 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
38173d38 3747 crtl->args.size = CEIL_ROUND (crtl->args.size,
53366450 3748 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3749
6f086dfc 3750#ifdef ARGS_GROW_DOWNWARD
38173d38 3751 crtl->args.arg_offset_rtx
477eff96 3752 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3753 : expand_expr (size_diffop (all.stack_args_size.var,
3754 size_int (-all.stack_args_size.constant)),
bbbbb16a 3755 NULL_RTX, VOIDmode, EXPAND_NORMAL));
6f086dfc 3756#else
38173d38 3757 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3758#endif
3759
3760 /* See how many bytes, if any, of its args a function should try to pop
3761 on return. */
3762
079e7538
NF
3763 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3764 TREE_TYPE (fndecl),
3765 crtl->args.size);
6f086dfc 3766
3b69d50e
RK
3767 /* For a stdarg.h function, save info about the
3768 regs and stack space used by the named args. */
6f086dfc 3769
d5cc9181 3770 crtl->args.info = all.args_so_far_v;
6f086dfc
RS
3771
3772 /* Set the rtx used for the function return value. Put this in its
3773 own variable so any optimizers that need this information don't have
3774 to include tree.h. Do this here so it gets done when an inlined
3775 function gets output. */
3776
38173d38 3777 crtl->return_rtx
19e7881c
MM
3778 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3779 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3780
3781 /* If scalar return value was computed in a pseudo-reg, or was a named
3782 return value that got dumped to the stack, copy that to the hard
3783 return register. */
3784 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3785 {
3786 tree decl_result = DECL_RESULT (fndecl);
3787 rtx decl_rtl = DECL_RTL (decl_result);
3788
3789 if (REG_P (decl_rtl)
3790 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3791 : DECL_REGISTER (decl_result))
3792 {
3793 rtx real_decl_rtl;
3794
1d636cc6
RG
3795 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3796 fndecl, true);
d5e254e1
IE
3797 if (chkp_function_instrumented_p (fndecl))
3798 crtl->return_bnd
3799 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3800 fndecl, true);
ce5e43d0 3801 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
38173d38 3802 /* The delay slot scheduler assumes that crtl->return_rtx
ce5e43d0
JJ
3803 holds the hard register containing the return value, not a
3804 temporary pseudo. */
38173d38 3805 crtl->return_rtx = real_decl_rtl;
ce5e43d0
JJ
3806 }
3807 }
6f086dfc 3808}
4744afba
RH
3809
3810/* A subroutine of gimplify_parameters, invoked via walk_tree.
3811 For all seen types, gimplify their sizes. */
3812
3813static tree
3814gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3815{
3816 tree t = *tp;
3817
3818 *walk_subtrees = 0;
3819 if (TYPE_P (t))
3820 {
3821 if (POINTER_TYPE_P (t))
3822 *walk_subtrees = 1;
ad50bc8d
RH
3823 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3824 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba 3825 {
726a989a 3826 gimplify_type_sizes (t, (gimple_seq *) data);
4744afba
RH
3827 *walk_subtrees = 1;
3828 }
3829 }
3830
3831 return NULL;
3832}
3833
3834/* Gimplify the parameter list for current_function_decl. This involves
3835 evaluating SAVE_EXPRs of variable sized parameters and generating code
726a989a
RB
3836 to implement callee-copies reference parameters. Returns a sequence of
3837 statements to add to the beginning of the function. */
4744afba 3838
726a989a 3839gimple_seq
4744afba
RH
3840gimplify_parameters (void)
3841{
3842 struct assign_parm_data_all all;
3b3f318a 3843 tree parm;
726a989a 3844 gimple_seq stmts = NULL;
9771b263 3845 vec<tree> fnargs;
3b3f318a 3846 unsigned i;
4744afba
RH
3847
3848 assign_parms_initialize_all (&all);
3849 fnargs = assign_parms_augmented_arg_list (&all);
3850
9771b263 3851 FOR_EACH_VEC_ELT (fnargs, i, parm)
4744afba
RH
3852 {
3853 struct assign_parm_data_one data;
3854
3855 /* Extract the type of PARM; adjust it according to ABI. */
3856 assign_parm_find_data_types (&all, parm, &data);
3857
3858 /* Early out for errors and void parameters. */
3859 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3860 continue;
3861
3862 /* Update info on where next arg arrives in registers. */
d5cc9181 3863 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3864 data.passed_type, data.named_arg);
4744afba
RH
3865
3866 /* ??? Once upon a time variable_size stuffed parameter list
3867 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3868 turned out to be less than manageable in the gimple world.
3869 Now we have to hunt them down ourselves. */
3870 walk_tree_without_duplicates (&data.passed_type,
3871 gimplify_parm_type, &stmts);
3872
b38f3813 3873 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4744afba
RH
3874 {
3875 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3876 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3877 }
3878
3879 if (data.passed_pointer)
3880 {
3881 tree type = TREE_TYPE (data.passed_type);
d5cc9181 3882 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4744afba
RH
3883 type, data.named_arg))
3884 {
3885 tree local, t;
3886
b38f3813 3887 /* For constant-sized objects, this is trivial; for
4744afba 3888 variable-sized objects, we have to play games. */
b38f3813
EB
3889 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3890 && !(flag_stack_check == GENERIC_STACK_CHECK
3891 && compare_tree_int (DECL_SIZE_UNIT (parm),
3892 STACK_CHECK_MAX_VAR_SIZE) > 0))
4744afba 3893 {
5dac1dae 3894 local = create_tmp_var (type, get_name (parm));
4744afba 3895 DECL_IGNORED_P (local) = 0;
04487a2f
JJ
3896 /* If PARM was addressable, move that flag over
3897 to the local copy, as its address will be taken,
37609bf0
RG
3898 not the PARM's. Keep the parm's address-taken flag set,
3899 as we'll query it during gimplification. */
04487a2f 3900 if (TREE_ADDRESSABLE (parm))
37609bf0 3901 TREE_ADDRESSABLE (local) = 1;
5dac1dae
JJ
3902 else if (TREE_CODE (type) == COMPLEX_TYPE
3903 || TREE_CODE (type) == VECTOR_TYPE)
3904 DECL_GIMPLE_REG_P (local) = 1;
4744afba
RH
3905 }
3906 else
3907 {
5039610b 3908 tree ptr_type, addr;
4744afba
RH
3909
3910 ptr_type = build_pointer_type (type);
c98b08ff 3911 addr = create_tmp_reg (ptr_type, get_name (parm));
4744afba
RH
3912 DECL_IGNORED_P (addr) = 0;
3913 local = build_fold_indirect_ref (addr);
3914
e79983f4 3915 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
c28f4b5c 3916 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
13e49da9
TV
3917 size_int (DECL_ALIGN (parm)));
3918
d3c12306 3919 /* The call has been built for a variable-sized object. */
63d2a353 3920 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4744afba 3921 t = fold_convert (ptr_type, t);
726a989a 3922 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4744afba
RH
3923 gimplify_and_add (t, &stmts);
3924 }
3925
726a989a 3926 gimplify_assign (local, parm, &stmts);
4744afba 3927
833b3afe
DB
3928 SET_DECL_VALUE_EXPR (parm, local);
3929 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3930 }
3931 }
3932 }
3933
9771b263 3934 fnargs.release ();
3b3f318a 3935
4744afba
RH
3936 return stmts;
3937}
75dc3319 3938\f
6f086dfc
RS
3939/* Compute the size and offset from the start of the stacked arguments for a
3940 parm passed in mode PASSED_MODE and with type TYPE.
3941
3942 INITIAL_OFFSET_PTR points to the current offset into the stacked
3943 arguments.
3944
e7949876
AM
3945 The starting offset and size for this parm are returned in
3946 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3947 nonzero, the offset is that of stack slot, which is returned in
3948 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3949 padding required from the initial offset ptr to the stack slot.
6f086dfc 3950
cc2902df 3951 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3952 never be set if REG_PARM_STACK_SPACE is not defined.
3953
2e4ceca5
UW
3954 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3955 for arguments which are passed in registers.
3956
6f086dfc
RS
3957 FNDECL is the function in which the argument was defined.
3958
3959 There are two types of rounding that are done. The first, controlled by
c2ed6cf8
NF
3960 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3961 argument list to be aligned to the specific boundary (in bits). This
3962 rounding affects the initial and starting offsets, but not the argument
3963 size.
6f086dfc
RS
3964
3965 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3966 optionally rounds the size of the parm to PARM_BOUNDARY. The
3967 initial offset is not affected by this rounding, while the size always
3968 is and the starting offset may be. */
3969
e7949876
AM
3970/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3971 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3972 callers pass in the total size of args so far as
e7949876 3973 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3974
6f086dfc 3975void
ef4bddc2 3976locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
2e4ceca5
UW
3977 int reg_parm_stack_space, int partial,
3978 tree fndecl ATTRIBUTE_UNUSED,
fa8db1f7
AJ
3979 struct args_size *initial_offset_ptr,
3980 struct locate_and_pad_arg_data *locate)
6f086dfc 3981{
e7949876
AM
3982 tree sizetree;
3983 enum direction where_pad;
123148b5 3984 unsigned int boundary, round_boundary;
e7949876 3985 int part_size_in_regs;
6f086dfc 3986
6f086dfc
RS
3987 /* If we have found a stack parm before we reach the end of the
3988 area reserved for registers, skip that area. */
3989 if (! in_regs)
3990 {
6f086dfc
RS
3991 if (reg_parm_stack_space > 0)
3992 {
3993 if (initial_offset_ptr->var)
3994 {
3995 initial_offset_ptr->var
3996 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3997 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3998 initial_offset_ptr->constant = 0;
3999 }
4000 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4001 initial_offset_ptr->constant = reg_parm_stack_space;
4002 }
4003 }
6f086dfc 4004
78a52f11 4005 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
4006
4007 sizetree
4008 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4009 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
c2ed6cf8 4010 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
123148b5
BS
4011 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4012 type);
6e985040 4013 locate->where_pad = where_pad;
2e3f842f
L
4014
4015 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4016 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4017 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4018
bfc45551 4019 locate->boundary = boundary;
6f086dfc 4020
2e3f842f
L
4021 if (SUPPORTS_STACK_ALIGNMENT)
4022 {
4023 /* stack_alignment_estimated can't change after stack has been
4024 realigned. */
4025 if (crtl->stack_alignment_estimated < boundary)
4026 {
4027 if (!crtl->stack_realign_processed)
4028 crtl->stack_alignment_estimated = boundary;
4029 else
4030 {
4031 /* If stack is realigned and stack alignment value
4032 hasn't been finalized, it is OK not to increase
4033 stack_alignment_estimated. The bigger alignment
4034 requirement is recorded in stack_alignment_needed
4035 below. */
4036 gcc_assert (!crtl->stack_realign_finalized
4037 && crtl->stack_realign_needed);
4038 }
4039 }
4040 }
4041
c7e777b5
RH
4042 /* Remember if the outgoing parameter requires extra alignment on the
4043 calling function side. */
cb91fab0
JH
4044 if (crtl->stack_alignment_needed < boundary)
4045 crtl->stack_alignment_needed = boundary;
2e3f842f
L
4046 if (crtl->preferred_stack_boundary < boundary)
4047 crtl->preferred_stack_boundary = boundary;
c7e777b5 4048
6f086dfc 4049#ifdef ARGS_GROW_DOWNWARD
e7949876 4050 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 4051 if (initial_offset_ptr->var)
e7949876
AM
4052 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4053 initial_offset_ptr->var);
9dff28ab 4054
e7949876
AM
4055 {
4056 tree s2 = sizetree;
4057 if (where_pad != none
cc269bb6 4058 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 4059 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 4060 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
e7949876
AM
4061 SUB_PARM_SIZE (locate->slot_offset, s2);
4062 }
4063
4064 locate->slot_offset.constant += part_size_in_regs;
9dff28ab 4065
2e4ceca5 4066 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
4067 pad_to_arg_alignment (&locate->slot_offset, boundary,
4068 &locate->alignment_pad);
9dff28ab 4069
e7949876
AM
4070 locate->size.constant = (-initial_offset_ptr->constant
4071 - locate->slot_offset.constant);
6f086dfc 4072 if (initial_offset_ptr->var)
e7949876
AM
4073 locate->size.var = size_binop (MINUS_EXPR,
4074 size_binop (MINUS_EXPR,
4075 ssize_int (0),
4076 initial_offset_ptr->var),
4077 locate->slot_offset.var);
4078
4079 /* Pad_below needs the pre-rounded size to know how much to pad
4080 below. */
4081 locate->offset = locate->slot_offset;
4082 if (where_pad == downward)
4083 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 4084
6f086dfc 4085#else /* !ARGS_GROW_DOWNWARD */
2e4ceca5 4086 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
4087 pad_to_arg_alignment (initial_offset_ptr, boundary,
4088 &locate->alignment_pad);
4089 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
4090
4091#ifdef PUSH_ROUNDING
4092 if (passed_mode != BLKmode)
4093 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4094#endif
4095
d4b0a7a0
DE
4096 /* Pad_below needs the pre-rounded size to know how much to pad below
4097 so this must be done before rounding up. */
e7949876
AM
4098 locate->offset = locate->slot_offset;
4099 if (where_pad == downward)
4100 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 4101
6f086dfc 4102 if (where_pad != none
cc269bb6 4103 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 4104 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 4105 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
6f086dfc 4106
e7949876
AM
4107 ADD_PARM_SIZE (locate->size, sizetree);
4108
4109 locate->size.constant -= part_size_in_regs;
6f086dfc 4110#endif /* ARGS_GROW_DOWNWARD */
099590dc
MM
4111
4112#ifdef FUNCTION_ARG_OFFSET
4113 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4114#endif
6f086dfc
RS
4115}
4116
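/* Illustrative aside: the rounding done by locate_and_pad_parm above and
   pad_to_arg_alignment below boils down to rounding a byte offset up (or,
   for ARGS_GROW_DOWNWARD, down) to a multiple of a power-of-two boundary.
   A minimal standalone sketch of that arithmetic follows; ceil_round and
   floor_round are hypothetical helpers written only for this sketch, not
   the macros GCC itself uses, and the block is guarded out so it is never
   compiled as part of this file.  */
#if 0
#include <assert.h>

/* Round VALUE up to the next multiple of the power-of-two ALIGN.  */
static long
ceil_round (long value, long align)
{
  return (value + align - 1) & -align;
}

/* Round VALUE down to a multiple of the power-of-two ALIGN.  */
static long
floor_round (long value, long align)
{
  return value & -align;
}

int
main (void)
{
  /* With a 16-byte boundary, a 23-byte offset rounds up to 32
     and down to 16.  */
  assert (ceil_round (23, 16) == 32);
  assert (floor_round (23, 16) == 16);
  return 0;
}
#endif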
e16c591a
RS
4117/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4118 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4119
6f086dfc 4120static void
fa8db1f7
AJ
4121pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4122 struct args_size *alignment_pad)
6f086dfc 4123{
a544cfd2
KG
4124 tree save_var = NULL_TREE;
4125 HOST_WIDE_INT save_constant = 0;
a751cd5b 4126 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
4127 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4128
4129#ifdef SPARC_STACK_BOUNDARY_HACK
2358ff91
EB
4130 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4131 the real alignment of %sp. However, when it does this, the
4132 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
a594a19c
GK
4133 if (SPARC_STACK_BOUNDARY_HACK)
4134 sp_offset = 0;
4135#endif
4fc026cd 4136
6f6b8f81 4137 if (boundary > PARM_BOUNDARY)
4fc026cd
CM
4138 {
4139 save_var = offset_ptr->var;
4140 save_constant = offset_ptr->constant;
4141 }
4142
4143 alignment_pad->var = NULL_TREE;
4144 alignment_pad->constant = 0;
4fc026cd 4145
6f086dfc
RS
4146 if (boundary > BITS_PER_UNIT)
4147 {
4148 if (offset_ptr->var)
4149 {
a594a19c
GK
4150 tree sp_offset_tree = ssize_int (sp_offset);
4151 tree offset = size_binop (PLUS_EXPR,
4152 ARGS_SIZE_TREE (*offset_ptr),
4153 sp_offset_tree);
6f086dfc 4154#ifdef ARGS_GROW_DOWNWARD
a594a19c 4155 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 4156#else
a594a19c 4157 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 4158#endif
a594a19c
GK
4159
4160 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
4161 /* ARGS_SIZE_TREE includes constant term. */
4162 offset_ptr->constant = 0;
6f6b8f81 4163 if (boundary > PARM_BOUNDARY)
dd3f0101 4164 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 4165 save_var);
6f086dfc
RS
4166 }
4167 else
718fe406 4168 {
a594a19c 4169 offset_ptr->constant = -sp_offset +
6f086dfc 4170#ifdef ARGS_GROW_DOWNWARD
a594a19c 4171 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 4172#else
a594a19c 4173 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 4174#endif
6f6b8f81 4175 if (boundary > PARM_BOUNDARY)
718fe406
KH
4176 alignment_pad->constant = offset_ptr->constant - save_constant;
4177 }
6f086dfc
RS
4178 }
4179}
4180
4181static void
ef4bddc2 4182pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
6f086dfc
RS
4183{
4184 if (passed_mode != BLKmode)
4185 {
4186 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4187 offset_ptr->constant
4188 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4189 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4190 - GET_MODE_SIZE (passed_mode));
4191 }
4192 else
4193 {
4194 if (TREE_CODE (sizetree) != INTEGER_CST
4195 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4196 {
4197 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4198 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4199 /* Add it in. */
4200 ADD_PARM_SIZE (*offset_ptr, s2);
4201 SUB_PARM_SIZE (*offset_ptr, sizetree);
4202 }
4203 }
4204}
6f086dfc 4205\f
6f086dfc 4206
6fb5fa3c
DB
4207/* True if register REGNO was alive at a place where `setjmp' was
4208 called and was set more than once or is an argument. Such regs may
4209 be clobbered by `longjmp'. */
4210
4211static bool
4212regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4213{
4214 /* There appear to be cases where some local vars never reach the
4215 backend but have bogus regnos. */
4216 if (regno >= max_reg_num ())
4217 return false;
4218
4219 return ((REG_N_SETS (regno) > 1
fefa31b5
DM
4220 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4221 regno))
6fb5fa3c
DB
4222 && REGNO_REG_SET_P (setjmp_crosses, regno));
4223}
4224
4225/* Walk the tree of blocks describing the binding levels within a
4226 function and warn about variables that might be killed by setjmp or
4227 vfork. This is done after flow analysis and before register
4228 allocation, since register allocation will turn the pseudo-regs into hard
4229 regs. */
4230
4231static void
4232setjmp_vars_warning (bitmap setjmp_crosses, tree block)
6f086dfc 4233{
b3694847 4234 tree decl, sub;
6de9cd9a 4235
910ad8de 4236 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
6f086dfc 4237 {
6de9cd9a 4238 if (TREE_CODE (decl) == VAR_DECL
bc41842b 4239 && DECL_RTL_SET_P (decl)
f8cfc6aa 4240 && REG_P (DECL_RTL (decl))
6fb5fa3c 4241 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4242 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
2b001724 4243 " %<longjmp%> or %<vfork%>", decl);
6f086dfc 4244 }
6de9cd9a 4245
87caf699 4246 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
6fb5fa3c 4247 setjmp_vars_warning (setjmp_crosses, sub);
6f086dfc
RS
4248}
4249
6de9cd9a 4250/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
4251 but for arguments instead of local variables. */
4252
6fb5fa3c
DB
4253static void
4254setjmp_args_warning (bitmap setjmp_crosses)
6f086dfc 4255{
b3694847 4256 tree decl;
6f086dfc 4257 for (decl = DECL_ARGUMENTS (current_function_decl);
910ad8de 4258 decl; decl = DECL_CHAIN (decl))
6f086dfc 4259 if (DECL_RTL (decl) != 0
f8cfc6aa 4260 && REG_P (DECL_RTL (decl))
6fb5fa3c 4261 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4262 warning (OPT_Wclobbered,
2b001724 4263 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
dee15844 4264 decl);
6f086dfc
RS
4265}
4266
6fb5fa3c
DB
4267/* Generate warning messages for variables live across setjmp. */
4268
b8698a0f 4269void
6fb5fa3c
DB
4270generate_setjmp_warnings (void)
4271{
4272 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4273
0cae8d31 4274 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
6fb5fa3c
DB
4275 || bitmap_empty_p (setjmp_crosses))
4276 return;
4277
4278 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4279 setjmp_args_warning (setjmp_crosses);
4280}
4281
6f086dfc 4282\f
3373692b 4283/* Reverse the order of elements in the fragment chain T of blocks,
1e3c1d95
JJ
4284 and return the new head of the chain (old last element).
4285 In addition to that clear BLOCK_SAME_RANGE flags when needed
4286 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4287 its super fragment origin. */
3373692b
JJ
4288
4289static tree
4290block_fragments_nreverse (tree t)
4291{
1e3c1d95
JJ
4292 tree prev = 0, block, next, prev_super = 0;
4293 tree super = BLOCK_SUPERCONTEXT (t);
4294 if (BLOCK_FRAGMENT_ORIGIN (super))
4295 super = BLOCK_FRAGMENT_ORIGIN (super);
3373692b
JJ
4296 for (block = t; block; block = next)
4297 {
4298 next = BLOCK_FRAGMENT_CHAIN (block);
4299 BLOCK_FRAGMENT_CHAIN (block) = prev;
1e3c1d95
JJ
4300 if ((prev && !BLOCK_SAME_RANGE (prev))
4301 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4302 != prev_super))
4303 BLOCK_SAME_RANGE (block) = 0;
4304 prev_super = BLOCK_SUPERCONTEXT (block);
4305 BLOCK_SUPERCONTEXT (block) = super;
3373692b
JJ
4306 prev = block;
4307 }
1e3c1d95
JJ
4308 t = BLOCK_FRAGMENT_ORIGIN (t);
4309 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4310 != prev_super)
4311 BLOCK_SAME_RANGE (t) = 0;
4312 BLOCK_SUPERCONTEXT (t) = super;
3373692b
JJ
4313 return prev;
4314}
4315
4316/* Reverse the order of elements in the chain T of blocks,
4317 and return the new head of the chain (old last element).
4318 Also do the same on subblocks and reverse the order of elements
4319 in BLOCK_FRAGMENT_CHAIN as well. */
4320
4321static tree
4322blocks_nreverse_all (tree t)
4323{
4324 tree prev = 0, block, next;
4325 for (block = t; block; block = next)
4326 {
4327 next = BLOCK_CHAIN (block);
4328 BLOCK_CHAIN (block) = prev;
3373692b
JJ
4329 if (BLOCK_FRAGMENT_CHAIN (block)
4330 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
1e3c1d95
JJ
4331 {
4332 BLOCK_FRAGMENT_CHAIN (block)
4333 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4334 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4335 BLOCK_SAME_RANGE (block) = 0;
4336 }
4337 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
3373692b
JJ
4338 prev = block;
4339 }
4340 return prev;
4341}
4342
4343
a20612aa
RH
4344/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4345 and create duplicate blocks. */
4346/* ??? Need an option to either create block fragments or to create
4347 abstract origin duplicates of a source block. It really depends
4348 on what optimization has been performed. */
467456d0 4349
116eebd6 4350void
fa8db1f7 4351reorder_blocks (void)
467456d0 4352{
116eebd6 4353 tree block = DECL_INITIAL (current_function_decl);
467456d0 4354
1a4450c7 4355 if (block == NULL_TREE)
116eebd6 4356 return;
fc289cd1 4357
00f96dc9 4358 auto_vec<tree, 10> block_stack;
18c038b9 4359
a20612aa 4360 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 4361 clear_block_marks (block);
a20612aa 4362
116eebd6
MM
4363 /* Prune the old trees away, so that they don't get in the way. */
4364 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4365 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 4366
a20612aa 4367 /* Recreate the block tree from the note nesting. */
116eebd6 4368 reorder_blocks_1 (get_insns (), block, &block_stack);
3373692b 4369 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
467456d0
RS
4370}
4371
a20612aa 4372/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 4373
6de9cd9a
DN
4374void
4375clear_block_marks (tree block)
cc1fe44f 4376{
a20612aa 4377 while (block)
cc1fe44f 4378 {
a20612aa 4379 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 4380 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 4381 block = BLOCK_CHAIN (block);
cc1fe44f
DD
4382 }
4383}
4384
0a1c58a2 4385static void
691fe203
DM
4386reorder_blocks_1 (rtx_insn *insns, tree current_block,
4387 vec<tree> *p_block_stack)
0a1c58a2 4388{
691fe203 4389 rtx_insn *insn;
1e3c1d95 4390 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
0a1c58a2
JL
4391
4392 for (insn = insns; insn; insn = NEXT_INSN (insn))
4393 {
4b4bf941 4394 if (NOTE_P (insn))
0a1c58a2 4395 {
a38e7aa5 4396 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
0a1c58a2
JL
4397 {
4398 tree block = NOTE_BLOCK (insn);
51b7d006
DJ
4399 tree origin;
4400
3373692b
JJ
4401 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4402 origin = block;
a20612aa 4403
1e3c1d95
JJ
4404 if (prev_end)
4405 BLOCK_SAME_RANGE (prev_end) = 0;
4406 prev_end = NULL_TREE;
4407
a20612aa
RH
4408 /* If we have seen this block before, that means it now
4409 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
4410 if (TREE_ASM_WRITTEN (block))
4411 {
a20612aa 4412 tree new_block = copy_node (block);
a20612aa 4413
1e3c1d95 4414 BLOCK_SAME_RANGE (new_block) = 0;
a20612aa
RH
4415 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4416 BLOCK_FRAGMENT_CHAIN (new_block)
4417 = BLOCK_FRAGMENT_CHAIN (origin);
4418 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4419
4420 NOTE_BLOCK (insn) = new_block;
4421 block = new_block;
0a1c58a2 4422 }
a20612aa 4423
1e3c1d95
JJ
4424 if (prev_beg == current_block && prev_beg)
4425 BLOCK_SAME_RANGE (block) = 1;
4426
4427 prev_beg = origin;
4428
0a1c58a2
JL
4429 BLOCK_SUBBLOCKS (block) = 0;
4430 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
4431 /* When there's only one block for the entire function,
4432 current_block == block and we mustn't do this; it
4433 would cause infinite recursion. */
4434 if (block != current_block)
4435 {
1e3c1d95 4436 tree super;
51b7d006 4437 if (block != origin)
1e3c1d95
JJ
4438 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4439 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4440 (origin))
4441 == current_block);
9771b263 4442 if (p_block_stack->is_empty ())
1e3c1d95
JJ
4443 super = current_block;
4444 else
4445 {
9771b263 4446 super = p_block_stack->last ();
1e3c1d95
JJ
4447 gcc_assert (super == current_block
4448 || BLOCK_FRAGMENT_ORIGIN (super)
4449 == current_block);
4450 }
4451 BLOCK_SUPERCONTEXT (block) = super;
339a28b9
ZW
4452 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4453 BLOCK_SUBBLOCKS (current_block) = block;
51b7d006 4454 current_block = origin;
339a28b9 4455 }
9771b263 4456 p_block_stack->safe_push (block);
0a1c58a2 4457 }
a38e7aa5 4458 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
0a1c58a2 4459 {
9771b263 4460 NOTE_BLOCK (insn) = p_block_stack->pop ();
0a1c58a2 4461 current_block = BLOCK_SUPERCONTEXT (current_block);
1e3c1d95
JJ
4462 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4463 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4464 prev_beg = NULL_TREE;
4465 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4466 ? NOTE_BLOCK (insn) : NULL_TREE;
0a1c58a2
JL
4467 }
4468 }
1e3c1d95
JJ
4469 else
4470 {
4471 prev_beg = NULL_TREE;
4472 if (prev_end)
4473 BLOCK_SAME_RANGE (prev_end) = 0;
4474 prev_end = NULL_TREE;
4475 }
0a1c58a2
JL
4476 }
4477}
4478
467456d0
RS
4479/* Reverse the order of elements in the chain T of blocks,
4480 and return the new head of the chain (old last element). */
4481
6de9cd9a 4482tree
fa8db1f7 4483blocks_nreverse (tree t)
467456d0 4484{
3373692b
JJ
4485 tree prev = 0, block, next;
4486 for (block = t; block; block = next)
467456d0 4487 {
3373692b
JJ
4488 next = BLOCK_CHAIN (block);
4489 BLOCK_CHAIN (block) = prev;
4490 prev = block;
467456d0
RS
4491 }
4492 return prev;
4493}
4494
61e46a7d
NF
4495/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4496 by modifying the last node in chain 1 to point to chain 2. */
4497
4498tree
4499block_chainon (tree op1, tree op2)
4500{
4501 tree t1;
4502
4503 if (!op1)
4504 return op2;
4505 if (!op2)
4506 return op1;
4507
4508 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4509 continue;
4510 BLOCK_CHAIN (t1) = op2;
4511
4512#ifdef ENABLE_TREE_CHECKING
4513 {
4514 tree t2;
4515 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4516 gcc_assert (t2 != t1);
4517 }
4518#endif
4519
4520 return op1;
4521}
4522
18c038b9
MM
4523/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4524 non-NULL, list them all into VECTOR, in a depth-first preorder
4525 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 4526 blocks. */
467456d0
RS
4527
4528static int
fa8db1f7 4529all_blocks (tree block, tree *vector)
467456d0 4530{
b2a59b15
MS
4531 int n_blocks = 0;
4532
a84efb51
JO
4533 while (block)
4534 {
4535 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 4536
a84efb51
JO
4537 /* Record this block. */
4538 if (vector)
4539 vector[n_blocks] = block;
b2a59b15 4540
a84efb51 4541 ++n_blocks;
718fe406 4542
a84efb51
JO
4543 /* Record the subblocks, and their subblocks... */
4544 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4545 vector ? vector + n_blocks : 0);
4546 block = BLOCK_CHAIN (block);
4547 }
467456d0
RS
4548
4549 return n_blocks;
4550}
18c038b9
MM
4551
4552/* Return a vector containing all the blocks rooted at BLOCK. The
4553 number of elements in the vector is stored in N_BLOCKS_P. The
4554 vector is dynamically allocated; it is the caller's responsibility
4555 to call `free' on the pointer returned. */
718fe406 4556
18c038b9 4557static tree *
fa8db1f7 4558get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
4559{
4560 tree *block_vector;
4561
4562 *n_blocks_p = all_blocks (block, NULL);
5ed6ace5 4563 block_vector = XNEWVEC (tree, *n_blocks_p);
18c038b9
MM
4564 all_blocks (block, block_vector);
4565
4566 return block_vector;
4567}
4568
f83b236e 4569static GTY(()) int next_block_index = 2;
18c038b9
MM
4570
4571/* Set BLOCK_NUMBER for all the blocks in FN. */
4572
4573void
fa8db1f7 4574number_blocks (tree fn)
18c038b9
MM
4575{
4576 int i;
4577 int n_blocks;
4578 tree *block_vector;
4579
4580 /* For SDB and XCOFF debugging output, we start numbering the blocks
4581 from 1 within each function, rather than keeping a running
4582 count. */
4583#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
4584 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4585 next_block_index = 1;
18c038b9
MM
4586#endif
4587
4588 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4589
4590 /* The top-level BLOCK isn't numbered at all. */
4591 for (i = 1; i < n_blocks; ++i)
4592 /* We number the blocks from two. */
4593 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4594
4595 free (block_vector);
4596
4597 return;
4598}
df8992f8
RH
4599
4600/* If VAR is present in a subblock of BLOCK, return the subblock. */
4601
24e47c76 4602DEBUG_FUNCTION tree
fa8db1f7 4603debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
4604{
4605 tree t;
4606
4607 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4608 if (t == var)
4609 return block;
4610
4611 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4612 {
4613 tree ret = debug_find_var_in_block_tree (var, t);
4614 if (ret)
4615 return ret;
4616 }
4617
4618 return NULL_TREE;
4619}
467456d0 4620\f
db2960f4
SL
4621/* Keep track of whether we're in a dummy function context. If we are,
4622 we don't want to invoke the set_current_function hook, because we'll
4623 get into trouble if the hook calls target_reinit () recursively or
4624 when the initial initialization is not yet complete. */
4625
4626static bool in_dummy_function;
4627
ab442df7
MM
4628/* Invoke the target hook when setting cfun. Update the optimization options
4629 if the function uses different options than the default. */
db2960f4
SL
4630
4631static void
4632invoke_set_current_function_hook (tree fndecl)
4633{
4634 if (!in_dummy_function)
ab442df7
MM
4635 {
4636 tree opts = ((fndecl)
4637 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4638 : optimization_default_node);
4639
4640 if (!opts)
4641 opts = optimization_default_node;
4642
4643 /* Change optimization options if needed. */
4644 if (optimization_current_node != opts)
4645 {
4646 optimization_current_node = opts;
46625112 4647 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
ab442df7
MM
4648 }
4649
892c4745 4650 targetm.set_current_function (fndecl);
4b1baac8 4651 this_fn_optabs = this_target_optabs;
135204dd 4652
4b1baac8 4653 if (opts != optimization_default_node)
135204dd 4654 {
4b1baac8
RS
4655 init_tree_optimization_optabs (opts);
4656 if (TREE_OPTIMIZATION_OPTABS (opts))
4657 this_fn_optabs = (struct target_optabs *)
4658 TREE_OPTIMIZATION_OPTABS (opts);
135204dd 4659 }
ab442df7 4660 }
db2960f4
SL
4661}
4662
4663/* cfun should never be set directly; use this function. */
4664
4665void
4666set_cfun (struct function *new_cfun)
4667{
4668 if (cfun != new_cfun)
4669 {
4670 cfun = new_cfun;
4671 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4672 }
4673}
4674
db2960f4
SL
4675/* Initialized with NOGC, making this poisonous to the garbage collector. */
4676
9771b263 4677static vec<function_p> cfun_stack;
db2960f4 4678
af16bc76
MJ
4679/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4680 current_function_decl accordingly. */
db2960f4
SL
4681
4682void
4683push_cfun (struct function *new_cfun)
4684{
af16bc76
MJ
4685 gcc_assert ((!cfun && !current_function_decl)
4686 || (cfun && current_function_decl == cfun->decl));
9771b263 4687 cfun_stack.safe_push (cfun);
af16bc76 4688 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4
SL
4689 set_cfun (new_cfun);
4690}
4691
af16bc76 4692/* Pop cfun from the stack. Also set current_function_decl accordingly. */
db2960f4
SL
4693
4694void
4695pop_cfun (void)
4696{
9771b263 4697 struct function *new_cfun = cfun_stack.pop ();
af16bc76
MJ
4698 /* When in_dummy_function, we do have a cfun but current_function_decl is
4699 NULL. We also allow pushing NULL cfun and subsequently changing
4700 current_function_decl to something else and have both restored by
4701 pop_cfun. */
4702 gcc_checking_assert (in_dummy_function
4703 || !cfun
4704 || current_function_decl == cfun->decl);
38d34676 4705 set_cfun (new_cfun);
af16bc76 4706 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4 4707}
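/* Illustrative aside: a typical use of the push_cfun/pop_cfun pair when
   temporarily operating on another function's body.  frob_other_function
   and its body are hypothetical, written only for this sketch; the point
   is the push/pop discipline around accesses to cfun.  Guarded out so it
   is never compiled as part of this file.  */
#if 0
static void
frob_other_function (tree fndecl)
{
  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  /* ...inspect or modify cfun here, e.g. check cfun->stdarg...  */
  pop_cfun ();
}
#endif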
3e87758a
RL
4708
4709/* Return the current value of funcdef_no and increment it. */
4710int
b8698a0f 4711get_next_funcdef_no (void)
3e87758a
RL
4712{
4713 return funcdef_no++;
4714}
4715
903d1e67
XDL
4716/* Return the current value of funcdef_no. */
4717int
4718get_last_funcdef_no (void)
4719{
4720 return funcdef_no;
4721}
4722
3a70d621 4723/* Allocate a function structure for FNDECL and set its contents
db2960f4
SL
4724 to the defaults. Set cfun to the newly-allocated object.
4725 Some of the helper functions invoked during initialization assume
4726 that cfun has already been set. Therefore, assign the new object
4727 directly into cfun and invoke the back end hook explicitly at the
4728 very end, rather than initializing a temporary and calling set_cfun
4729 on it.
182e0d71
AK
4730
4731 ABSTRACT_P is true if this is a function that will never be seen by
4732 the middle-end. Such functions are front-end concepts (like C++
4733 function templates) that do not correspond directly to functions
4734 placed in object files. */
7a80cf9a 4735
3a70d621 4736void
182e0d71 4737allocate_struct_function (tree fndecl, bool abstract_p)
6f086dfc 4738{
6de9cd9a 4739 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 4740
766090c2 4741 cfun = ggc_cleared_alloc<function> ();
b384405b 4742
3a70d621 4743 init_eh_for_function ();
6f086dfc 4744
3a70d621
RH
4745 if (init_machine_status)
4746 cfun->machine = (*init_machine_status) ();
e2ecd91c 4747
7c800926
KT
4748#ifdef OVERRIDE_ABI_FORMAT
4749 OVERRIDE_ABI_FORMAT (fndecl);
4750#endif
4751
81464b2c 4752 if (fndecl != NULL_TREE)
3a70d621 4753 {
db2960f4
SL
4754 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4755 cfun->decl = fndecl;
70cf5bc1 4756 current_function_funcdef_no = get_next_funcdef_no ();
5b9db1bc
MJ
4757 }
4758
4759 invoke_set_current_function_hook (fndecl);
db2960f4 4760
5b9db1bc
MJ
4761 if (fndecl != NULL_TREE)
4762 {
4763 tree result = DECL_RESULT (fndecl);
182e0d71 4764 if (!abstract_p && aggregate_value_p (result, fndecl))
db2960f4 4765 {
3a70d621 4766#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4767 cfun->returns_pcc_struct = 1;
3a70d621 4768#endif
e3b5732b 4769 cfun->returns_struct = 1;
db2960f4
SL
4770 }
4771
f38958e8 4772 cfun->stdarg = stdarg_p (fntype);
b8698a0f 4773
db2960f4
SL
4774 /* Assume all registers in stdarg functions need to be saved. */
4775 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4776 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
8f4f502f
EB
4777
4778 /* ??? This could be set on a per-function basis by the front-end
4779 but is this worth the hassle? */
4780 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
d764963b 4781 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
0b37ba8a
AK
4782
4783 if (!profile_flag && !flag_instrument_function_entry_exit)
4784 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
3a70d621 4785 }
db2960f4
SL
4786}
4787
4788/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4789 instead of just setting it. */
9d30f3c1 4790
db2960f4
SL
4791void
4792push_struct_function (tree fndecl)
4793{
af16bc76
MJ
4794 /* When in_dummy_function we might be in the middle of a pop_cfun and
4795 current_function_decl and cfun may not match. */
4796 gcc_assert (in_dummy_function
4797 || (!cfun && !current_function_decl)
4798 || (cfun && current_function_decl == cfun->decl));
9771b263 4799 cfun_stack.safe_push (cfun);
af16bc76 4800 current_function_decl = fndecl;
182e0d71 4801 allocate_struct_function (fndecl, false);
3a70d621 4802}
6f086dfc 4803
8f4f502f 4804/* Reset crtl and other non-struct-function variables to defaults as
2067c116 4805 appropriate for emitting rtl at the start of a function. */
6f086dfc 4806
3a70d621 4807static void
db2960f4 4808prepare_function_start (void)
3a70d621 4809{
3e029763 4810 gcc_assert (!crtl->emit.x_last_insn);
fb0703f7 4811 init_temp_slots ();
0de456a5 4812 init_emit ();
bd60bab2 4813 init_varasm_status ();
0de456a5 4814 init_expr ();
bf08ebeb 4815 default_rtl_profile ();
6f086dfc 4816
a11e0df4 4817 if (flag_stack_usage_info)
d3c12306 4818 {
766090c2 4819 cfun->su = ggc_cleared_alloc<stack_usage> ();
d3c12306
EB
4820 cfun->su->static_stack_size = -1;
4821 }
4822
3a70d621 4823 cse_not_expected = ! optimize;
6f086dfc 4824
3a70d621
RH
4825 /* Caller save not needed yet. */
4826 caller_save_needed = 0;
6f086dfc 4827
3a70d621
RH
4828 /* We haven't done register allocation yet. */
4829 reg_renumber = 0;
6f086dfc 4830
b384405b
BS
4831 /* Indicate that we have not instantiated virtual registers yet. */
4832 virtuals_instantiated = 0;
4833
1b3d8f8a
GK
4834 /* Indicate that we want CONCATs now. */
4835 generating_concat_p = 1;
4836
b384405b
BS
4837 /* Indicate we have no need of a frame pointer yet. */
4838 frame_pointer_needed = 0;
b384405b
BS
4839}
4840
4841/* Initialize the rtl expansion mechanism so that we can do simple things
4842 like generate sequences. This is used to provide a context during global
db2960f4
SL
4843 initialization of some passes. You must call expand_dummy_function_end
4844 to exit this context. */
4845
b384405b 4846void
fa8db1f7 4847init_dummy_function_start (void)
b384405b 4848{
db2960f4
SL
4849 gcc_assert (!in_dummy_function);
4850 in_dummy_function = true;
4851 push_struct_function (NULL_TREE);
4852 prepare_function_start ();
b384405b
BS
4853}
4854
4855/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4856 and initialize static variables for generating RTL for the statements
4857 of the function. */
4858
4859void
fa8db1f7 4860init_function_start (tree subr)
b384405b 4861{
db2960f4
SL
4862 if (subr && DECL_STRUCT_FUNCTION (subr))
4863 set_cfun (DECL_STRUCT_FUNCTION (subr));
4864 else
182e0d71 4865 allocate_struct_function (subr, false);
b9b5f433
JH
4866
4867 /* Initialize backend, if needed. */
4868 initialize_rtl ();
4869
db2960f4 4870 prepare_function_start ();
2c7eebae 4871 decide_function_section (subr);
b384405b 4872
6f086dfc
RS
4873 /* Warn if this value is an aggregate type,
4874 regardless of which calling convention we are using for it. */
ccf08a6e
DD
4875 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4876 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 4877}
5c7675e9 4878
7d69de61
RH
4879/* Expand code to verify the stack_protect_guard. This is invoked at
4880 the end of a function to be protected. */
4881
4882#ifndef HAVE_stack_protect_test
b76be05e
JJ
4883# define HAVE_stack_protect_test 0
4884# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
7d69de61
RH
4885#endif
4886
b755446c 4887void
7d69de61
RH
4888stack_protect_epilogue (void)
4889{
4890 tree guard_decl = targetm.stack_protect_guard ();
19f8b229 4891 rtx_code_label *label = gen_label_rtx ();
7d69de61
RH
4892 rtx x, y, tmp;
4893
08d4cc33
RH
4894 x = expand_normal (crtl->stack_protect_guard);
4895 y = expand_normal (guard_decl);
7d69de61
RH
4896
4897 /* Allow the target to compare Y with X without leaking either into
4898 a register. */
fedfecef 4899 switch ((int) (HAVE_stack_protect_test != 0))
7d69de61
RH
4900 {
4901 case 1:
3aebbe5f 4902 tmp = gen_stack_protect_test (x, y, label);
7d69de61
RH
4903 if (tmp)
4904 {
4905 emit_insn (tmp);
7d69de61
RH
4906 break;
4907 }
4908 /* FALLTHRU */
4909
4910 default:
4911 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4912 break;
4913 }
4914
4915 /* The noreturn predictor has been moved to the tree level. The rtl-level
4916 predictors estimate this branch about 20%, which isn't enough to get
4917 things moved out of line. Since this is the only extant case of adding
 4918 a noreturn function at the rtl level, it doesn't seem worth doing aught
4919 except adding the prediction by hand. */
4920 tmp = get_last_insn ();
4921 if (JUMP_P (tmp))
9f215bf5 4922 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
7d69de61 4923
b3c144a3
SB
4924 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4925 free_temp_slots ();
7d69de61
RH
4926 emit_label (label);
4927}
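/* Emitted-code sketch (illustrative, not part of the original sources):
   the epilogue built above behaves roughly like

     if (local_guard_copy == __stack_chk_guard)
       goto ok;
     __stack_chk_fail ();
   ok:

   where the comparison uses the target's stack_protect_test pattern when
   available and a generic compare-and-jump otherwise, and the failure call
   comes from targetm.stack_protect_fail ().  */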
4928\f
6f086dfc
RS
4929/* Start the RTL for a new function, and set variables used for
4930 emitting RTL.
4931 SUBR is the FUNCTION_DECL node.
4932 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4933 the function's parameters, which must be run at any return statement. */
4934
4935void
b79c5284 4936expand_function_start (tree subr)
6f086dfc 4937{
6f086dfc
RS
4938 /* Make sure volatile mem refs aren't considered
4939 valid operands of arithmetic insns. */
4940 init_recog_no_volatile ();
4941
e3b5732b 4942 crtl->profile
70f4f91c
WC
4943 = (profile_flag
4944 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4945
e3b5732b 4946 crtl->limit_stack
a157febd
GK
4947 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4948
52a11cbf
RH
4949 /* Make the label for return statements to jump to. Do not special
4950 case machines with special return instructions -- they will be
4951 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 4952 return_label = gen_label_rtx ();
6f086dfc
RS
4953
4954 /* Initialize rtx used to return the value. */
4955 /* Do this before assign_parms so that we copy the struct value address
4956 before any library calls that assign parms might generate. */
4957
4958 /* Decide whether to return the value in memory or in a register. */
61f71b34 4959 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
4960 {
4961 /* Returning something that won't go in a register. */
b3694847 4962 rtx value_address = 0;
6f086dfc
RS
4963
4964#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4965 if (cfun->returns_pcc_struct)
6f086dfc
RS
4966 {
4967 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4968 value_address = assemble_static_space (size);
4969 }
4970 else
4971#endif
4972 {
2225b57c 4973 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
6f086dfc
RS
4974 /* Expect to be passed the address of a place to store the value.
4975 If it is passed as an argument, assign_parms will take care of
4976 it. */
61f71b34 4977 if (sv)
6f086dfc
RS
4978 {
4979 value_address = gen_reg_rtx (Pmode);
61f71b34 4980 emit_move_insn (value_address, sv);
6f086dfc
RS
4981 }
4982 }
4983 if (value_address)
ccdecf58 4984 {
01c98570
JM
4985 rtx x = value_address;
4986 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4987 {
4988 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4989 set_mem_attributes (x, DECL_RESULT (subr), 1);
4990 }
abde42f7 4991 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 4992 }
6f086dfc
RS
4993 }
4994 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4995 /* If return mode is void, this decl rtl should not be used. */
19e7881c 4996 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 4997 else
a53e14c0 4998 {
d5bf1143
RH
4999 /* Compute the return values into a pseudo reg, which we will copy
5000 into the true return register after the cleanups are done. */
bef5d8b6
RS
5001 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5002 if (TYPE_MODE (return_type) != BLKmode
5003 && targetm.calls.return_in_msb (return_type))
5004 /* expand_function_end will insert the appropriate padding in
5005 this case. Use the return value's natural (unpadded) mode
5006 within the function proper. */
5007 SET_DECL_RTL (DECL_RESULT (subr),
5008 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 5009 else
0bccc606 5010 {
bef5d8b6
RS
5011 /* In order to figure out what mode to use for the pseudo, we
5012 figure out what the mode of the eventual return register will
5013 actually be, and use that. */
1d636cc6 5014 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
bef5d8b6
RS
5015
5016 /* Structures that are returned in registers are not
5017 aggregate_value_p, so we may see a PARALLEL or a REG. */
5018 if (REG_P (hard_reg))
5019 SET_DECL_RTL (DECL_RESULT (subr),
5020 gen_reg_rtx (GET_MODE (hard_reg)));
5021 else
5022 {
5023 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5024 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
5025 }
0bccc606 5026 }
a53e14c0 5027
084a1106
JDA
5028 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5029 result to the real return register(s). */
5030 DECL_REGISTER (DECL_RESULT (subr)) = 1;
d5e254e1
IE
5031
5032 if (chkp_function_instrumented_p (current_function_decl))
5033 {
5034 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5035 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5036 subr, 1);
5037 SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
5038 }
a53e14c0 5039 }
6f086dfc
RS
5040
5041 /* Initialize rtx for parameters and local variables.
5042 In some cases this requires emitting insns. */
0d1416c6 5043 assign_parms (subr);
6f086dfc 5044
6de9cd9a
DN
5045 /* If function gets a static chain arg, store it. */
5046 if (cfun->static_chain_decl)
5047 {
7e140280 5048 tree parm = cfun->static_chain_decl;
531ca746 5049 rtx local, chain, insn;
7e140280 5050
531ca746
RH
5051 local = gen_reg_rtx (Pmode);
5052 chain = targetm.calls.static_chain (current_function_decl, true);
5053
5054 set_decl_incoming_rtl (parm, chain, false);
7e140280 5055 SET_DECL_RTL (parm, local);
7e140280 5056 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 5057
531ca746
RH
5058 insn = emit_move_insn (local, chain);
5059
5060 /* Mark the register as eliminable, similar to parameters. */
5061 if (MEM_P (chain)
5062 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
7543f918 5063 set_dst_reg_note (insn, REG_EQUIV, chain, local);
3fd48b12
EB
5064
5065 /* If we aren't optimizing, save the static chain onto the stack. */
5066 if (!optimize)
5067 {
5068 tree saved_static_chain_decl
5069 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5070 DECL_NAME (parm), TREE_TYPE (parm));
5071 rtx saved_static_chain_rtx
5072 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5073 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5074 emit_move_insn (saved_static_chain_rtx, chain);
5075 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5076 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5077 }
6de9cd9a
DN
5078 }
5079
5080 /* If the function receives a non-local goto, then store the
5081 bits we need to restore the frame pointer. */
5082 if (cfun->nonlocal_goto_save_area)
5083 {
5084 tree t_save;
5085 rtx r_save;
5086
4846b435 5087 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
ca5f4331 5088 gcc_assert (DECL_RTL_SET_P (var));
6de9cd9a 5089
6bbec3e1
L
5090 t_save = build4 (ARRAY_REF,
5091 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
3244e67d
RS
5092 cfun->nonlocal_goto_save_area,
5093 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 5094 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
6bbec3e1 5095 gcc_assert (GET_MODE (r_save) == Pmode);
f0c51a1e 5096
88280cf9 5097 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
6de9cd9a
DN
5098 update_nonlocal_goto_save_area ();
5099 }
f0c51a1e 5100
6f086dfc
RS
5101 /* The following was moved from init_function_start.
5102 The move is supposed to make sdb output more accurate. */
5103 /* Indicate the beginning of the function body,
5104 as opposed to parm setup. */
2e040219 5105 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 5106
ede497cf
SB
5107 gcc_assert (NOTE_P (get_last_insn ()));
5108
6f086dfc
RS
5109 parm_birth_insn = get_last_insn ();
5110
e3b5732b 5111 if (crtl->profile)
f6f315fe 5112 {
f6f315fe 5113#ifdef PROFILE_HOOK
df696a75 5114 PROFILE_HOOK (current_function_funcdef_no);
411707f4 5115#endif
f6f315fe 5116 }
411707f4 5117
6d3cc8f0
EB
5118 /* If we are doing generic stack checking, the probe should go here. */
5119 if (flag_stack_check == GENERIC_STACK_CHECK)
ede497cf 5120 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
5121}
5122\f
49ad7cfa
BS
5123/* Undo the effects of init_dummy_function_start. */
5124void
fa8db1f7 5125expand_dummy_function_end (void)
49ad7cfa 5126{
db2960f4
SL
5127 gcc_assert (in_dummy_function);
5128
49ad7cfa
BS
5129 /* End any sequences that failed to be closed due to syntax errors. */
5130 while (in_sequence_p ())
5131 end_sequence ();
5132
5133 /* Outside function body, can't compute type's actual size
5134 until next function's body starts. */
fa51b01b 5135
01d939e8
BS
5136 free_after_parsing (cfun);
5137 free_after_compilation (cfun);
db2960f4
SL
5138 pop_cfun ();
5139 in_dummy_function = false;
49ad7cfa
BS
5140}
5141
d5e254e1 5142/* Helper for diddle_return_value. */
bd695e1e
RH
5143
5144void
d5e254e1 5145diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
bd695e1e 5146{
c13fde05
RH
5147 if (! outgoing)
5148 return;
bd695e1e 5149
f8cfc6aa 5150 if (REG_P (outgoing))
c13fde05
RH
5151 (*doit) (outgoing, arg);
5152 else if (GET_CODE (outgoing) == PARALLEL)
5153 {
5154 int i;
bd695e1e 5155
c13fde05
RH
5156 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5157 {
5158 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5159
f8cfc6aa 5160 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 5161 (*doit) (x, arg);
bd695e1e
RH
5162 }
5163 }
5164}
5165
d5e254e1
IE
5166/* Call DOIT for each hard register used as a return value from
5167 the current function. */
5168
5169void
5170diddle_return_value (void (*doit) (rtx, void *), void *arg)
5171{
5172 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5173 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5174}
5175
c13fde05 5176static void
fa8db1f7 5177do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 5178{
c41c1387 5179 emit_clobber (reg);
c13fde05
RH
5180}
5181
5182void
fa8db1f7 5183clobber_return_register (void)
c13fde05
RH
5184{
5185 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
5186
5187 /* In case we do use pseudo to return value, clobber it too. */
5188 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5189 {
5190 tree decl_result = DECL_RESULT (current_function_decl);
5191 rtx decl_rtl = DECL_RTL (decl_result);
5192 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5193 {
5194 do_clobber_return_reg (decl_rtl, NULL);
5195 }
5196 }
c13fde05
RH
5197}
5198
5199static void
fa8db1f7 5200do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 5201{
c41c1387 5202 emit_use (reg);
c13fde05
RH
5203}
5204
0bf8477d 5205static void
fa8db1f7 5206use_return_register (void)
c13fde05
RH
5207{
5208 diddle_return_value (do_use_return_reg, NULL);
5209}
5210
902edd36
JH
5211/* Possibly warn about unused parameters. */
5212void
5213do_warn_unused_parameter (tree fn)
5214{
5215 tree decl;
5216
5217 for (decl = DECL_ARGUMENTS (fn);
910ad8de 5218 decl; decl = DECL_CHAIN (decl))
902edd36 5219 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
534fd534
DF
5220 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
5221 && !TREE_NO_WARNING (decl))
b9b8dde3 5222 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
902edd36
JH
5223}
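/* Example (illustrative only): compiling

     void f (int unused_arg) { }

   with -Wunused-parameter makes the loop above report
   "unused parameter 'unused_arg'" for that PARM_DECL.  */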
5224
862d0b35
DN
5225/* Set the location of the insn chain starting at INSN to LOC. */
5226
5227static void
dc01c3d1 5228set_insn_locations (rtx_insn *insn, int loc)
862d0b35 5229{
dc01c3d1 5230 while (insn != NULL)
862d0b35
DN
5231 {
5232 if (INSN_P (insn))
5233 INSN_LOCATION (insn) = loc;
5234 insn = NEXT_INSN (insn);
5235 }
5236}
5237
71c0e7fc 5238/* Generate RTL for the end of the current function. */
6f086dfc
RS
5239
5240void
fa8db1f7 5241expand_function_end (void)
6f086dfc 5242{
932f0847 5243 rtx clobber_after;
6f086dfc 5244
964be02f
RH
5245 /* If arg_pointer_save_area was referenced only from a nested
5246 function, we will not have initialized it yet. Do that now. */
e3b5732b 5247 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
bd60bab2 5248 get_arg_pointer_save_area ();
964be02f 5249
b38f3813 5250 /* If we are doing generic stack checking and this function makes calls,
11044f66
RK
5251 do a stack probe at the start of the function to ensure we have enough
5252 space for another stack frame. */
b38f3813 5253 if (flag_stack_check == GENERIC_STACK_CHECK)
11044f66 5254 {
691fe203 5255 rtx_insn *insn, *seq;
11044f66
RK
5256
5257 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 5258 if (CALL_P (insn))
11044f66 5259 {
c35af30f 5260 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
11044f66 5261 start_sequence ();
c35af30f
EB
5262 if (STACK_CHECK_MOVING_SP)
5263 anti_adjust_stack_and_probe (max_frame_size, true);
5264 else
5265 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
11044f66
RK
5266 seq = get_insns ();
5267 end_sequence ();
5368224f 5268 set_insn_locations (seq, prologue_location);
ede497cf 5269 emit_insn_before (seq, stack_check_probe_note);
11044f66
RK
5270 break;
5271 }
5272 }
5273
6f086dfc
RS
5274 /* End any sequences that failed to be closed due to syntax errors. */
5275 while (in_sequence_p ())
5f4f0e22 5276 end_sequence ();
6f086dfc 5277
6f086dfc
RS
5278 clear_pending_stack_adjust ();
5279 do_pending_stack_adjust ();
5280
6f086dfc
RS
 5281 /* Output a line number for the end of the function.
5282 SDB depends on this. */
5368224f 5283 set_curr_insn_location (input_location);
6f086dfc 5284
fbffc70a 5285 /* Before the return label (if any), clobber the return
a1f300c0 5286 registers so that they are not propagated live to the rest of
fbffc70a
GK
5287 the function. This can only happen with functions that drop
5288 through; if there had been a return statement, there would
932f0847
JH
5289 have either been a return rtx, or a jump to the return label.
5290
5291 We delay actual code generation after the current_function_value_rtx
5292 is computed. */
5293 clobber_after = get_last_insn ();
fbffc70a 5294
526c334b
KH
5295 /* Output the label for the actual return from the function. */
5296 emit_label (return_label);
6f086dfc 5297
677f3fa8 5298 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
815eb8f0
AM
5299 {
5300 /* Let except.c know where it should emit the call to unregister
5301 the function context for sjlj exceptions. */
5302 if (flag_exceptions)
5303 sjlj_emit_function_exit_after (get_last_insn ());
5304 }
6fb5fa3c
DB
5305 else
5306 {
5307 /* We want to ensure that instructions that may trap are not
5308 moved into the epilogue by scheduling, because we don't
5309 always emit unwind information for the epilogue. */
8f4f502f 5310 if (cfun->can_throw_non_call_exceptions)
6fb5fa3c
DB
5311 emit_insn (gen_blockage ());
5312 }
0b59e81e 5313
652b0932
RH
5314 /* If this is an implementation of throw, do what's necessary to
5315 communicate between __builtin_eh_return and the epilogue. */
5316 expand_eh_return ();
5317
3e4eac3f
RH
5318 /* If scalar return value was computed in a pseudo-reg, or was a named
5319 return value that got dumped to the stack, copy that to the hard
5320 return register. */
19e7881c 5321 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 5322 {
3e4eac3f
RH
5323 tree decl_result = DECL_RESULT (current_function_decl);
5324 rtx decl_rtl = DECL_RTL (decl_result);
5325
5326 if (REG_P (decl_rtl)
5327 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5328 : DECL_REGISTER (decl_result))
5329 {
38173d38 5330 rtx real_decl_rtl = crtl->return_rtx;
6f086dfc 5331
ce5e43d0 5332 /* This should be set in assign_parms. */
0bccc606 5333 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
5334
5335 /* If this is a BLKmode structure being returned in registers,
5336 then use the mode computed in expand_return. Note that if
797a6ac1 5337 decl_rtl is memory, then its mode may have been changed,
38173d38 5338 but that crtl->return_rtx has not. */
3e4eac3f 5339 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 5340 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 5341
bef5d8b6
RS
5342 /* If a non-BLKmode return value should be padded at the least
5343 significant end of the register, shift it left by the appropriate
5344 amount. BLKmode results are handled using the group load/store
5345 machinery. */
5346 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
66de4d7c 5347 && REG_P (real_decl_rtl)
bef5d8b6
RS
5348 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5349 {
5350 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5351 REGNO (real_decl_rtl)),
5352 decl_rtl);
5353 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5354 }
3e4eac3f 5355 /* If a named return value dumped decl_return to memory, then
797a6ac1 5356 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 5357 extension. */
bef5d8b6 5358 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 5359 {
8df83eae 5360 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
cde0f3fd
PB
5361 promote_function_mode (TREE_TYPE (decl_result),
5362 GET_MODE (decl_rtl), &unsignedp,
5363 TREE_TYPE (current_function_decl), 1);
3e4eac3f
RH
5364
5365 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5366 }
aa570f54 5367 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
5368 {
5369 /* If expand_function_start has created a PARALLEL for decl_rtl,
5370 move the result to the real return registers. Otherwise, do
5371 a group load from decl_rtl for a named return. */
5372 if (GET_CODE (decl_rtl) == PARALLEL)
5373 emit_group_move (real_decl_rtl, decl_rtl);
5374 else
5375 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 5376 TREE_TYPE (decl_result),
084a1106
JDA
5377 int_size_in_bytes (TREE_TYPE (decl_result)));
5378 }
652b0932
RH
5379 /* In the case of complex integer modes smaller than a word, we'll
5380 need to generate some non-trivial bitfield insertions. Do that
5381 on a pseudo and not the hard register. */
5382 else if (GET_CODE (decl_rtl) == CONCAT
5383 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5384 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5385 {
5386 int old_generating_concat_p;
5387 rtx tmp;
5388
5389 old_generating_concat_p = generating_concat_p;
5390 generating_concat_p = 0;
5391 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5392 generating_concat_p = old_generating_concat_p;
5393
5394 emit_move_insn (tmp, decl_rtl);
5395 emit_move_insn (real_decl_rtl, tmp);
5396 }
3e4eac3f
RH
5397 else
5398 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 5399 }
6f086dfc
RS
5400 }
5401
5402 /* If returning a structure, arrange to return the address of the value
5403 in a place where debuggers expect to find it.
5404
5405 If returning a structure PCC style,
5406 the caller also depends on this value.
e3b5732b 5407 And cfun->returns_pcc_struct is not necessarily set. */
e0d14c39
BS
5408 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5409 && !targetm.calls.omit_struct_return_reg)
6f086dfc 5410 {
cc77ae10 5411 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 5412 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
5413 rtx outgoing;
5414
5415 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5416 type = TREE_TYPE (type);
5417 else
5418 value_address = XEXP (value_address, 0);
5419
1d636cc6
RG
5420 outgoing = targetm.calls.function_value (build_pointer_type (type),
5421 current_function_decl, true);
6f086dfc
RS
5422
5423 /* Mark this as a function return value so integrate will delete the
5424 assignment and USE below when inlining this function. */
5425 REG_FUNCTION_VALUE_P (outgoing) = 1;
5426
d1608933 5427 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
5428 value_address = convert_memory_address (GET_MODE (outgoing),
5429 value_address);
d1608933 5430
6f086dfc 5431 emit_move_insn (outgoing, value_address);
d1608933
RK
5432
5433 /* Show return register used to hold result (in this case the address
 5434 of the result).  */
38173d38 5435 crtl->return_rtx = outgoing;
6f086dfc
RS
5436 }
5437
79c7fda6
JJ
5438 /* Emit the actual code to clobber return register. Don't emit
5439 it if clobber_after is a barrier, then the previous basic block
5440 certainly doesn't fall thru into the exit block. */
5441 if (!BARRIER_P (clobber_after))
5442 {
5443 rtx seq;
797a6ac1 5444
79c7fda6
JJ
5445 start_sequence ();
5446 clobber_return_register ();
5447 seq = get_insns ();
5448 end_sequence ();
932f0847 5449
79c7fda6
JJ
5450 emit_insn_after (seq, clobber_after);
5451 }
932f0847 5452
609c3937 5453 /* Output the label for the naked return from the function. */
4c33221c
UW
5454 if (naked_return_label)
5455 emit_label (naked_return_label);
6e3077c6 5456
25108646
AH
5457 /* @@@ This is a kludge. We want to ensure that instructions that
5458 may trap are not moved into the epilogue by scheduling, because
56d17681 5459 we don't always emit unwind information for the epilogue. */
f0a0390e 5460 if (cfun->can_throw_non_call_exceptions
677f3fa8 5461 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
56d17681 5462 emit_insn (gen_blockage ());
25108646 5463
7d69de61 5464 /* If stack protection is enabled for this function, check the guard. */
cb91fab0 5465 if (crtl->stack_protect_guard)
7d69de61
RH
5466 stack_protect_epilogue ();
5467
40184445
BS
5468 /* If we had calls to alloca, and this machine needs
5469 an accurate stack pointer to exit the function,
5470 insert some code to save and restore the stack pointer. */
5471 if (! EXIT_IGNORE_STACK
e3b5732b 5472 && cfun->calls_alloca)
40184445 5473 {
9eac0f2a 5474 rtx tem = 0, seq;
40184445 5475
9eac0f2a
RH
5476 start_sequence ();
5477 emit_stack_save (SAVE_FUNCTION, &tem);
5478 seq = get_insns ();
5479 end_sequence ();
5480 emit_insn_before (seq, parm_birth_insn);
5481
5482 emit_stack_restore (SAVE_FUNCTION, tem);
40184445
BS
5483 }
5484
c13fde05
RH
5485 /* ??? This should no longer be necessary since stupid is no longer with
5486 us, but there are some parts of the compiler (eg reload_combine, and
5487 sh mach_dep_reorg) that still try and compute their own lifetime info
5488 instead of using the general framework. */
5489 use_return_register ();
6f086dfc 5490}
278ed218
RH
5491
5492rtx
bd60bab2 5493get_arg_pointer_save_area (void)
278ed218 5494{
bd60bab2 5495 rtx ret = arg_pointer_save_area;
278ed218
RH
5496
5497 if (! ret)
5498 {
bd60bab2
JH
5499 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5500 arg_pointer_save_area = ret;
964be02f
RH
5501 }
5502
e3b5732b 5503 if (! crtl->arg_pointer_save_area_init)
964be02f
RH
5504 {
5505 rtx seq;
278ed218 5506
797a6ac1 5507 /* Save the arg pointer at the beginning of the function. The
964be02f 5508 generated stack slot may not be a valid memory address, so we
278ed218
RH
5509 have to check it and fix it if necessary. */
5510 start_sequence ();
1a8cb155 5511 emit_move_insn (validize_mem (copy_rtx (ret)),
2e3f842f 5512 crtl->args.internal_arg_pointer);
2f937369 5513 seq = get_insns ();
278ed218
RH
5514 end_sequence ();
5515
964be02f 5516 push_topmost_sequence ();
1cb2fc7b 5517 emit_insn_after (seq, entry_of_function ());
964be02f 5518 pop_topmost_sequence ();
c1d9a70a
ILT
5519
5520 crtl->arg_pointer_save_area_init = true;
278ed218
RH
5521 }
5522
5523 return ret;
5524}
bdac5f58 5525\f
cd9c1ca8
RH
5526/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5527 for the first time. */
bdac5f58 5528
0a1c58a2 5529static void
dc01c3d1 5530record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
bdac5f58 5531{
dc01c3d1 5532 rtx_insn *tmp;
cd9c1ca8 5533 htab_t hash = *hashp;
0a1c58a2 5534
cd9c1ca8
RH
5535 if (hash == NULL)
5536 *hashp = hash
5537 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5538
5539 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5540 {
5541 void **slot = htab_find_slot (hash, tmp, INSERT);
5542 gcc_assert (*slot == NULL);
5543 *slot = tmp;
5544 }
5545}
5546
cd400280
RH
 5547/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5548 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5549 insn, then record COPY as well. */
cd9c1ca8
RH
5550
5551void
cd400280 5552maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
cd9c1ca8 5553{
cd400280 5554 htab_t hash;
cd9c1ca8
RH
5555 void **slot;
5556
cd400280
RH
5557 hash = epilogue_insn_hash;
5558 if (!hash || !htab_find (hash, insn))
5559 {
5560 hash = prologue_insn_hash;
5561 if (!hash || !htab_find (hash, insn))
5562 return;
5563 }
cd9c1ca8 5564
cd400280 5565 slot = htab_find_slot (hash, copy, INSERT);
cd9c1ca8
RH
5566 gcc_assert (*slot == NULL);
5567 *slot = copy;
bdac5f58
TW
5568}
5569
cd9c1ca8
RH
5570/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5571 we can be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 5572
cd9c1ca8
RH
5573static bool
5574contains (const_rtx insn, htab_t hash)
bdac5f58 5575{
cd9c1ca8
RH
5576 if (hash == NULL)
5577 return false;
bdac5f58 5578
cd9c1ca8 5579 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
bdac5f58 5580 {
e0944870 5581 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
cd9c1ca8 5582 int i;
e0944870
DM
5583 for (i = seq->len () - 1; i >= 0; i--)
5584 if (htab_find (hash, seq->element (i)))
cd9c1ca8
RH
5585 return true;
5586 return false;
bdac5f58 5587 }
cd9c1ca8
RH
5588
5589 return htab_find (hash, insn) != NULL;
bdac5f58 5590}
5c7675e9
RH
5591
5592int
4f588890 5593prologue_epilogue_contains (const_rtx insn)
5c7675e9 5594{
cd9c1ca8 5595 if (contains (insn, prologue_insn_hash))
5c7675e9 5596 return 1;
cd9c1ca8 5597 if (contains (insn, epilogue_insn_hash))
5c7675e9
RH
5598 return 1;
5599 return 0;
5600}
bdac5f58 5601
170d8157 5602#ifdef HAVE_return
4c029f40
TV
5603/* Insert use of return register before the end of BB. */
5604
5605static void
5606emit_use_return_register_into_block (basic_block bb)
5607{
1e1b18c1 5608 rtx seq, insn;
4c029f40
TV
5609 start_sequence ();
5610 use_return_register ();
5611 seq = get_insns ();
5612 end_sequence ();
1e1b18c1
EB
5613 insn = BB_END (bb);
5614#ifdef HAVE_cc0
5615 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5616 insn = prev_cc0_setter (insn);
5617#endif
5618 emit_insn_before (seq, insn);
4c029f40
TV
5619}
5620
484db665
BS
5621
5622/* Create a return pattern, either simple_return or return, depending on
5623 simple_p. */
5624
5625static rtx
5626gen_return_pattern (bool simple_p)
5627{
5628#ifdef HAVE_simple_return
5629 return simple_p ? gen_simple_return () : gen_return ();
5630#else
5631 gcc_assert (!simple_p);
5632 return gen_return ();
5633#endif
5634}
5635
5636/* Insert an appropriate return pattern at the end of block BB. This
5637 also means updating block_for_insn appropriately. SIMPLE_P is
5638 the same as in gen_return_pattern and passed to it. */
69732dcb 5639
f30e25a3 5640void
484db665 5641emit_return_into_block (bool simple_p, basic_block bb)
69732dcb 5642{
484db665
BS
5643 rtx jump, pat;
5644 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5645 pat = PATTERN (jump);
26898771
BS
5646 if (GET_CODE (pat) == PARALLEL)
5647 pat = XVECEXP (pat, 0, 0);
5648 gcc_assert (ANY_RETURN_P (pat));
5649 JUMP_LABEL (jump) = pat;
69732dcb 5650}
484db665 5651#endif
69732dcb 5652
387748de
AM
5653/* Set JUMP_LABEL for a return insn. */
5654
5655void
5656set_return_jump_label (rtx returnjump)
5657{
5658 rtx pat = PATTERN (returnjump);
5659 if (GET_CODE (pat) == PARALLEL)
5660 pat = XVECEXP (pat, 0, 0);
5661 if (ANY_RETURN_P (pat))
5662 JUMP_LABEL (returnjump) = pat;
5663 else
5664 JUMP_LABEL (returnjump) = ret_rtx;
5665}
5666
ffe14686
AM
5667#if defined (HAVE_return) || defined (HAVE_simple_return)
5668/* Return true if there are any active insns between HEAD and TAIL. */
f30e25a3 5669bool
ffd80b43 5670active_insn_between (rtx_insn *head, rtx_insn *tail)
39d52ae5 5671{
ffe14686
AM
5672 while (tail)
5673 {
5674 if (active_insn_p (tail))
5675 return true;
5676 if (tail == head)
5677 return false;
5678 tail = PREV_INSN (tail);
5679 }
5680 return false;
5681}
5682
 5683/* LAST_BB is a block that exits and is empty of active instructions.
5684 Examine its predecessors for jumps that can be converted to
5685 (conditional) returns. */
f30e25a3 5686vec<edge>
ffe14686 5687convert_jumps_to_returns (basic_block last_bb, bool simple_p,
9771b263 5688 vec<edge> unconverted ATTRIBUTE_UNUSED)
ffe14686
AM
5689{
5690 int i;
5691 basic_block bb;
39d52ae5 5692 rtx label;
ffe14686
AM
5693 edge_iterator ei;
5694 edge e;
ef062b13 5695 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
39d52ae5 5696
ffe14686 5697 FOR_EACH_EDGE (e, ei, last_bb->preds)
fefa31b5 5698 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
9771b263 5699 src_bbs.quick_push (e->src);
ffe14686
AM
5700
5701 label = BB_HEAD (last_bb);
5702
9771b263 5703 FOR_EACH_VEC_ELT (src_bbs, i, bb)
39d52ae5 5704 {
68a1a6c0 5705 rtx_insn *jump = BB_END (bb);
ffe14686
AM
5706
5707 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5708 continue;
5709
5710 e = find_edge (bb, last_bb);
5711
5712 /* If we have an unconditional jump, we can replace that
5713 with a simple return instruction. */
5714 if (simplejump_p (jump))
5715 {
5716 /* The use of the return register might be present in the exit
5717 fallthru block. Either:
5718 - removing the use is safe, and we should remove the use in
5719 the exit fallthru block, or
5720 - removing the use is not safe, and we should add it here.
5721 For now, we conservatively choose the latter. Either of the
5722 2 helps in crossjumping. */
5723 emit_use_return_register_into_block (bb);
5724
5725 emit_return_into_block (simple_p, bb);
5726 delete_insn (jump);
5727 }
5728
5729 /* If we have a conditional jump branching to the last
5730 block, we can try to replace that with a conditional
5731 return instruction. */
5732 else if (condjump_p (jump))
5733 {
5734 rtx dest;
5735
5736 if (simple_p)
5737 dest = simple_return_rtx;
5738 else
5739 dest = ret_rtx;
5740 if (!redirect_jump (jump, dest, 0))
5741 {
5742#ifdef HAVE_simple_return
5743 if (simple_p)
5744 {
5745 if (dump_file)
5746 fprintf (dump_file,
5747 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5748 unconverted.safe_push (e);
ffe14686
AM
5749 }
5750#endif
5751 continue;
5752 }
5753
5754 /* See comment in simplejump_p case above. */
5755 emit_use_return_register_into_block (bb);
5756
5757 /* If this block has only one successor, it both jumps
5758 and falls through to the fallthru block, so we can't
5759 delete the edge. */
5760 if (single_succ_p (bb))
5761 continue;
5762 }
5763 else
5764 {
5765#ifdef HAVE_simple_return
5766 if (simple_p)
5767 {
5768 if (dump_file)
5769 fprintf (dump_file,
5770 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5771 unconverted.safe_push (e);
ffe14686
AM
5772 }
5773#endif
5774 continue;
5775 }
5776
5777 /* Fix up the CFG for the successful change we just made. */
fefa31b5 5778 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
d3b623c7 5779 e->flags &= ~EDGE_CROSSING;
39d52ae5 5780 }
9771b263 5781 src_bbs.release ();
ffe14686 5782 return unconverted;
39d52ae5
BS
5783}
5784
ffe14686 5785/* Emit a return insn for the exit fallthru block. */
f30e25a3 5786basic_block
ffe14686
AM
5787emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5788{
5789 basic_block last_bb = exit_fallthru_edge->src;
5790
5791 if (JUMP_P (BB_END (last_bb)))
5792 {
5793 last_bb = split_edge (exit_fallthru_edge);
5794 exit_fallthru_edge = single_succ_edge (last_bb);
5795 }
5796 emit_barrier_after (BB_END (last_bb));
5797 emit_return_into_block (simple_p, last_bb);
5798 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5799 return last_bb;
5800}
5801#endif
5802
5803
9faa82d8 5804/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58 5805 this into place with notes indicating where the prologue ends and where
484db665
BS
5806 the epilogue begins. Update the basic block information when possible.
5807
5808 Notes on epilogue placement:
5809 There are several kinds of edges to the exit block:
5810 * a single fallthru edge from LAST_BB
5811 * possibly, edges from blocks containing sibcalls
5812 * possibly, fake edges from infinite loops
5813
5814 The epilogue is always emitted on the fallthru edge from the last basic
5815 block in the function, LAST_BB, into the exit block.
5816
5817 If LAST_BB is empty except for a label, it is the target of every
5818 other basic block in the function that ends in a return. If a
5819 target has a return or simple_return pattern (possibly with
5820 conditional variants), these basic blocks can be changed so that a
5821 return insn is emitted into them, and their target is adjusted to
5822 the real exit block.
5823
5824 Notes on shrink wrapping: We implement a fairly conservative
5825 version of shrink-wrapping rather than the textbook one. We only
5826 generate a single prologue and a single epilogue. This is
5827 sufficient to catch a number of interesting cases involving early
5828 exits.
5829
5830 First, we identify the blocks that require the prologue to occur before
5831 them. These are the ones that modify a call-saved register, or reference
5832 any of the stack or frame pointer registers. To simplify things, we then
5833 mark everything reachable from these blocks as also requiring a prologue.
5834 This takes care of loops automatically, and avoids the need to examine
5835 whether MEMs reference the frame, since it is sufficient to check for
5836 occurrences of the stack or frame pointer.
5837
5838 We then compute the set of blocks for which the need for a prologue
5839 is anticipatable (borrowing terminology from the shrink-wrapping
5840 description in Muchnick's book). These are the blocks which either
5841 require a prologue themselves, or those that have only successors
5842 where the prologue is anticipatable. The prologue needs to be
5843 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5844 is not. For the moment, we ensure that only one such edge exists.
5845
5846 The epilogue is placed as described above, but we make a
5847 distinction between inserting return and simple_return patterns
5848 when modifying other blocks that end in a return. Blocks that end
5849 in a sibcall omit the sibcall_epilogue if the block is not in
5850 ANTIC. */
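/* Illustrative example (not part of the original sources): for

     void f (int x) { if (x) heavy_work (); }

   shrink-wrapping lets the early-return path for x == 0 run without any
   prologue or epilogue code; the single prologue is inserted on the edge
   into the block that calls heavy_work (), and the single epilogue stays
   on the fallthru edge into the exit block.  */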
bdac5f58 5851
6fb5fa3c
DB
5852static void
5853thread_prologue_and_epilogue_insns (void)
bdac5f58 5854{
7458026b 5855 bool inserted;
484db665 5856#ifdef HAVE_simple_return
6e1aa848 5857 vec<edge> unconverted_simple_returns = vNULL;
ffe14686 5858 bitmap_head bb_flags;
484db665 5859#endif
9c8348cf 5860 rtx_insn *returnjump;
9c8348cf 5861 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
dc01c3d1 5862 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
484db665 5863 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
628f6a4e 5864 edge_iterator ei;
484db665
BS
5865
5866 df_analyze ();
e881bb1b 5867
fefa31b5 5868 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
7458026b
ILT
5869
5870 inserted = false;
9c8348cf
DM
5871 epilogue_end = NULL;
5872 returnjump = NULL;
7458026b
ILT
5873
5874 /* Can't deal with multiple successors of the entry block at the
5875 moment. Function should always have at least one entry
5876 point. */
fefa31b5
DM
5877 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5878 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
484db665
BS
5879 orig_entry_edge = entry_edge;
5880
dc01c3d1 5881 split_prologue_seq = NULL;
7458026b
ILT
5882 if (flag_split_stack
5883 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5884 == NULL))
5885 {
5886#ifndef HAVE_split_stack_prologue
5887 gcc_unreachable ();
5888#else
5889 gcc_assert (HAVE_split_stack_prologue);
5890
5891 start_sequence ();
5892 emit_insn (gen_split_stack_prologue ());
484db665 5893 split_prologue_seq = get_insns ();
7458026b
ILT
5894 end_sequence ();
5895
484db665 5896 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5368224f 5897 set_insn_locations (split_prologue_seq, prologue_location);
7458026b
ILT
5898#endif
5899 }
5900
dc01c3d1 5901 prologue_seq = NULL;
bdac5f58
TW
5902#ifdef HAVE_prologue
5903 if (HAVE_prologue)
5904 {
e881bb1b 5905 start_sequence ();
dc01c3d1 5906 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
e881bb1b 5907 emit_insn (seq);
bdac5f58 5908
b8698a0f 5909 /* Insert an explicit USE for the frame pointer
6fb5fa3c 5910 if the profiling is on and the frame pointer is required. */
e3b5732b 5911 if (crtl->profile && frame_pointer_needed)
c41c1387 5912 emit_use (hard_frame_pointer_rtx);
6fb5fa3c 5913
bdac5f58 5914 /* Retain a map of the prologue insns. */
cd9c1ca8 5915 record_insns (seq, NULL, &prologue_insn_hash);
56d17681 5916 emit_note (NOTE_INSN_PROLOGUE_END);
b8698a0f 5917
56d17681
UB
5918 /* Ensure that instructions are not moved into the prologue when
5919 profiling is on. The call to the profiling routine can be
5920 emitted within the live range of a call-clobbered register. */
3c5273a9 5921 if (!targetm.profile_before_prologue () && crtl->profile)
56d17681 5922 emit_insn (gen_blockage ());
9185a8d5 5923
484db665 5924 prologue_seq = get_insns ();
e881bb1b 5925 end_sequence ();
5368224f 5926 set_insn_locations (prologue_seq, prologue_location);
484db665
BS
5927 }
5928#endif
e881bb1b 5929
ffe14686 5930#ifdef HAVE_simple_return
484db665
BS
5931 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5932
484db665
BS
5933 /* Try to perform a kind of shrink-wrapping, making sure the
5934 prologue/epilogue is emitted only around those parts of the
5935 function that require it. */
5936
f30e25a3 5937 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
bdac5f58 5938#endif
bdac5f58 5939
484db665
BS
5940 if (split_prologue_seq != NULL_RTX)
5941 {
f4b31a33 5942 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
484db665
BS
5943 inserted = true;
5944 }
5945 if (prologue_seq != NULL_RTX)
5946 {
5947 insert_insn_on_edge (prologue_seq, entry_edge);
5948 inserted = true;
5949 }
5950
19d3c25c
RH
5951 /* If the exit block has no non-fake predecessors, we don't need
5952 an epilogue. */
fefa31b5 5953 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
19d3c25c
RH
5954 if ((e->flags & EDGE_FAKE) == 0)
5955 break;
5956 if (e == NULL)
5957 goto epilogue_done;
5958
fefa31b5 5959 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
484db665 5960
fefa31b5 5961 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
ffe14686 5962
ffe14686
AM
5963#ifdef HAVE_simple_return
5964 if (entry_edge != orig_entry_edge)
f30e25a3
ZC
5965 exit_fallthru_edge
5966 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5967 &unconverted_simple_returns,
5968 &returnjump);
484db665 5969#endif
ffe14686
AM
5970#ifdef HAVE_return
5971 if (HAVE_return)
5972 {
5973 if (exit_fallthru_edge == NULL)
5974 goto epilogue_done;
69732dcb 5975
ffe14686
AM
5976 if (optimize)
5977 {
5978 basic_block last_bb = exit_fallthru_edge->src;
484db665 5979
ffe14686
AM
5980 if (LABEL_P (BB_HEAD (last_bb))
5981 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6e1aa848 5982 convert_jumps_to_returns (last_bb, false, vNULL);
ffe14686 5983
1ff2fd21
AM
5984 if (EDGE_COUNT (last_bb->preds) != 0
5985 && single_succ_p (last_bb))
484db665 5986 {
ffe14686
AM
5987 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5988 epilogue_end = returnjump = BB_END (last_bb);
484db665 5989#ifdef HAVE_simple_return
ffe14686
AM
5990 /* Emitting the return may add a basic block.
5991 Fix bb_flags for the added block. */
5992 if (last_bb != exit_fallthru_edge->src)
5993 bitmap_set_bit (&bb_flags, last_bb->index);
484db665 5994#endif
ffe14686 5995 goto epilogue_done;
69732dcb 5996 }
2dd8bc01 5997 }
69732dcb
RH
5998 }
5999#endif
cd9c1ca8
RH
6000
6001 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6002 this marker for the splits of EH_RETURN patterns, and nothing else
6003 uses the flag in the meantime. */
6004 epilogue_completed = 1;
6005
6006#ifdef HAVE_eh_return
6007 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6008 some targets, these get split to a special version of the epilogue
6009 code. In order to be able to properly annotate these with unwind
6010 info, try to split them now. If we get a valid split, drop an
6011 EPILOGUE_BEG note and mark the insns as epilogue insns. */
fefa31b5 6012 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
cd9c1ca8 6013 {
691fe203 6014 rtx_insn *prev, *last, *trial;
cd9c1ca8
RH
6015
6016 if (e->flags & EDGE_FALLTHRU)
6017 continue;
6018 last = BB_END (e->src);
6019 if (!eh_returnjump_p (last))
6020 continue;
6021
6022 prev = PREV_INSN (last);
6023 trial = try_split (PATTERN (last), last, 1);
6024 if (trial == last)
6025 continue;
6026
6027 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6028 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6029 }
6030#endif
6031
484db665
BS
6032 /* If nothing falls through into the exit block, we don't need an
6033 epilogue. */
623a66fa 6034
484db665 6035 if (exit_fallthru_edge == NULL)
623a66fa
R
6036 goto epilogue_done;
6037
bdac5f58
TW
6038#ifdef HAVE_epilogue
6039 if (HAVE_epilogue)
6040 {
19d3c25c 6041 start_sequence ();
2e040219 6042 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
dc01c3d1 6043 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
55c623b5
UW
6044 if (seq)
6045 emit_jump_insn (seq);
bdac5f58 6046
19d3c25c 6047 /* Retain a map of the epilogue insns. */
cd9c1ca8 6048 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 6049 set_insn_locations (seq, epilogue_location);
bdac5f58 6050
2f937369 6051 seq = get_insns ();
484db665 6052 returnjump = get_last_insn ();
718fe406 6053 end_sequence ();
e881bb1b 6054
484db665 6055 insert_insn_on_edge (seq, exit_fallthru_edge);
7458026b 6056 inserted = true;
dc0ff1c8
BS
6057
6058 if (JUMP_P (returnjump))
387748de 6059 set_return_jump_label (returnjump);
bdac5f58 6060 }
623a66fa 6061 else
bdac5f58 6062#endif
623a66fa
R
6063 {
6064 basic_block cur_bb;
6065
484db665 6066 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
623a66fa
R
6067 goto epilogue_done;
6068 /* We have a fall-through edge to the exit block, the source is not
6069 at the end of the function, and there will be an assembler epilogue
6070 at the end of the function.
6071 We can't use force_nonfallthru here, because that would try to
484db665 6072 use return. Inserting a jump 'by hand' is extremely messy, so
623a66fa 6073 we take advantage of cfg_layout_finalize using
484db665 6074 fixup_fallthru_exit_predecessor. */
35b6b437 6075 cfg_layout_initialize (0);
11cd3bed 6076 FOR_EACH_BB_FN (cur_bb, cfun)
24bd1a0b
DB
6077 if (cur_bb->index >= NUM_FIXED_BLOCKS
6078 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 6079 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
6080 cfg_layout_finalize ();
6081 }
cf103ca4 6082
19d3c25c 6083epilogue_done:
484db665 6084
a8ba47cb 6085 default_rtl_profile ();
e881bb1b 6086
ca1117cc 6087 if (inserted)
30a873c3 6088 {
cf103ca4
EB
6089 sbitmap blocks;
6090
30a873c3
ZD
6091 commit_edge_insertions ();
6092
cf103ca4 6093 /* Look for basic blocks within the prologue insns. */
8b1c6fd7 6094 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 6095 bitmap_clear (blocks);
d7c028c0
LC
6096 bitmap_set_bit (blocks, entry_edge->dest->index);
6097 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
cf103ca4
EB
6098 find_many_sub_basic_blocks (blocks);
6099 sbitmap_free (blocks);
6100
30a873c3
ZD
6101 /* The epilogue insns we inserted may cause the exit edge to no longer
6102 be fallthru. */
fefa31b5 6103 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
30a873c3
ZD
6104 {
6105 if (((e->flags & EDGE_FALLTHRU) != 0)
6106 && returnjump_p (BB_END (e->src)))
6107 e->flags &= ~EDGE_FALLTHRU;
6108 }
6109 }
0a1c58a2 6110
484db665 6111#ifdef HAVE_simple_return
f30e25a3
ZC
6112 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
6113 unconverted_simple_returns);
484db665
BS
6114#endif
6115
0a1c58a2
JL
6116#ifdef HAVE_sibcall_epilogue
6117 /* Emit sibling epilogues before any sibling call sites. */
fefa31b5
DM
6118 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6119 ei_safe_edge (ei));
6120 )
0a1c58a2
JL
6121 {
6122 basic_block bb = e->src;
691fe203 6123 rtx_insn *insn = BB_END (bb);
484db665 6124 rtx ep_seq;
0a1c58a2 6125
4b4bf941 6126 if (!CALL_P (insn)
484db665 6127 || ! SIBLING_CALL_P (insn)
ffe14686 6128#ifdef HAVE_simple_return
484db665 6129 || (entry_edge != orig_entry_edge
ffe14686
AM
6130 && !bitmap_bit_p (&bb_flags, bb->index))
6131#endif
6132 )
628f6a4e
BE
6133 {
6134 ei_next (&ei);
6135 continue;
6136 }
0a1c58a2 6137
484db665
BS
6138 ep_seq = gen_sibcall_epilogue ();
6139 if (ep_seq)
6140 {
6141 start_sequence ();
6142 emit_note (NOTE_INSN_EPILOGUE_BEG);
6143 emit_insn (ep_seq);
dc01c3d1 6144 rtx_insn *seq = get_insns ();
484db665 6145 end_sequence ();
0a1c58a2 6146
484db665
BS
6147 /* Retain a map of the epilogue insns. Used in life analysis to
6148 avoid getting rid of sibcall epilogue insns. Do this before we
6149 actually emit the sequence. */
6150 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 6151 set_insn_locations (seq, epilogue_location);
2f937369 6152
484db665
BS
6153 emit_insn_before (seq, insn);
6154 }
628f6a4e 6155 ei_next (&ei);
0a1c58a2
JL
6156 }
6157#endif
ca1117cc 6158
86c82654
RH
6159#ifdef HAVE_epilogue
6160 if (epilogue_end)
6161 {
9c8348cf 6162 rtx_insn *insn, *next;
86c82654
RH
6163
6164 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 6165 There is no need, however, to be quite so anal about the existence
071a42f9 6166 of such a note. Also possibly move
84c1fa24
UW
6167 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6168 info generation. */
718fe406 6169 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
6170 {
6171 next = NEXT_INSN (insn);
b8698a0f 6172 if (NOTE_P (insn)
a38e7aa5 6173 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
86c82654
RH
6174 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6175 }
6176 }
6177#endif
6fb5fa3c 6178
ffe14686 6179#ifdef HAVE_simple_return
484db665 6180 bitmap_clear (&bb_flags);
ffe14686 6181#endif
484db665 6182
6fb5fa3c
DB
6183 /* Threading the prologue and epilogue changes the artificial refs
6184 in the entry and exit blocks. */
6185 epilogue_completed = 1;
6186 df_update_entry_exit_and_calls ();
bdac5f58
TW
6187}
6188
cd9c1ca8
RH
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                {
                  if (NOTE_P (note)
                      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                    break;
                }
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx_insn *insn, *first = NULL, *note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}

/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
\f
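
/* Record in crtl->uses_only_leaf_regs whether the optimized function body
   uses only "leaf" registers, on targets that define LEAF_REGISTERS.
   This is the work carried out by the *leaf_regs pass below.  */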
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}

/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
        func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
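  /* Peel off unnamed pointer and array wrappers so that we record the
     underlying named type instead.  */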
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        {
          /* So this might be a type referenced by a global variable.
             Record that type so that we can later decide to emit its
             debug information.  */
          vec_safe_push (types_used_by_cur_var_decl, t);
        }
    }
}

/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
                         types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash
          = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc<types_used_by_vars_entry> ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}

namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
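
/* The guts of the pro_and_epilogue pass below: emit RTL for the prologue
   and epilogue of the current function, clean up the CFG around them, and
   output the stack usage info when it has been requested.  */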
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}

namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
\f

/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
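
/* Helper for the asmcons pass below.  INSN is an insn containing a single
   asm statement; P_SETS points to the start of its pattern vector and
   NOUTPUTS is that vector's length (1 for a lone SET).  Rewrite matched
   input operands so that they use the same pseudo as the corresponding
   output, as described in the comment above.  */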
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change in once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (TREE_CODE (d) == VAR_DECL);
  vec_safe_push (fun->local_decls, d);
}

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints
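
/* Walk all asm statements in FUN and let match_asm_constraints_1 rewrite
   their matched operands; insn rescanning is deferred and finished via the
   returned TODO_df_finish flag.  */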
unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}

#include "gt-function.h"