/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
0cbd9993 40#include "rtl-error.h"
6f086dfc
RS
41#include "tree.h"
42#include "flags.h"
1ef08c63 43#include "except.h"
6f086dfc 44#include "function.h"
6f086dfc 45#include "expr.h"
c6b97fac 46#include "optabs.h"
e78d8e51 47#include "libfuncs.h"
6f086dfc
RS
48#include "regs.h"
49#include "hard-reg-set.h"
50#include "insn-config.h"
51#include "recog.h"
52#include "output.h"
bdac5f58 53#include "basic-block.h"
e2500fed 54#include "hashtab.h"
87ff9c8e 55#include "ggc.h"
b1474bb7 56#include "tm_p.h"
c0e7830f 57#include "integrate.h"
7afff7cf 58#include "langhooks.h"
61f71b34 59#include "target.h"
623a66fa 60#include "cfglayout.h"
726a989a 61#include "gimple.h"
ef330312 62#include "tree-pass.h"
7d69de61 63#include "predict.h"
6fb5fa3c
DB
64#include "df.h"
65#include "timevar.h"
e3df376d 66#include "vecprim.h"
7d69de61 67
5576d6f2
TT
68/* So we can assign to cfun in this file. */
69#undef cfun
70
95f3f59e
JDA
71#ifndef STACK_ALIGNMENT_NEEDED
72#define STACK_ALIGNMENT_NEEDED 1
73#endif
74
975f3818
RS
75#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
76
293e3de4
RS
77/* Some systems use __main in a way incompatible with its use in gcc, in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
0f41302f 80 must define both, or neither. */
293e3de4
RS
81#ifndef NAME__MAIN
82#define NAME__MAIN "__main"
293e3de4
RS
83#endif
84
/* Round a value down to the nearest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

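/* For example, with ALIGN == 8: FLOOR_ROUND (-13, 8) yields -16 and
   CEIL_ROUND (13, 8) yields 16.  The masking form behaves consistently
   for negative values, where C division would round toward zero.  */
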
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;
\f

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Stack of nested functions; keeps track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}
\f
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}

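/* As a concrete illustration (added here; the numbers are target-dependent):
   with a 32-bit Pmode and UNITS_PER_WORD == 4, local objects totalling more
   than 0x80000000 - 256 bytes trigger the error above.  */
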
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}

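/* A worked example (hypothetical values, for illustration): with
   FRAME_GROWS_DOWNWARD, frame_phase == 0, and a free area of START == -16
   and LENGTH == 16, a request of SIZE == 8 with ALIGNMENT == 8 computes
   this_frame_offset = FLOOR_ROUND (-16 + 16 - 8, 8) == -8.  That lies
   within [-16, 0), so *POFFSET becomes -8 and the function returns true
   without having to grow the frame.  */
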
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
                      int align,
                      bool reduce_alignment_ok ATTRIBUTE_UNUSED)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert (reduce_alignment_ok
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      struct frame_space **psp;

      for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
        {
          struct frame_space *space = *psp;
          if (!try_fit_stack_local (space->start, space->length, size,
                                    alignment, &slot_offset))
            continue;
          *psp = space->next;
          if (slot_offset > space->start)
            add_frame_space (space->start, slot_offset);
          if (slot_offset + size < space->start + space->length)
            add_frame_space (slot_offset + size,
                             space->start + space->length);
          goto found_space;
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (slot_offset > frame_offset)
        add_frame_space (frame_offset, slot_offset);
      if (slot_offset + size < old_frame_offset)
        add_frame_space (slot_offset + size, old_frame_offset);
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (slot_offset > old_frame_offset)
        add_frame_space (old_frame_offset, slot_offset);
      if (slot_offset + size < frame_offset)
        add_frame_space (slot_offset + size, frame_offset);
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with last parameter as false.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, false);
}
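
/* For illustration (a hypothetical call, not from this file): a pass
   needing a word-sized spill slot might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   ALIGN == 0 takes the alignment from SImode; until virtual registers
   are instantiated, the returned MEM is based on virtual_stack_vars_rtx
   rather than the hard frame pointer.  */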
\f
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    *slot = NULL;
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  htab_traverse (temp_slot_address_table,
                 remove_unused_temp_slot_addresses_1,
                 NULL);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
                            int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc_temp_slot ();
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
                                    (mode == BLKmode
                                     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
                                     : size),
                                    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
                                  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
\f
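/* For illustration (a hypothetical usage pattern, not code from this
   file): expansion code typically brackets statement-level temporaries
   with the nesting-level calls defined later in this file:

     push_temp_slots ();
     temp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... emit code that stores into TEMP ...
     pop_temp_slots ();

   With KEEP == 0, popping the level makes the slot available for reuse
   by later requests (subject to objects_must_conflict_p above).  */
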
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate an object of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
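
/* For example (values invented for illustration): two adjacent BLKmode
   slots with { base_offset == 0, full_size == 16 } and { base_offset == 16,
   full_size == 8 } satisfy the first test above, so Q is merged into P,
   leaving a single slot with full_size == 24.  */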
\f
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
        {
          next = p->next;

          if (p->addr_taken)
            move_slot_to_level (p, temp_slot_level - 1);
        }

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
            {
              next = q->next;

              if (p != q && q->addr_taken)
                move_slot_to_level (q, temp_slot_level - 1);
            }

          move_slot_to_level (p, temp_slot_level - 1);
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
        {
          make_slot_available (p);
          some_available = true;
        }
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
                                               temp_slot_address_hash,
                                               temp_slot_address_eq,
                                               NULL);
  else
    htab_empty (temp_slot_address_table);
}
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif

\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}

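/* For example (illustrative; the offsets are filled in when virtual
   register instantiation runs): a use of virtual_stack_vars_rtx comes
   back as frame_pointer_rtx with *POFFSET == var_offset, so an address
   like (plus (reg virtual-stack-vars) (const_int 8)) can be rewritten
   as (plus (reg fp) (const_int var_offset + 8)).  */
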
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
        {
          *loc = plus_constant (new_rtx, offset);
          if (changed)
            *changed = true;
        }
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
        {
          new_rtx = plus_constant (new_rtx, offset);
          *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
          if (changed)
            *changed = true;
          return -1;
        }

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
         we can commute the PLUS and SUBREG because pointers into the
         frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}

bbf9b913
RH
1486/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1487 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1488
1489static void
bbf9b913 1490instantiate_virtual_regs_in_insn (rtx insn)
5a73491b 1491{
bbf9b913
RH
1492 HOST_WIDE_INT offset;
1493 int insn_code, i;
9325973e 1494 bool any_change = false;
82d6e6fc 1495 rtx set, new_rtx, x, seq;
32e66afd 1496
bbf9b913
RH
1497 /* There are some special cases to be handled first. */
1498 set = single_set (insn);
1499 if (set)
32e66afd 1500 {
bbf9b913
RH
1501 /* We're allowed to assign to a virtual register. This is interpreted
1502 to mean that the underlying register gets assigned the inverse
1503 transformation. This is used, for example, in the handling of
1504 non-local gotos. */
82d6e6fc
KG
1505 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1506 if (new_rtx)
bbf9b913
RH
1507 {
1508 start_sequence ();
32e66afd 1509
bbf9b913 1510 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
82d6e6fc 1511 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
bbf9b913 1512 GEN_INT (-offset));
82d6e6fc
KG
1513 x = force_operand (x, new_rtx);
1514 if (x != new_rtx)
1515 emit_move_insn (new_rtx, x);
5a73491b 1516
bbf9b913
RH
1517 seq = get_insns ();
1518 end_sequence ();
5a73491b 1519
bbf9b913
RH
1520 emit_insn_before (seq, insn);
1521 delete_insn (insn);
1522 return;
1523 }
5a73491b 1524
bbf9b913
RH
1525 /* Handle a straight copy from a virtual register by generating a
1526 new add insn. The difference between this and falling through
1527 to the generic case is avoiding a new pseudo and eliminating a
1528 move insn in the initial rtl stream. */
82d6e6fc
KG
1529 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1530 if (new_rtx && offset != 0
bbf9b913
RH
1531 && REG_P (SET_DEST (set))
1532 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1533 {
1534 start_sequence ();
5a73491b 1535
bbf9b913 1536 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
82d6e6fc 1537 new_rtx, GEN_INT (offset), SET_DEST (set),
bbf9b913
RH
1538 1, OPTAB_LIB_WIDEN);
1539 if (x != SET_DEST (set))
1540 emit_move_insn (SET_DEST (set), x);
770ae6cc 1541
bbf9b913
RH
1542 seq = get_insns ();
1543 end_sequence ();
87ce34d6 1544
bbf9b913
RH
1545 emit_insn_before (seq, insn);
1546 delete_insn (insn);
87ce34d6 1547 return;
bbf9b913 1548 }
5a73491b 1549
bbf9b913 1550 extract_insn (insn);
9325973e 1551 insn_code = INSN_CODE (insn);
5a73491b 1552
bbf9b913
RH
1553 /* Handle a plus involving a virtual register by determining if the
1554 operands remain valid if they're modified in place. */
1555 if (GET_CODE (SET_SRC (set)) == PLUS
1556 && recog_data.n_operands >= 3
1557 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1558 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
481683e1 1559 && CONST_INT_P (recog_data.operand[2])
82d6e6fc 1560 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
bbf9b913
RH
1561 {
1562 offset += INTVAL (recog_data.operand[2]);
5a73491b 1563
bbf9b913 1564 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1565 if (offset == 0
1566 && REG_P (SET_DEST (set))
1567 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1568 {
1569 start_sequence ();
82d6e6fc 1570 emit_move_insn (SET_DEST (set), new_rtx);
bbf9b913
RH
1571 seq = get_insns ();
1572 end_sequence ();
d1405722 1573
bbf9b913
RH
1574 emit_insn_before (seq, insn);
1575 delete_insn (insn);
1576 return;
1577 }
d1405722 1578
bbf9b913 1579 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1580
1581 /* Using validate_change and apply_change_group here leaves
1582 recog_data in an invalid state. Since we know exactly what
1583 we want to check, do those two by hand. */
82d6e6fc 1584 if (safe_insn_predicate (insn_code, 1, new_rtx)
bbf9b913
RH
1585 && safe_insn_predicate (insn_code, 2, x))
1586 {
82d6e6fc 1587 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
bbf9b913
RH
1588 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1589 any_change = true;
9325973e
RH
1590
1591 /* Fall through into the regular operand fixup loop in
1592 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1593 }
1594 }
1595 }
d1405722 1596 else
9325973e
RH
1597 {
1598 extract_insn (insn);
1599 insn_code = INSN_CODE (insn);
1600 }
5dc96d60 1601
bbf9b913
RH
1602 /* In the general case, we expect virtual registers to appear only in
1603 operands, and then only as either bare registers or inside memories. */
1604 for (i = 0; i < recog_data.n_operands; ++i)
1605 {
1606 x = recog_data.operand[i];
1607 switch (GET_CODE (x))
1608 {
1609 case MEM:
1610 {
1611 rtx addr = XEXP (x, 0);
1612 bool changed = false;
1613
1614 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1615 if (!changed)
1616 continue;
1617
1618 start_sequence ();
1619 x = replace_equiv_address (x, addr);
a5bfb13a
MM
1620 /* It may happen that the address with the virtual reg
1621 was valid (e.g. based on the virtual stack reg, which might
1622 be acceptable to the predicates with all offsets), whereas
1623 the address now isn't anymore, for instance when the address
1624 is still offsetted, but the base reg isn't virtual-stack-reg
1625 anymore. Below we would do a force_reg on the whole operand,
1626 but this insn might actually only accept memory. Hence,
1627 before doing that last resort, try to reload the address into
1628 a register, so this operand stays a MEM. */
1629 if (!safe_insn_predicate (insn_code, i, x))
1630 {
1631 addr = force_reg (GET_MODE (addr), addr);
1632 x = replace_equiv_address (x, addr);
1633 }
bbf9b913
RH
1634 seq = get_insns ();
1635 end_sequence ();
1636 if (seq)
1637 emit_insn_before (seq, insn);
1638 }
1639 break;
1640
1641 case REG:
82d6e6fc
KG
1642 new_rtx = instantiate_new_reg (x, &offset);
1643 if (new_rtx == NULL)
bbf9b913
RH
1644 continue;
1645 if (offset == 0)
82d6e6fc 1646 x = new_rtx;
bbf9b913
RH
1647 else
1648 {
1649 start_sequence ();
6f086dfc 1650
              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       GEN_INT (offset), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (offset != 0)
            {
              start_sequence ();
              new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
                                             GEN_INT (offset), NULL_RTX,
                                             1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
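
/* Illustrative note (not part of the original source): assuming a
   target where STARTING_FRAME_OFFSET is -16, the "straight copy"
   case above rewrites

     (set (reg:SI 100) (reg:SI virtual-stack-vars))

   directly into

     (set (reg:SI 100) (plus:SI (reg:SI frame-pointer) (const_int -16)))

   rather than materializing the sum in a fresh pseudo and then
   emitting a separate move, as the generic operand fixup loop would.  */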

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  VEC_free (tree, gc, cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ADDR_VEC
            || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
            || GET_CODE (PATTERN (insn)) == ASM_INPUT)
          continue;
        else if (DEBUG_INSN_P (insn))
          for_each_rtx (&INSN_VAR_LOCATION (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
        else
          instantiate_virtual_regs_in_insn (insn);

        if (INSN_DELETED_P (insn))
          continue;

        for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
                        instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  /* See allocate_dynamic_stack_space for the rationale.  */
#ifdef SETJMP_VIA_SAVE_AREA
  if (flag_stack_usage && cfun->calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      dynamic_offset = (dynamic_offset + align - 1) / align * align;
      current_function_dynamic_stack_size
        += current_function_dynamic_alloc_count * dynamic_offset;
    }
#endif

  return 0;
}
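
/* Illustrative arithmetic (not part of the original source): the
   SETJMP_VIA_SAVE_AREA rounding above is a plain round-up to the
   stack boundary.  For instance, with a PREFERRED_STACK_BOUNDARY of
   128 bits, align is 16 bytes, and a dynamic_offset of 20 becomes
   (20 + 15) / 16 * 16 == 32.  */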

struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          fntype = (fndecl
                    ? TREE_TYPE (fndecl)
                    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
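
/* Illustrative note (not part of the original source): the final loop
   above catches, e.g., a value whose mode spans several hard return
   registers of which one is call-saved; such a value cannot be
   returned in registers and is forced into memory.  Types with
   nontrivial copy semantics are typically marked TREE_ADDRESSABLE by
   the front end and are rejected by the earlier checks instead.  */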

/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
         types containing methods, otherwise the methods won't be callable
         from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
        return false;
      break;
    default:
      break;
    }

  return true;
}
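/* Illustrative note (not part of the original source): under the
   checks above, a `volatile int' local fails the TREE_SIDE_EFFECTS
   test and an `int' whose address is taken fails the TREE_ADDRESSABLE
   test, so both stay on the stack; an ordinary scalar survives every
   test and is given a pseudo register.  */
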
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (ca, mode, type, named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  CUMULATIVE_ARGS args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split each such entry
   into two entries of the component type, updating ARGS in place.  */

static void
split_complex_args (VEC(tree, heap) **args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (tree, *args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          p = copy_node (p);
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);
          VEC_replace (tree, *args, i, p);

          /* Build a second synthetic decl.  */
          decl = build_decl (EXPR_LOCATION (p),
                             PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);
          VEC_safe_insert (tree, heap, *args, ++i, decl);
        }
    }
}
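/* Illustrative example (not part of the original source): on a target
   whose split_complex_arg hook accepts the type, a parameter declared
   `_Complex double z' is rewritten above into two consecutive
   `double' PARM_DECLs, the original node narrowed to the real part
   and a synthetic decl added for the imaginary part.  */
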
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static VEC(tree, heap) *
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  VEC(tree, heap) *fnargs = NULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    VEC_safe_push (tree, heap, fnargs, arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
                         PARM_DECL, NULL_TREE, type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      VEC_safe_insert (tree, heap, fnargs, 0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
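
/* Illustrative example (not part of the original source): for a
   function `struct big f (void)' on a target whose struct_value_rtx
   hook returns 0, the list built above gains a leading artificial
   PARM_DECL of type `struct big *' through which the caller passes
   the address of the return slot.  */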

/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (&all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
                                         TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
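
/* Illustrative note (not part of the original source): on a target
   whose promote_function_mode hook widens sub-word integers, a
   `short' parameter typically ends up here with nominal_mode and
   passed_mode both HImode but promoted_mode SImode, with unsignedp
   recording the signedness used for the extension.  */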

/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (&all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}

/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
                                                    data->promoted_mode,
                                                    data->passed_type,
                                                    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
        {
          rtx tem;
          tem = targetm.calls.function_incoming_arg (&all->args_so_far,
                                                     data->promoted_mode,
                                                     data->passed_type, true);
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */

          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
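
/* Illustrative arithmetic (not part of the original source): if the
   first stack argument has its first 4 bytes passed in a register,
   pretend_bytes is 4 and, assuming STACK_BYTES of 16, CEIL_ROUND
   yields a pretend_args_size of 16, keeping the argument pointer
   aligned to STACK_BOUNDARY.  */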

/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}

/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
         while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
          && data->promoted_mode != DECL_MODE (parm))
        {
          set_mem_size (stack_parm,
                        GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
          if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
            {
              int offset = subreg_lowpart_offset (DECL_MODE (parm),
                                                  data->promoted_mode);
              if (offset)
                set_mem_offset (stack_parm,
                                plus_constant (MEM_OFFSET (stack_parm),
                                               -offset));
            }
        }
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
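/* Illustrative arithmetic (not part of the original source): for a
   downward-padded slot at constant byte offset 4 with a 64-bit
   boundary, the guess above computes (4 * 8) | 64 == 96, and then
   96 & -96 == 32, the largest power-of-two alignment guaranteed by
   both the offset and the boundary.  */
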
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (stack_parm), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
                        GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
           && (flag_stack_protect == 2
               || data->passed_pointer
               || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}

/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}

/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
                         tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
                                       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
         So we have to be careful to ensure that we allocate an
         integral number of words.  We do this above when we call
         assign_stack_local if space was not allocated in the argument
         list.  If it was, this will not work if PARM_BOUNDARY is not
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
         if it becomes a problem.  Exception is when BLKmode arrives
         with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
        ;
      else if (GET_CODE (entry_parm) == PARALLEL)
        ;
      else
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        {
          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_group_store (mem, entry_parm, data->passed_type, size);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();
        }

      else if (size == 0)
        ;

      /* If SIZE is that of a mode no bigger than a word, just use
         that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
        {
          enum machine_mode mode
            = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

          if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
              && (size == UNITS_PER_WORD
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
              )
            {
              rtx reg;

              /* We are really truncating a word_mode value containing
                 SIZE bytes into a value of mode MODE.  If such an
                 operation requires no actual instructions, we can refer
                 to the value directly in mode MODE, otherwise we must
                 start with the register in word_mode and explicitly
                 convert it.  */
              if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
                reg = gen_rtx_REG (mode, REGNO (entry_parm));
              else
                {
                  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
                  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
                }
              emit_move_insn (change_address (mem, mode, 0), reg);
            }

          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
             machine must be aligned to the left before storing
             to memory.  Note that the previous test doesn't
             handle all cases (e.g. SIZE == 3).  */
          else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                       == downward)
#else
                   && BYTES_BIG_ENDIAN
#endif
                   )
            {
              rtx tem, x;
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

              x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                build_int_cst (NULL_TREE, by),
                                NULL_RTX, 1);
              tem = change_address (mem, word_mode, 0);
              emit_move_insn (tem, x);
            }
          else
            move_block_from_reg (REGNO (entry_parm), mem,
                                 size_stored / UNITS_PER_WORD);
        }
      else
        move_block_from_reg (REGNO (entry_parm), mem,
                             size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
                       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
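/* Illustrative arithmetic (not part of the original source): for a
   3-byte value arriving in a register on a big-endian 32-bit target,
   the shift case above computes by = (4 - 3) * 8 == 8, so the value
   is shifted left by one byte to sit at the most significant end of
   the word before being stored.  */
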
/* A subroutine of assign_parm_setup_reg, called through note_stores.
   This collects sets and clobbers of hard registers in a HARD_REG_SET,
   which is pointed to by DATA.  */

static void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
      while (nregs-- > 0)
        SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
    }
}
6071dc7f
RH
2884/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2885 parameter. Get it there. Perform all ABI specified conversions. */
2886
2887static void
2888assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2889 struct assign_parm_data_one *data)
2890{
71008de4
BS
2891 rtx parmreg, validated_mem;
2892 rtx equiv_stack_parm;
6071dc7f
RH
2893 enum machine_mode promoted_nominal_mode;
2894 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2895 bool did_conversion = false;
71008de4 2896 bool need_conversion, moved;
6071dc7f
RH
2897
2898 /* Store the parm in a pseudoregister during the function, but we may
666e3ceb
PB
2899 need to do it in a wider mode. Using 2 here makes the result
2900 consistent with promote_decl_mode and thus expand_expr_real_1. */
6071dc7f 2901 promoted_nominal_mode
cde0f3fd 2902 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
666e3ceb 2903 TREE_TYPE (current_function_decl), 2);
6071dc7f
RH
2904
2905 parmreg = gen_reg_rtx (promoted_nominal_mode);
2906
2907 if (!DECL_ARTIFICIAL (parm))
2908 mark_user_reg (parmreg);
2909
2910 /* If this was an item that we received a pointer to,
2911 set DECL_RTL appropriately. */
2912 if (data->passed_pointer)
2913 {
2914 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2915 set_mem_attributes (x, parm, 1);
2916 SET_DECL_RTL (parm, x);
2917 }
2918 else
389fdba0 2919 SET_DECL_RTL (parm, parmreg);
6071dc7f 2920
4d2a9850
DJ
2921 assign_parm_remove_parallels (data);
2922
666e3ceb
PB
2923 /* Copy the value into the register, thus bridging between
2924 assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 2925
71008de4
BS
2926 equiv_stack_parm = data->stack_parm;
2927 validated_mem = validize_mem (data->entry_parm);
2928
2929 need_conversion = (data->nominal_mode != data->passed_mode
2930 || promoted_nominal_mode != data->promoted_mode);
2931 moved = false;
2932
dbb94435
BS
2933 if (need_conversion
2934 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2935 && data->nominal_mode == data->passed_mode
2936 && data->nominal_mode == GET_MODE (data->entry_parm))
71008de4 2937 {
6071dc7f
RH
2938 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2939 mode, by the caller. We now have to convert it to
2940 NOMINAL_MODE, if different. However, PARMREG may be in
2941 a different mode than NOMINAL_MODE if it is being stored
2942 promoted.
2943
2944 If ENTRY_PARM is a hard register, it might be in a register
2945 not valid for operating in its mode (e.g., an odd-numbered
2946 register for a DFmode). In that case, moves are the only
2947 thing valid, so we can't do a convert from there. This
2948 occurs when the calling sequence allow such misaligned
2949 usages.
2950
2951 In addition, the conversion may involve a call, which could
2952 clobber parameters which haven't been copied to pseudo
71008de4
BS
2953 registers yet.
2954
2955 First, we try to emit an insn which performs the necessary
2956 conversion. We verify that this insn does not clobber any
2957 hard registers. */
2958
2959 enum insn_code icode;
2960 rtx op0, op1;
2961
2962 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2963 unsignedp);
2964
2965 op0 = parmreg;
2966 op1 = validated_mem;
2967 if (icode != CODE_FOR_nothing
2968 && insn_data[icode].operand[0].predicate (op0, promoted_nominal_mode)
2969 && insn_data[icode].operand[1].predicate (op1, data->passed_mode))
2970 {
2971 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
2972 rtx insn, insns;
2973 HARD_REG_SET hardregs;
2974
2975 start_sequence ();
2976 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
2977 data->passed_mode, unsignedp);
2978 emit_insn (insn);
2979 insns = get_insns ();
2980
2981 moved = true;
2982 CLEAR_HARD_REG_SET (hardregs);
2983 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
2984 {
2985 if (INSN_P (insn))
2986 note_stores (PATTERN (insn), record_hard_reg_sets,
2987 &hardregs);
2988 if (!hard_reg_set_empty_p (hardregs))
2989 moved = false;
2990 }
2991
2992 end_sequence ();
2993
2994 if (moved)
2995 {
2996 emit_insn (insns);
dbb94435
BS
2997 if (equiv_stack_parm != NULL_RTX)
2998 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
2999 equiv_stack_parm);
71008de4
BS
3000 }
3001 }
3002 }
3003
3004 if (moved)
3005 /* Nothing to do. */
3006 ;
3007 else if (need_conversion)
3008 {
3009 /* We did not have an insn to convert directly, or the sequence
3010 generated appeared unsafe. We must first copy the parm to a
3011 pseudo reg, and save the conversion until after all
6071dc7f
RH
3012 parameters have been moved. */
3013
71008de4 3014 int save_tree_used;
6071dc7f
RH
3015 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3016
71008de4 3017 emit_move_insn (tempreg, validated_mem);
6071dc7f 3018
bb27eeda 3019 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
3020 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3021
3022 if (GET_CODE (tempreg) == SUBREG
3023 && GET_MODE (tempreg) == data->nominal_mode
3024 && REG_P (SUBREG_REG (tempreg))
3025 && data->nominal_mode == data->passed_mode
3026 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3027 && GET_MODE_SIZE (GET_MODE (tempreg))
3028 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 3029 {
6071dc7f
RH
3030 /* The argument is already sign/zero extended, so note it
3031 into the subreg. */
3032 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3033 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3034 }
00d8a4c1 3035
6071dc7f
RH
3036 /* TREE_USED gets set erroneously during expand_assignment. */
3037 save_tree_used = TREE_USED (parm);
79f5e442 3038 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 3039 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
3040 all->first_conversion_insn = get_insns ();
3041 all->last_conversion_insn = get_last_insn ();
6071dc7f 3042 end_sequence ();
00d8a4c1 3043
6071dc7f
RH
3044 did_conversion = true;
3045 }
3046 else
71008de4 3047 emit_move_insn (parmreg, validated_mem);
6071dc7f
RH
3048
3049 /* If we were passed a pointer but the actual value can safely live
3050 in a register, put it in one. */
3051 if (data->passed_pointer
3052 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3053 /* If by-reference argument was promoted, demote it. */
3054 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3055 || use_register_for_decl (parm)))
3056 {
3057 /* We can't use nominal_mode, because it will have been set to
3058 Pmode above. We must use the actual mode of the parm. */
3059 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3060 mark_user_reg (parmreg);
cd5b3469 3061
6071dc7f
RH
3062 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3063 {
3064 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3065 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3066
bb27eeda
SE
3067 push_to_sequence2 (all->first_conversion_insn,
3068 all->last_conversion_insn);
6071dc7f
RH
3069 emit_move_insn (tempreg, DECL_RTL (parm));
3070 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3071 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
3072 all->first_conversion_insn = get_insns ();
3073 all->last_conversion_insn = get_last_insn ();
6071dc7f 3074 end_sequence ();
6f086dfc 3075
6071dc7f
RH
3076 did_conversion = true;
3077 }
3078 else
3079 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 3080
6071dc7f 3081 SET_DECL_RTL (parm, parmreg);
797a6ac1 3082
6071dc7f
RH
3083 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3084 now the parm. */
3085 data->stack_parm = NULL;
3086 }
ddef6bc7 3087
6071dc7f
RH
3088 /* Mark the register as eliminable if we did no conversion and it was
3089 copied from memory at a fixed offset, and the arg pointer was not
3090 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3091 offset formed an invalid address, such memory-equivalences as we
3092 make here would screw up life analysis for it. */
3093 if (data->nominal_mode == data->passed_mode
3094 && !did_conversion
3095 && data->stack_parm != 0
3096 && MEM_P (data->stack_parm)
3097 && data->locate.offset.var == 0
3098 && reg_mentioned_p (virtual_incoming_args_rtx,
3099 XEXP (data->stack_parm, 0)))
3100 {
3101 rtx linsn = get_last_insn ();
3102 rtx sinsn, set;
a03caf76 3103
6071dc7f
RH
3104 /* Mark complex types separately. */
3105 if (GET_CODE (parmreg) == CONCAT)
3106 {
3107 enum machine_mode submode
3108 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
3109 int regnor = REGNO (XEXP (parmreg, 0));
3110 int regnoi = REGNO (XEXP (parmreg, 1));
3111 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3112 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3113 GET_MODE_SIZE (submode));
6071dc7f
RH
3114
3115 /* Scan backwards for the set of the real and
3116 imaginary parts. */
3117 for (sinsn = linsn; sinsn != 0;
3118 sinsn = prev_nonnote_insn (sinsn))
3119 {
3120 set = single_set (sinsn);
3121 if (set == 0)
3122 continue;
3123
3124 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 3125 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 3126 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 3127 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 3128 }
6071dc7f
RH
3129 }
3130 else if ((set = single_set (linsn)) != 0
3131 && SET_DEST (set) == parmreg)
71008de4 3132 set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
6071dc7f
RH
3133 }
3134
3135	  /* For a pointer data type, suggest a pointer register.  */
3136 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3137 mark_reg_pointer (parmreg,
3138 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3139}
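/* Illustration (hypothetical, not from the sources): a parameter passed
   by invisible reference arrives as a Pmode pointer; if the pointee is,
   say, a 4-byte int that can safely live in a register, the code above
   loads it through the pointer into a fresh SImode pseudo and makes
   that pseudo the parm's DECL_RTL.  */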
3140
3141/* A subroutine of assign_parms. Allocate stack space to hold the current
3142   parameter, move the value into it, and perform all ABI-specified
   conversions.  */
3143
3144static void
3145assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3146 struct assign_parm_data_one *data)
3147{
3148 /* Value must be stored in the stack slot STACK_PARM during function
3149 execution. */
bfc45551 3150 bool to_conversion = false;
6071dc7f 3151
4d2a9850
DJ
3152 assign_parm_remove_parallels (data);
3153
6071dc7f
RH
3154 if (data->promoted_mode != data->nominal_mode)
3155 {
3156 /* Conversion is required. */
3157 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 3158
6071dc7f
RH
3159 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3160
bb27eeda 3161 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
3162 to_conversion = true;
3163
6071dc7f
RH
3164 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3165 TYPE_UNSIGNED (TREE_TYPE (parm)));
3166
3167 if (data->stack_parm)
dd67163f
JJ
3168 {
3169 int offset = subreg_lowpart_offset (data->nominal_mode,
3170 GET_MODE (data->stack_parm));
3171 /* ??? This may need a big-endian conversion on sparc64. */
3172 data->stack_parm
3173 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3174 if (offset && MEM_OFFSET (data->stack_parm))
3175 set_mem_offset (data->stack_parm,
3176 plus_constant (MEM_OFFSET (data->stack_parm),
3177 offset));
3178 }
6071dc7f
RH
3179 }
3180
3181 if (data->entry_parm != data->stack_parm)
3182 {
bfc45551
AM
3183 rtx src, dest;
3184
6071dc7f
RH
3185 if (data->stack_parm == 0)
3186 {
3a695389
UW
3187 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3188 GET_MODE (data->entry_parm),
3189 TYPE_ALIGN (data->passed_type));
6071dc7f
RH
3190 data->stack_parm
3191 = assign_stack_local (GET_MODE (data->entry_parm),
3192 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3a695389 3193 align);
6071dc7f 3194 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 3195 }
6071dc7f 3196
bfc45551
AM
3197 dest = validize_mem (data->stack_parm);
3198 src = validize_mem (data->entry_parm);
3199
3200 if (MEM_P (src))
6f086dfc 3201 {
bfc45551
AM
3202 /* Use a block move to handle potentially misaligned entry_parm. */
3203 if (!to_conversion)
bb27eeda
SE
3204 push_to_sequence2 (all->first_conversion_insn,
3205 all->last_conversion_insn);
bfc45551
AM
3206 to_conversion = true;
3207
3208 emit_block_move (dest, src,
3209 GEN_INT (int_size_in_bytes (data->passed_type)),
3210 BLOCK_OP_NORMAL);
6071dc7f
RH
3211 }
3212 else
bfc45551
AM
3213 emit_move_insn (dest, src);
3214 }
3215
3216 if (to_conversion)
3217 {
bb27eeda
SE
3218 all->first_conversion_insn = get_insns ();
3219 all->last_conversion_insn = get_last_insn ();
bfc45551 3220 end_sequence ();
6071dc7f 3221 }
6f086dfc 3222
6071dc7f
RH
3223 SET_DECL_RTL (parm, data->stack_parm);
3224}
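/* Illustration (hypothetical, not from the sources): a char argument
   that the ABI promotes to SImode arrives in the promoted mode; the
   conversion code above narrows it back to its nominal QImode before
   the value is stored into its permanent stack slot.  */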
3412b298 3225
6071dc7f
RH
3226/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3227 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 3228
6071dc7f 3229static void
3b3f318a
RG
3230assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3231 VEC(tree, heap) *fnargs)
6071dc7f
RH
3232{
3233 tree parm;
6ccd356e 3234 tree orig_fnargs = all->orig_fnargs;
3b3f318a 3235 unsigned i = 0;
f4ef873c 3236
3b3f318a 3237 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
6071dc7f
RH
3238 {
3239 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3240 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3241 {
3242 rtx tmp, real, imag;
3243 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 3244
3b3f318a
RG
3245 real = DECL_RTL (VEC_index (tree, fnargs, i));
3246 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
6071dc7f 3247 if (inner != GET_MODE (real))
6f086dfc 3248 {
6071dc7f
RH
3249 real = gen_lowpart_SUBREG (inner, real);
3250 imag = gen_lowpart_SUBREG (inner, imag);
3251 }
6ccd356e
AM
3252
3253 if (TREE_ADDRESSABLE (parm))
3254 {
3255 rtx rmem, imem;
3256 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3a695389
UW
3257 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3258 DECL_MODE (parm),
3259 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
3260
3261 /* split_complex_arg put the real and imag parts in
3262 pseudos. Move them to memory. */
3a695389 3263 tmp = assign_stack_local (DECL_MODE (parm), size, align);
6ccd356e
AM
3264 set_mem_attributes (tmp, parm, 1);
3265 rmem = adjust_address_nv (tmp, inner, 0);
3266 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
3267 push_to_sequence2 (all->first_conversion_insn,
3268 all->last_conversion_insn);
6ccd356e
AM
3269 emit_move_insn (rmem, real);
3270 emit_move_insn (imem, imag);
bb27eeda
SE
3271 all->first_conversion_insn = get_insns ();
3272 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
3273 end_sequence ();
3274 }
3275 else
3276 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 3277 SET_DECL_RTL (parm, tmp);
7e41ffa2 3278
3b3f318a
RG
3279 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3280 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
6071dc7f
RH
3281 if (inner != GET_MODE (real))
3282 {
3283 real = gen_lowpart_SUBREG (inner, real);
3284 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 3285 }
6071dc7f 3286 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5141868d 3287 set_decl_incoming_rtl (parm, tmp, false);
3b3f318a 3288 i++;
6f086dfc 3289 }
6f086dfc 3290 }
6071dc7f
RH
3291}
3292
3293/* Assign RTL expressions to the function's parameters. This may involve
3294 copying them into registers and using those registers as the DECL_RTL. */
3295
6fe79279 3296static void
6071dc7f
RH
3297assign_parms (tree fndecl)
3298{
3299 struct assign_parm_data_all all;
3b3f318a
RG
3300 tree parm;
3301 VEC(tree, heap) *fnargs;
3302 unsigned i;
6f086dfc 3303
38173d38 3304 crtl->args.internal_arg_pointer
150cdc9e 3305 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
3306
3307 assign_parms_initialize_all (&all);
3308 fnargs = assign_parms_augmented_arg_list (&all);
3309
ac47786e 3310 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
ded9bf77 3311 {
6071dc7f
RH
3312 struct assign_parm_data_one data;
3313
3314 /* Extract the type of PARM; adjust it according to ABI. */
3315 assign_parm_find_data_types (&all, parm, &data);
3316
3317 /* Early out for errors and void parameters. */
3318 if (data.passed_mode == VOIDmode)
ded9bf77 3319 {
6071dc7f
RH
3320 SET_DECL_RTL (parm, const0_rtx);
3321 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3322 continue;
3323 }
196c42cd 3324
2e3f842f
L
3325 /* Estimate stack alignment from parameter alignment. */
3326 if (SUPPORTS_STACK_ALIGNMENT)
3327 {
3328 unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
3329 data.passed_type);
ae58e548
JJ
3330 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3331 align);
2e3f842f 3332 if (TYPE_ALIGN (data.nominal_type) > align)
ae58e548
JJ
3333 align = MINIMUM_ALIGNMENT (data.nominal_type,
3334 TYPE_MODE (data.nominal_type),
3335 TYPE_ALIGN (data.nominal_type));
2e3f842f
L
3336 if (crtl->stack_alignment_estimated < align)
3337 {
3338 gcc_assert (!crtl->stack_realign_processed);
3339 crtl->stack_alignment_estimated = align;
3340 }
3341 }
b8698a0f 3342
910ad8de 3343 if (cfun->stdarg && !DECL_CHAIN (parm))
8117c488 3344 assign_parms_setup_varargs (&all, &data, false);
196c42cd 3345
6071dc7f
RH
3346 /* Find out where the parameter arrives in this function. */
3347 assign_parm_find_entry_rtl (&all, &data);
3348
3349 /* Find out where stack space for this parameter might be. */
3350 if (assign_parm_is_stack_parm (&all, &data))
3351 {
3352 assign_parm_find_stack_rtl (parm, &data);
3353 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3354 }
6071dc7f
RH
3355
3356 /* Record permanently how this parm was passed. */
5141868d 3357 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
6071dc7f
RH
3358
3359 /* Update info on where next arg arrives in registers. */
3c07301f
NF
3360 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3361 data.passed_type, data.named_arg);
6071dc7f
RH
3362
3363 assign_parm_adjust_stack_rtl (&data);
3364
3365 if (assign_parm_setup_block_p (&data))
27e29549 3366 assign_parm_setup_block (&all, parm, &data);
6071dc7f
RH
3367 else if (data.passed_pointer || use_register_for_decl (parm))
3368 assign_parm_setup_reg (&all, parm, &data);
3369 else
3370 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
3371 }
3372
3b3f318a 3373 if (targetm.calls.split_complex_arg)
6ccd356e 3374 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3375
3b3f318a
RG
3376 VEC_free (tree, heap, fnargs);
3377
3412b298
JW
3378 /* Output all parameter conversion instructions (possibly including calls)
3379 now that all parameters have been copied out of hard registers. */
bb27eeda 3380 emit_insn (all.first_conversion_insn);
3412b298 3381
2e3f842f
L
3382 /* Estimate reload stack alignment from scalar return mode. */
3383 if (SUPPORTS_STACK_ALIGNMENT)
3384 {
3385 if (DECL_RESULT (fndecl))
3386 {
3387 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3388 enum machine_mode mode = TYPE_MODE (type);
3389
3390 if (mode != BLKmode
3391 && mode != VOIDmode
3392 && !AGGREGATE_TYPE_P (type))
3393 {
3394 unsigned int align = GET_MODE_ALIGNMENT (mode);
3395 if (crtl->stack_alignment_estimated < align)
3396 {
3397 gcc_assert (!crtl->stack_realign_processed);
3398 crtl->stack_alignment_estimated = align;
3399 }
3400 }
b8698a0f 3401 }
2e3f842f
L
3402 }
3403
b36a8cc2
OH
3404 /* If we are receiving a struct value address as the first argument, set up
3405 the RTL for the function result. As this might require code to convert
3406 the transmitted address to Pmode, we do this here to ensure that possible
3407 preliminary conversions of the address have been emitted already. */
6071dc7f 3408 if (all.function_result_decl)
b36a8cc2 3409 {
6071dc7f
RH
3410 tree result = DECL_RESULT (current_function_decl);
3411 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3412 rtx x;
fa8db1f7 3413
cc77ae10
JM
3414 if (DECL_BY_REFERENCE (result))
3415 x = addr;
3416 else
3417 {
3418 addr = convert_memory_address (Pmode, addr);
3419 x = gen_rtx_MEM (DECL_MODE (result), addr);
3420 set_mem_attributes (x, result, 1);
3421 }
b36a8cc2
OH
3422 SET_DECL_RTL (result, x);
3423 }
3424
53c428d0 3425 /* We have aligned all the args, so add space for the pretend args. */
38173d38 3426 crtl->args.pretend_args_size = all.pretend_args_size;
6071dc7f 3427 all.stack_args_size.constant += all.extra_pretend_bytes;
38173d38 3428 crtl->args.size = all.stack_args_size.constant;
6f086dfc
RS
3429
3430 /* Adjust function incoming argument size for alignment and
3431 minimum length. */
3432
3433#ifdef REG_PARM_STACK_SPACE
38173d38 3434 crtl->args.size = MAX (crtl->args.size,
6f086dfc 3435 REG_PARM_STACK_SPACE (fndecl));
6f90e075 3436#endif
6f086dfc 3437
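  /* E.g., with PARM_BOUNDARY == 64 an 18-byte incoming argument block
     is rounded up to 24 bytes here (a made-up illustration, not taken
     from any particular target).  */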
38173d38 3438 crtl->args.size = CEIL_ROUND (crtl->args.size,
53366450 3439 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3440
6f086dfc 3441#ifdef ARGS_GROW_DOWNWARD
38173d38 3442 crtl->args.arg_offset_rtx
477eff96 3443 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3444 : expand_expr (size_diffop (all.stack_args_size.var,
3445 size_int (-all.stack_args_size.constant)),
bbbbb16a 3446 NULL_RTX, VOIDmode, EXPAND_NORMAL));
6f086dfc 3447#else
38173d38 3448 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3449#endif
3450
3451 /* See how many bytes, if any, of its args a function should try to pop
3452 on return. */
3453
079e7538
NF
3454 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3455 TREE_TYPE (fndecl),
3456 crtl->args.size);
6f086dfc 3457
3b69d50e
RK
3458	  /* For a stdarg function, save info about the
3459	     regs and stack space used by the named args.  */
6f086dfc 3460
38173d38 3461 crtl->args.info = all.args_so_far;
6f086dfc
RS
3462
3463 /* Set the rtx used for the function return value. Put this in its
3464 own variable so any optimizers that need this information don't have
3465 to include tree.h. Do this here so it gets done when an inlined
3466 function gets output. */
3467
38173d38 3468 crtl->return_rtx
19e7881c
MM
3469 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3470 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3471
3472 /* If scalar return value was computed in a pseudo-reg, or was a named
3473 return value that got dumped to the stack, copy that to the hard
3474 return register. */
3475 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3476 {
3477 tree decl_result = DECL_RESULT (fndecl);
3478 rtx decl_rtl = DECL_RTL (decl_result);
3479
3480 if (REG_P (decl_rtl)
3481 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3482 : DECL_REGISTER (decl_result))
3483 {
3484 rtx real_decl_rtl;
3485
1d636cc6
RG
3486 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3487 fndecl, true);
ce5e43d0 3488 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
38173d38 3489 /* The delay slot scheduler assumes that crtl->return_rtx
ce5e43d0
JJ
3490 holds the hard register containing the return value, not a
3491 temporary pseudo. */
38173d38 3492 crtl->return_rtx = real_decl_rtl;
ce5e43d0
JJ
3493 }
3494 }
6f086dfc 3495}
4744afba
RH
3496
3497/* A subroutine of gimplify_parameters, invoked via walk_tree.
3498 For all seen types, gimplify their sizes. */
3499
3500static tree
3501gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3502{
3503 tree t = *tp;
3504
3505 *walk_subtrees = 0;
3506 if (TYPE_P (t))
3507 {
3508 if (POINTER_TYPE_P (t))
3509 *walk_subtrees = 1;
ad50bc8d
RH
3510 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3511 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba 3512 {
726a989a 3513 gimplify_type_sizes (t, (gimple_seq *) data);
4744afba
RH
3514 *walk_subtrees = 1;
3515 }
3516 }
3517
3518 return NULL;
3519}
3520
3521/* Gimplify the parameter list for current_function_decl. This involves
3522   evaluating SAVE_EXPRs of variable-sized parameters and generating code
726a989a
RB
3523   to implement callee-copied reference parameters.  Returns a sequence of
3524 statements to add to the beginning of the function. */
4744afba 3525
726a989a 3526gimple_seq
4744afba
RH
3527gimplify_parameters (void)
3528{
3529 struct assign_parm_data_all all;
3b3f318a 3530 tree parm;
726a989a 3531 gimple_seq stmts = NULL;
3b3f318a
RG
3532 VEC(tree, heap) *fnargs;
3533 unsigned i;
4744afba
RH
3534
3535 assign_parms_initialize_all (&all);
3536 fnargs = assign_parms_augmented_arg_list (&all);
3537
ac47786e 3538 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
4744afba
RH
3539 {
3540 struct assign_parm_data_one data;
3541
3542 /* Extract the type of PARM; adjust it according to ABI. */
3543 assign_parm_find_data_types (&all, parm, &data);
3544
3545 /* Early out for errors and void parameters. */
3546 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3547 continue;
3548
3549 /* Update info on where next arg arrives in registers. */
3c07301f
NF
3550 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3551 data.passed_type, data.named_arg);
4744afba
RH
3552
3553 /* ??? Once upon a time variable_size stuffed parameter list
3554 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3555 turned out to be less than manageable in the gimple world.
3556 Now we have to hunt them down ourselves. */
3557 walk_tree_without_duplicates (&data.passed_type,
3558 gimplify_parm_type, &stmts);
3559
b38f3813 3560 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4744afba
RH
3561 {
3562 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3563 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3564 }
3565
3566 if (data.passed_pointer)
3567 {
3568 tree type = TREE_TYPE (data.passed_type);
3569 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3570 type, data.named_arg))
3571 {
3572 tree local, t;
3573
b38f3813 3574 /* For constant-sized objects, this is trivial; for
4744afba 3575 variable-sized objects, we have to play games. */
b38f3813
EB
3576 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3577 && !(flag_stack_check == GENERIC_STACK_CHECK
3578 && compare_tree_int (DECL_SIZE_UNIT (parm),
3579 STACK_CHECK_MAX_VAR_SIZE) > 0))
4744afba
RH
3580 {
3581 local = create_tmp_var (type, get_name (parm));
3582 DECL_IGNORED_P (local) = 0;
04487a2f
JJ
3583 /* If PARM was addressable, move that flag over
3584 to the local copy, as its address will be taken,
37609bf0
RG
3585		     not the PARM's.  Keep the PARM's TREE_ADDRESSABLE flag
3586		     set as well, since we'll query that flag during
		     gimplification.  */
04487a2f 3587 if (TREE_ADDRESSABLE (parm))
37609bf0 3588 TREE_ADDRESSABLE (local) = 1;
4744afba
RH
3589 }
3590 else
3591 {
5039610b 3592 tree ptr_type, addr;
4744afba
RH
3593
3594 ptr_type = build_pointer_type (type);
3595 addr = create_tmp_var (ptr_type, get_name (parm));
3596 DECL_IGNORED_P (addr) = 0;
3597 local = build_fold_indirect_ref (addr);
3598
4744afba 3599 t = built_in_decls[BUILT_IN_ALLOCA];
5039610b 3600 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
d3c12306
EB
3601 /* The call has been built for a variable-sized object. */
3602 ALLOCA_FOR_VAR_P (t) = 1;
4744afba 3603 t = fold_convert (ptr_type, t);
726a989a 3604 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4744afba
RH
3605 gimplify_and_add (t, &stmts);
3606 }
3607
726a989a 3608 gimplify_assign (local, parm, &stmts);
4744afba 3609
833b3afe
DB
3610 SET_DECL_VALUE_EXPR (parm, local);
3611 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3612 }
3613 }
3614 }
3615
3b3f318a
RG
3616 VEC_free (tree, heap, fnargs);
3617
4744afba
RH
3618 return stmts;
3619}
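/* A rough sketch (hypothetical, for illustration only) of what the
   variable-sized callee-copy case above builds:

       addr = (T *) __builtin_alloca (DECL_SIZE_UNIT (parm));
       *addr = parm;

   after which the dereference of ADDR becomes the parm's
   DECL_VALUE_EXPR, so later references to the parm see the local
   copy.  */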
75dc3319 3620\f
6f086dfc
RS
3621/* Compute the size and offset from the start of the stacked arguments for a
3622 parm passed in mode PASSED_MODE and with type TYPE.
3623
3624 INITIAL_OFFSET_PTR points to the current offset into the stacked
3625 arguments.
3626
e7949876
AM
3627 The starting offset and size for this parm are returned in
3628 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3629   nonzero, the offset is that of the stack slot, which is returned in
3630 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3631 padding required from the initial offset ptr to the stack slot.
6f086dfc 3632
cc2902df 3633 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3634 never be set if REG_PARM_STACK_SPACE is not defined.
3635
3636 FNDECL is the function in which the argument was defined.
3637
3638 There are two types of rounding that are done. The first, controlled by
3639 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3640   list to be aligned to a specific boundary (in bits).  This rounding
3641 affects the initial and starting offsets, but not the argument size.
3642
3643 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3644 optionally rounds the size of the parm to PARM_BOUNDARY. The
3645 initial offset is not affected by this rounding, while the size always
3646 is and the starting offset may be. */
3647
e7949876
AM
3648/* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3649 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3650 callers pass in the total size of args so far as
e7949876 3651 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
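/* A worked example (hypothetical, assuming !ARGS_GROW_DOWNWARD,
   PARM_BOUNDARY == 32, a 64-bit FUNCTION_ARG_BOUNDARY, and an argument
   not partially passed in registers): an initial offset of 12 bytes is
   padded up to 16, so LOCATE->SLOT_OFFSET.CONSTANT == 16, and an
   8-byte argument then gives LOCATE->SIZE.CONSTANT == 8.  */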
6f086dfc 3652
6f086dfc 3653void
fa8db1f7
AJ
3654locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3655 int partial, tree fndecl ATTRIBUTE_UNUSED,
3656 struct args_size *initial_offset_ptr,
3657 struct locate_and_pad_arg_data *locate)
6f086dfc 3658{
e7949876
AM
3659 tree sizetree;
3660 enum direction where_pad;
c7e777b5 3661 unsigned int boundary;
e7949876
AM
3662 int reg_parm_stack_space = 0;
3663 int part_size_in_regs;
6f086dfc
RS
3664
3665#ifdef REG_PARM_STACK_SPACE
e7949876 3666 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
e7949876 3667
6f086dfc
RS
3668 /* If we have found a stack parm before we reach the end of the
3669 area reserved for registers, skip that area. */
3670 if (! in_regs)
3671 {
6f086dfc
RS
3672 if (reg_parm_stack_space > 0)
3673 {
3674 if (initial_offset_ptr->var)
3675 {
3676 initial_offset_ptr->var
3677 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3678 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3679 initial_offset_ptr->constant = 0;
3680 }
3681 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3682 initial_offset_ptr->constant = reg_parm_stack_space;
3683 }
3684 }
3685#endif /* REG_PARM_STACK_SPACE */
3686
78a52f11 3687 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
3688
3689 sizetree
3690 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3691 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3692 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
6e985040 3693 locate->where_pad = where_pad;
2e3f842f
L
3694
3695 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3696 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3697 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3698
bfc45551 3699 locate->boundary = boundary;
6f086dfc 3700
2e3f842f
L
3701 if (SUPPORTS_STACK_ALIGNMENT)
3702 {
3703       /* stack_alignment_estimated can't change after the stack has been
3704 realigned. */
3705 if (crtl->stack_alignment_estimated < boundary)
3706 {
3707 if (!crtl->stack_realign_processed)
3708 crtl->stack_alignment_estimated = boundary;
3709 else
3710 {
3711 /* If stack is realigned and stack alignment value
3712 hasn't been finalized, it is OK not to increase
3713 stack_alignment_estimated. The bigger alignment
3714 requirement is recorded in stack_alignment_needed
3715 below. */
3716 gcc_assert (!crtl->stack_realign_finalized
3717 && crtl->stack_realign_needed);
3718 }
3719 }
3720 }
3721
c7e777b5
RH
3722 /* Remember if the outgoing parameter requires extra alignment on the
3723 calling function side. */
cb91fab0
JH
3724 if (crtl->stack_alignment_needed < boundary)
3725 crtl->stack_alignment_needed = boundary;
2e3f842f
L
3726 if (crtl->preferred_stack_boundary < boundary)
3727 crtl->preferred_stack_boundary = boundary;
c7e777b5 3728
6f086dfc 3729#ifdef ARGS_GROW_DOWNWARD
e7949876 3730 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 3731 if (initial_offset_ptr->var)
e7949876
AM
3732 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3733 initial_offset_ptr->var);
9dff28ab 3734
e7949876
AM
3735 {
3736 tree s2 = sizetree;
3737 if (where_pad != none
3738 && (!host_integerp (sizetree, 1)
3739 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3740 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3741 SUB_PARM_SIZE (locate->slot_offset, s2);
3742 }
3743
3744 locate->slot_offset.constant += part_size_in_regs;
9dff28ab
JDA
3745
3746 if (!in_regs
3747#ifdef REG_PARM_STACK_SPACE
3748 || REG_PARM_STACK_SPACE (fndecl) > 0
3749#endif
3750 )
e7949876
AM
3751 pad_to_arg_alignment (&locate->slot_offset, boundary,
3752 &locate->alignment_pad);
9dff28ab 3753
e7949876
AM
3754 locate->size.constant = (-initial_offset_ptr->constant
3755 - locate->slot_offset.constant);
6f086dfc 3756 if (initial_offset_ptr->var)
e7949876
AM
3757 locate->size.var = size_binop (MINUS_EXPR,
3758 size_binop (MINUS_EXPR,
3759 ssize_int (0),
3760 initial_offset_ptr->var),
3761 locate->slot_offset.var);
3762
3763 /* Pad_below needs the pre-rounded size to know how much to pad
3764 below. */
3765 locate->offset = locate->slot_offset;
3766 if (where_pad == downward)
3767 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 3768
6f086dfc 3769#else /* !ARGS_GROW_DOWNWARD */
832ea3b3
FS
3770 if (!in_regs
3771#ifdef REG_PARM_STACK_SPACE
3772 || REG_PARM_STACK_SPACE (fndecl) > 0
3773#endif
3774 )
e7949876
AM
3775 pad_to_arg_alignment (initial_offset_ptr, boundary,
3776 &locate->alignment_pad);
3777 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
3778
3779#ifdef PUSH_ROUNDING
3780 if (passed_mode != BLKmode)
3781 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3782#endif
3783
d4b0a7a0
DE
3784 /* Pad_below needs the pre-rounded size to know how much to pad below
3785 so this must be done before rounding up. */
e7949876
AM
3786 locate->offset = locate->slot_offset;
3787 if (where_pad == downward)
3788 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 3789
6f086dfc 3790 if (where_pad != none
1468899d
RK
3791 && (!host_integerp (sizetree, 1)
3792 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
6f086dfc
RS
3793 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3794
e7949876
AM
3795 ADD_PARM_SIZE (locate->size, sizetree);
3796
3797 locate->size.constant -= part_size_in_regs;
6f086dfc 3798#endif /* ARGS_GROW_DOWNWARD */
099590dc
MM
3799
3800#ifdef FUNCTION_ARG_OFFSET
3801 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3802#endif
6f086dfc
RS
3803}
3804
e16c591a
RS
3805/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3806 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
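/* E.g. (illustrative only, assuming STACK_POINTER_OFFSET == 0 and
   !ARGS_GROW_DOWNWARD): BOUNDARY == 64 bits rounds a constant offset of
   20 bytes up to 24; when BOUNDARY exceeds PARM_BOUNDARY, the 4 bytes
   of padding are also recorded in ALIGNMENT_PAD.  */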
3807
6f086dfc 3808static void
fa8db1f7
AJ
3809pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3810 struct args_size *alignment_pad)
6f086dfc 3811{
a544cfd2
KG
3812 tree save_var = NULL_TREE;
3813 HOST_WIDE_INT save_constant = 0;
a751cd5b 3814 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
3815 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3816
3817#ifdef SPARC_STACK_BOUNDARY_HACK
2358ff91
EB
3818 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3819 the real alignment of %sp. However, when it does this, the
3820 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
a594a19c
GK
3821 if (SPARC_STACK_BOUNDARY_HACK)
3822 sp_offset = 0;
3823#endif
4fc026cd 3824
6f6b8f81 3825 if (boundary > PARM_BOUNDARY)
4fc026cd
CM
3826 {
3827 save_var = offset_ptr->var;
3828 save_constant = offset_ptr->constant;
3829 }
3830
3831 alignment_pad->var = NULL_TREE;
3832 alignment_pad->constant = 0;
4fc026cd 3833
6f086dfc
RS
3834 if (boundary > BITS_PER_UNIT)
3835 {
3836 if (offset_ptr->var)
3837 {
a594a19c
GK
3838 tree sp_offset_tree = ssize_int (sp_offset);
3839 tree offset = size_binop (PLUS_EXPR,
3840 ARGS_SIZE_TREE (*offset_ptr),
3841 sp_offset_tree);
6f086dfc 3842#ifdef ARGS_GROW_DOWNWARD
a594a19c 3843 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 3844#else
a594a19c 3845 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 3846#endif
a594a19c
GK
3847
3848 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
3849	  /* ARGS_SIZE_TREE includes the constant term.  */
3850 offset_ptr->constant = 0;
6f6b8f81 3851 if (boundary > PARM_BOUNDARY)
dd3f0101 3852 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 3853 save_var);
6f086dfc
RS
3854 }
3855 else
718fe406 3856 {
a594a19c 3857 offset_ptr->constant = -sp_offset +
6f086dfc 3858#ifdef ARGS_GROW_DOWNWARD
a594a19c 3859 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3860#else
a594a19c 3861 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3862#endif
6f6b8f81 3863 if (boundary > PARM_BOUNDARY)
718fe406
KH
3864 alignment_pad->constant = offset_ptr->constant - save_constant;
3865 }
6f086dfc
RS
3866 }
3867}
3868
3869static void
fa8db1f7 3870pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
6f086dfc
RS
3871{
3872 if (passed_mode != BLKmode)
3873 {
3874 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3875 offset_ptr->constant
3876 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3877 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3878 - GET_MODE_SIZE (passed_mode));
3879 }
3880 else
3881 {
3882 if (TREE_CODE (sizetree) != INTEGER_CST
3883 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3884 {
3885	  /* Round the size up to a multiple of PARM_BOUNDARY bits.  */
3886 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3887 /* Add it in. */
3888 ADD_PARM_SIZE (*offset_ptr, s2);
3889 SUB_PARM_SIZE (*offset_ptr, sizetree);
3890 }
3891 }
3892}
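/* Illustration (hypothetical, not from the sources): padding an HImode
   value below with PARM_BOUNDARY == 32 advances OFFSET_PTR by
   4 - 2 == 2 bytes, the amount of padding placed below the 2-byte
   value in its 4-byte slot.  */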
6f086dfc 3893\f
6f086dfc 3894
6fb5fa3c
DB
3895/* True if register REGNO was alive at a place where `setjmp' was
3896 called and was set more than once or is an argument. Such regs may
3897 be clobbered by `longjmp'. */
3898
3899static bool
3900regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3901{
3902 /* There appear to be cases where some local vars never reach the
3903 backend but have bogus regnos. */
3904 if (regno >= max_reg_num ())
3905 return false;
3906
3907 return ((REG_N_SETS (regno) > 1
3908 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3909 && REGNO_REG_SET_P (setjmp_crosses, regno));
3910}
3911
3912/* Walk the tree of blocks describing the binding levels within a
3913   function and warn about variables that might be killed by setjmp or
3914   vfork.  This is done after flow analysis and before register
3915   allocation, since register allocation will replace the pseudo-regs
3916   with hard regs.  */
3917
3918static void
3919setjmp_vars_warning (bitmap setjmp_crosses, tree block)
6f086dfc 3920{
b3694847 3921 tree decl, sub;
6de9cd9a 3922
910ad8de 3923 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
6f086dfc 3924 {
6de9cd9a 3925 if (TREE_CODE (decl) == VAR_DECL
bc41842b 3926 && DECL_RTL_SET_P (decl)
f8cfc6aa 3927 && REG_P (DECL_RTL (decl))
6fb5fa3c 3928 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 3929 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
2b001724 3930 " %<longjmp%> or %<vfork%>", decl);
6f086dfc 3931 }
6de9cd9a 3932
87caf699 3933 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
6fb5fa3c 3934 setjmp_vars_warning (setjmp_crosses, sub);
6f086dfc
RS
3935}
3936
6de9cd9a 3937/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
3938 but for arguments instead of local variables. */
3939
6fb5fa3c
DB
3940static void
3941setjmp_args_warning (bitmap setjmp_crosses)
6f086dfc 3942{
b3694847 3943 tree decl;
6f086dfc 3944 for (decl = DECL_ARGUMENTS (current_function_decl);
910ad8de 3945 decl; decl = DECL_CHAIN (decl))
6f086dfc 3946 if (DECL_RTL (decl) != 0
f8cfc6aa 3947 && REG_P (DECL_RTL (decl))
6fb5fa3c 3948 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 3949 warning (OPT_Wclobbered,
2b001724 3950 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
dee15844 3951 decl);
6f086dfc
RS
3952}
3953
6fb5fa3c
DB
3954/* Generate warning messages for variables live across setjmp. */
3955
b8698a0f 3956void
6fb5fa3c
DB
3957generate_setjmp_warnings (void)
3958{
3959 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3960
3961 if (n_basic_blocks == NUM_FIXED_BLOCKS
3962 || bitmap_empty_p (setjmp_crosses))
3963 return;
3964
3965 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3966 setjmp_args_warning (setjmp_crosses);
3967}
3968
6f086dfc 3969\f
3373692b
JJ
3970/* Reverse the order of elements in the fragment chain T of blocks,
3971 and return the new head of the chain (old last element). */
3972
3973static tree
3974block_fragments_nreverse (tree t)
3975{
3976 tree prev = 0, block, next;
3977 for (block = t; block; block = next)
3978 {
3979 next = BLOCK_FRAGMENT_CHAIN (block);
3980 BLOCK_FRAGMENT_CHAIN (block) = prev;
3981 prev = block;
3982 }
3983 return prev;
3984}
3985
3986/* Reverse the order of elements in the chain T of blocks,
3987 and return the new head of the chain (old last element).
3988 Also do the same on subblocks and reverse the order of elements
3989 in BLOCK_FRAGMENT_CHAIN as well. */
3990
3991static tree
3992blocks_nreverse_all (tree t)
3993{
3994 tree prev = 0, block, next;
3995 for (block = t; block; block = next)
3996 {
3997 next = BLOCK_CHAIN (block);
3998 BLOCK_CHAIN (block) = prev;
3999 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4000 if (BLOCK_FRAGMENT_CHAIN (block)
4001 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4002 BLOCK_FRAGMENT_CHAIN (block)
4003 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4004 prev = block;
4005 }
4006 return prev;
4007}
4008
4009
a20612aa
RH
4010/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4011 and create duplicate blocks. */
4012/* ??? Need an option to either create block fragments or to create
4013 abstract origin duplicates of a source block. It really depends
4014 on what optimization has been performed. */
467456d0 4015
116eebd6 4016void
fa8db1f7 4017reorder_blocks (void)
467456d0 4018{
116eebd6 4019 tree block = DECL_INITIAL (current_function_decl);
2c217442 4020 VEC(tree,heap) *block_stack;
467456d0 4021
1a4450c7 4022 if (block == NULL_TREE)
116eebd6 4023 return;
fc289cd1 4024
2c217442 4025 block_stack = VEC_alloc (tree, heap, 10);
18c038b9 4026
a20612aa 4027 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 4028 clear_block_marks (block);
a20612aa 4029
116eebd6
MM
4030 /* Prune the old trees away, so that they don't get in the way. */
4031 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4032 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 4033
a20612aa 4034 /* Recreate the block tree from the note nesting. */
116eebd6 4035 reorder_blocks_1 (get_insns (), block, &block_stack);
3373692b 4036 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
18c038b9 4037
2c217442 4038 VEC_free (tree, heap, block_stack);
467456d0
RS
4039}
4040
a20612aa 4041/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 4042
6de9cd9a
DN
4043void
4044clear_block_marks (tree block)
cc1fe44f 4045{
a20612aa 4046 while (block)
cc1fe44f 4047 {
a20612aa 4048 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 4049 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 4050 block = BLOCK_CHAIN (block);
cc1fe44f
DD
4051 }
4052}
4053
0a1c58a2 4054static void
2c217442 4055reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
0a1c58a2
JL
4056{
4057 rtx insn;
4058
4059 for (insn = insns; insn; insn = NEXT_INSN (insn))
4060 {
4b4bf941 4061 if (NOTE_P (insn))
0a1c58a2 4062 {
a38e7aa5 4063 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
0a1c58a2
JL
4064 {
4065 tree block = NOTE_BLOCK (insn);
51b7d006
DJ
4066 tree origin;
4067
3373692b
JJ
4068 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4069 origin = block;
a20612aa
RH
4070
4071 /* If we have seen this block before, that means it now
4072 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
4073 if (TREE_ASM_WRITTEN (block))
4074 {
a20612aa 4075 tree new_block = copy_node (block);
a20612aa 4076
a20612aa
RH
4077 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4078 BLOCK_FRAGMENT_CHAIN (new_block)
4079 = BLOCK_FRAGMENT_CHAIN (origin);
4080 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4081
4082 NOTE_BLOCK (insn) = new_block;
4083 block = new_block;
0a1c58a2 4084 }
a20612aa 4085
0a1c58a2
JL
4086 BLOCK_SUBBLOCKS (block) = 0;
4087 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
4088 /* When there's only one block for the entire function,
4089	       current_block == block and we mustn't do this; it
4090	       would cause infinite recursion.  */
4091 if (block != current_block)
4092 {
51b7d006
DJ
4093 if (block != origin)
4094 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4095
339a28b9
ZW
4096 BLOCK_SUPERCONTEXT (block) = current_block;
4097 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4098 BLOCK_SUBBLOCKS (current_block) = block;
51b7d006 4099 current_block = origin;
339a28b9 4100 }
2c217442 4101 VEC_safe_push (tree, heap, *p_block_stack, block);
0a1c58a2 4102 }
a38e7aa5 4103 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
0a1c58a2 4104 {
2c217442 4105 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
0a1c58a2
JL
4106 current_block = BLOCK_SUPERCONTEXT (current_block);
4107 }
4108 }
0a1c58a2
JL
4109 }
4110}
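/* Example of the fragment machinery above (hypothetical): when an
   optimization such as hot/cold partitioning emits a second
   NOTE_INSN_BLOCK_BEG for one BLOCK, the second sighting creates a copy
   whose BLOCK_FRAGMENT_ORIGIN points back at the original, and the copy
   is pushed onto the origin's BLOCK_FRAGMENT_CHAIN.  */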
4111
467456d0
RS
4112/* Reverse the order of elements in the chain T of blocks,
4113 and return the new head of the chain (old last element). */
4114
6de9cd9a 4115tree
fa8db1f7 4116blocks_nreverse (tree t)
467456d0 4117{
3373692b
JJ
4118 tree prev = 0, block, next;
4119 for (block = t; block; block = next)
467456d0 4120 {
3373692b
JJ
4121 next = BLOCK_CHAIN (block);
4122 BLOCK_CHAIN (block) = prev;
4123 prev = block;
467456d0
RS
4124 }
4125 return prev;
4126}
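/* E.g., a chain A -> B -> C comes back as C -> B -> A; only the
   BLOCK_CHAIN links are rewritten, no blocks are copied.  */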
4127
18c038b9
MM
4128/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4129 non-NULL, list them all into VECTOR, in a depth-first preorder
4130 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 4131 blocks. */
467456d0
RS
4132
4133static int
fa8db1f7 4134all_blocks (tree block, tree *vector)
467456d0 4135{
b2a59b15
MS
4136 int n_blocks = 0;
4137
a84efb51
JO
4138 while (block)
4139 {
4140 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 4141
a84efb51
JO
4142 /* Record this block. */
4143 if (vector)
4144 vector[n_blocks] = block;
b2a59b15 4145
a84efb51 4146 ++n_blocks;
718fe406 4147
a84efb51
JO
4148 /* Record the subblocks, and their subblocks... */
4149 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4150 vector ? vector + n_blocks : 0);
4151 block = BLOCK_CHAIN (block);
4152 }
467456d0
RS
4153
4154 return n_blocks;
4155}
18c038b9
MM
4156
4157/* Return a vector containing all the blocks rooted at BLOCK. The
4158 number of elements in the vector is stored in N_BLOCKS_P. The
4159 vector is dynamically allocated; it is the caller's responsibility
4160 to call `free' on the pointer returned. */
718fe406 4161
18c038b9 4162static tree *
fa8db1f7 4163get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
4164{
4165 tree *block_vector;
4166
4167 *n_blocks_p = all_blocks (block, NULL);
5ed6ace5 4168 block_vector = XNEWVEC (tree, *n_blocks_p);
18c038b9
MM
4169 all_blocks (block, block_vector);
4170
4171 return block_vector;
4172}
4173
f83b236e 4174static GTY(()) int next_block_index = 2;
18c038b9
MM
4175
4176/* Set BLOCK_NUMBER for all the blocks in FN. */
4177
4178void
fa8db1f7 4179number_blocks (tree fn)
18c038b9
MM
4180{
4181 int i;
4182 int n_blocks;
4183 tree *block_vector;
4184
4185 /* For SDB and XCOFF debugging output, we start numbering the blocks
4186 from 1 within each function, rather than keeping a running
4187 count. */
4188#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
4189 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4190 next_block_index = 1;
18c038b9
MM
4191#endif
4192
4193 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4194
4195 /* The top-level BLOCK isn't numbered at all. */
4196 for (i = 1; i < n_blocks; ++i)
4197 /* We number the blocks from two. */
4198 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4199
4200 free (block_vector);
4201
4202 return;
4203}
df8992f8
RH
4204
4205/* If VAR is present in a subblock of BLOCK, return the subblock. */
4206
24e47c76 4207DEBUG_FUNCTION tree
fa8db1f7 4208debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
4209{
4210 tree t;
4211
4212 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4213 if (t == var)
4214 return block;
4215
4216 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4217 {
4218 tree ret = debug_find_var_in_block_tree (var, t);
4219 if (ret)
4220 return ret;
4221 }
4222
4223 return NULL_TREE;
4224}
467456d0 4225\f
db2960f4
SL
4226/* Keep track of whether we're in a dummy function context. If we are,
4227 we don't want to invoke the set_current_function hook, because we'll
4228 get into trouble if the hook calls target_reinit () recursively or
4229 when the initial initialization is not yet complete. */
4230
4231static bool in_dummy_function;
4232
ab442df7
MM
4233/* Invoke the target hook when setting cfun. Update the optimization options
4234 if the function uses different options than the default. */
db2960f4
SL
4235
4236static void
4237invoke_set_current_function_hook (tree fndecl)
4238{
4239 if (!in_dummy_function)
ab442df7
MM
4240 {
4241 tree opts = ((fndecl)
4242 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4243 : optimization_default_node);
4244
4245 if (!opts)
4246 opts = optimization_default_node;
4247
4248 /* Change optimization options if needed. */
4249 if (optimization_current_node != opts)
4250 {
4251 optimization_current_node = opts;
4252 cl_optimization_restore (TREE_OPTIMIZATION (opts));
4253 }
4254
4255 targetm.set_current_function (fndecl);
4256 }
db2960f4
SL
4257}
4258
4259/* cfun should never be set directly; use this function. */
4260
4261void
4262set_cfun (struct function *new_cfun)
4263{
4264 if (cfun != new_cfun)
4265 {
4266 cfun = new_cfun;
4267 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4268 }
4269}
4270
db2960f4
SL
4271/* Initialized with NOGC, making this poisonous to the garbage collector. */
4272
4273static VEC(function_p,heap) *cfun_stack;
4274
4275/* Push the current cfun onto the stack, and set cfun to new_cfun. */
4276
4277void
4278push_cfun (struct function *new_cfun)
4279{
4280 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4281 set_cfun (new_cfun);
4282}
4283
4284/* Pop cfun from the stack. */
4285
4286void
4287pop_cfun (void)
4288{
38d34676 4289 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
38d34676 4290 set_cfun (new_cfun);
db2960f4 4291}
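/* Typical pairing (illustrative):
   push_cfun (DECL_STRUCT_FUNCTION (fn)); ... work on FN ...; pop_cfun ();  */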
3e87758a
RL
4292
4293/* Return the current value of funcdef_no and increment it.  */
4294int
b8698a0f 4295get_next_funcdef_no (void)
3e87758a
RL
4296{
4297 return funcdef_no++;
4298}
4299
3a70d621 4300/* Allocate a function structure for FNDECL and set its contents
db2960f4
SL
4301 to the defaults. Set cfun to the newly-allocated object.
4302 Some of the helper functions invoked during initialization assume
4303 that cfun has already been set. Therefore, assign the new object
4304 directly into cfun and invoke the back end hook explicitly at the
4305 very end, rather than initializing a temporary and calling set_cfun
4306 on it.
182e0d71
AK
4307
4308 ABSTRACT_P is true if this is a function that will never be seen by
4309 the middle-end. Such functions are front-end concepts (like C++
4310 function templates) that do not correspond directly to functions
4311 placed in object files. */
7a80cf9a 4312
3a70d621 4313void
182e0d71 4314allocate_struct_function (tree fndecl, bool abstract_p)
6f086dfc 4315{
3a70d621 4316 tree result;
6de9cd9a 4317 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 4318
a9429e29 4319 cfun = ggc_alloc_cleared_function ();
b384405b 4320
3a70d621 4321 init_eh_for_function ();
6f086dfc 4322
3a70d621
RH
4323 if (init_machine_status)
4324 cfun->machine = (*init_machine_status) ();
e2ecd91c 4325
7c800926
KT
4326#ifdef OVERRIDE_ABI_FORMAT
4327 OVERRIDE_ABI_FORMAT (fndecl);
4328#endif
4329
179d2f74
RH
4330 invoke_set_current_function_hook (fndecl);
4331
81464b2c 4332 if (fndecl != NULL_TREE)
3a70d621 4333 {
db2960f4
SL
4334 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4335 cfun->decl = fndecl;
70cf5bc1 4336 current_function_funcdef_no = get_next_funcdef_no ();
db2960f4
SL
4337
4338 result = DECL_RESULT (fndecl);
182e0d71 4339 if (!abstract_p && aggregate_value_p (result, fndecl))
db2960f4 4340 {
3a70d621 4341#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4342 cfun->returns_pcc_struct = 1;
3a70d621 4343#endif
e3b5732b 4344 cfun->returns_struct = 1;
db2960f4
SL
4345 }
4346
f38958e8 4347 cfun->stdarg = stdarg_p (fntype);
b8698a0f 4348
db2960f4
SL
4349 /* Assume all registers in stdarg functions need to be saved. */
4350 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4351 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
8f4f502f
EB
4352
4353 /* ??? This could be set on a per-function basis by the front-end
4354 but is this worth the hassle? */
4355 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
3a70d621 4356 }
db2960f4
SL
4357}
4358
4359/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4360 instead of just setting it. */
9d30f3c1 4361
db2960f4
SL
4362void
4363push_struct_function (tree fndecl)
4364{
4365 VEC_safe_push (function_p, heap, cfun_stack, cfun);
182e0d71 4366 allocate_struct_function (fndecl, false);
3a70d621 4367}
6f086dfc 4368
8f4f502f 4369/* Reset crtl and other non-struct-function variables to defaults as
2067c116 4370 appropriate for emitting rtl at the start of a function. */
6f086dfc 4371
3a70d621 4372static void
db2960f4 4373prepare_function_start (void)
3a70d621 4374{
3e029763 4375 gcc_assert (!crtl->emit.x_last_insn);
fb0703f7 4376 init_temp_slots ();
0de456a5 4377 init_emit ();
bd60bab2 4378 init_varasm_status ();
0de456a5 4379 init_expr ();
bf08ebeb 4380 default_rtl_profile ();
6f086dfc 4381
d3c12306
EB
4382 if (flag_stack_usage)
4383 {
4384 cfun->su = ggc_alloc_cleared_stack_usage ();
4385 cfun->su->static_stack_size = -1;
4386 }
4387
3a70d621 4388 cse_not_expected = ! optimize;
6f086dfc 4389
3a70d621
RH
4390 /* Caller save not needed yet. */
4391 caller_save_needed = 0;
6f086dfc 4392
3a70d621
RH
4393 /* We haven't done register allocation yet. */
4394 reg_renumber = 0;
6f086dfc 4395
b384405b
BS
4396 /* Indicate that we have not instantiated virtual registers yet. */
4397 virtuals_instantiated = 0;
4398
1b3d8f8a
GK
4399 /* Indicate that we want CONCATs now. */
4400 generating_concat_p = 1;
4401
b384405b
BS
4402 /* Indicate we have no need of a frame pointer yet. */
4403 frame_pointer_needed = 0;
b384405b
BS
4404}
4405
4406/* Initialize the rtl expansion mechanism so that we can do simple things
4407 like generate sequences. This is used to provide a context during global
db2960f4
SL
4408 initialization of some passes. You must call expand_dummy_function_end
4409 to exit this context. */
4410
b384405b 4411void
fa8db1f7 4412init_dummy_function_start (void)
b384405b 4413{
db2960f4
SL
4414 gcc_assert (!in_dummy_function);
4415 in_dummy_function = true;
4416 push_struct_function (NULL_TREE);
4417 prepare_function_start ();
b384405b
BS
4418}
4419
4420/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4421 and initialize static variables for generating RTL for the statements
4422 of the function. */
4423
4424void
fa8db1f7 4425init_function_start (tree subr)
b384405b 4426{
db2960f4
SL
4427 if (subr && DECL_STRUCT_FUNCTION (subr))
4428 set_cfun (DECL_STRUCT_FUNCTION (subr));
4429 else
182e0d71 4430 allocate_struct_function (subr, false);
db2960f4 4431 prepare_function_start ();
b384405b 4432
6f086dfc
RS
4433 /* Warn if this value is an aggregate type,
4434 regardless of which calling convention we are using for it. */
ccf08a6e
DD
4435 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4436 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 4437}
5c7675e9 4438
cd9c1ca8 4439/* Make sure all values used by the optimization passes have sane defaults. */
c2924966 4440unsigned int
fa8db1f7 4441init_function_for_compilation (void)
49ad7cfa
BS
4442{
4443 reg_renumber = 0;
c2924966 4444 return 0;
6f086dfc
RS
4445}
4446
8ddbbcae 4447struct rtl_opt_pass pass_init_function =
ef330312 4448{
8ddbbcae
JH
4449 {
4450 RTL_PASS,
e0a42b0f 4451 "*init_function", /* name */
b8698a0f
L
4452 NULL, /* gate */
4453 init_function_for_compilation, /* execute */
ef330312
PB
4454 NULL, /* sub */
4455 NULL, /* next */
4456 0, /* static_pass_number */
7072a650 4457 TV_NONE, /* tv_id */
ef330312
PB
4458 0, /* properties_required */
4459 0, /* properties_provided */
4460 0, /* properties_destroyed */
4461 0, /* todo_flags_start */
8ddbbcae
JH
4462 0 /* todo_flags_finish */
4463 }
ef330312
PB
4464};
4465
4466
6f086dfc 4467void
fa8db1f7 4468expand_main_function (void)
6f086dfc 4469{
3a57c6cb
MM
4470#if (defined(INVOKE__main) \
4471 || (!defined(HAS_INIT_SECTION) \
4472 && !defined(INIT_SECTION_ASM_OP) \
4473 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
68d28100 4474 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
1d482056 4475#endif
6f086dfc
RS
4476}
4477\f
7d69de61
RH
4478/* Expand code to initialize the stack_protect_guard. This is invoked at
4479 the beginning of a function to be protected. */
4480
4481#ifndef HAVE_stack_protect_set
4482# define HAVE_stack_protect_set 0
4483# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4484#endif
4485
4486void
4487stack_protect_prologue (void)
4488{
4489 tree guard_decl = targetm.stack_protect_guard ();
4490 rtx x, y;
4491
08d4cc33
RH
4492 x = expand_normal (crtl->stack_protect_guard);
4493 y = expand_normal (guard_decl);
7d69de61
RH
4494
4495 /* Allow the target to copy from Y to X without leaking Y into a
4496 register. */
4497 if (HAVE_stack_protect_set)
4498 {
4499 rtx insn = gen_stack_protect_set (x, y);
4500 if (insn)
4501 {
4502 emit_insn (insn);
4503 return;
4504 }
4505 }
4506
4507 /* Otherwise do a straight move. */
4508 emit_move_insn (x, y);
4509}
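/* In C terms the prologue/epilogue pair behaves roughly like this
   sketch (an informal illustration, not the actual emitted rtl):

       canary = __guard;          // prologue, above
       ... function body ...
       if (canary != __guard)     // epilogue, below
         __stack_chk_fail ();     // typical stack_protect_fail expansion

   where __guard stands for the target's stack_protect_guard
   location.  */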
4510
4511/* Expand code to verify the stack_protect_guard. This is invoked at
4512 the end of a function to be protected. */
4513
4514#ifndef HAVE_stack_protect_test
b76be05e
JJ
4515# define HAVE_stack_protect_test 0
4516# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
7d69de61
RH
4517#endif
4518
b755446c 4519void
7d69de61
RH
4520stack_protect_epilogue (void)
4521{
4522 tree guard_decl = targetm.stack_protect_guard ();
4523 rtx label = gen_label_rtx ();
4524 rtx x, y, tmp;
4525
08d4cc33
RH
4526 x = expand_normal (crtl->stack_protect_guard);
4527 y = expand_normal (guard_decl);
7d69de61
RH
4528
4529 /* Allow the target to compare Y with X without leaking either into
4530 a register. */
4531 switch (HAVE_stack_protect_test != 0)
4532 {
4533 case 1:
3aebbe5f 4534 tmp = gen_stack_protect_test (x, y, label);
7d69de61
RH
4535 if (tmp)
4536 {
4537 emit_insn (tmp);
7d69de61
RH
4538 break;
4539 }
4540 /* FALLTHRU */
4541
4542 default:
4543 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4544 break;
4545 }
4546
4547 /* The noreturn predictor has been moved to the tree level. The rtl-level
4548 predictors estimate this branch about 20%, which isn't enough to get
4549 things moved out of line. Since this is the only extant case of adding
4550     a noreturn function at the rtl level, it doesn't seem worth doing anything
4551 except adding the prediction by hand. */
4552 tmp = get_last_insn ();
4553 if (JUMP_P (tmp))
4554 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4555
4556 expand_expr_stmt (targetm.stack_protect_fail ());
4557 emit_label (label);
4558}
4559\f
6f086dfc
RS
4560/* Start the RTL for a new function, and set variables used for
4561 emitting RTL.
4562   SUBR is the FUNCTION_DECL node.  */
4565
4566void
b79c5284 4567expand_function_start (tree subr)
6f086dfc 4568{
6f086dfc
RS
4569 /* Make sure volatile mem refs aren't considered
4570 valid operands of arithmetic insns. */
4571 init_recog_no_volatile ();
4572
e3b5732b 4573 crtl->profile
70f4f91c
WC
4574 = (profile_flag
4575 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4576
e3b5732b 4577 crtl->limit_stack
a157febd
GK
4578 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4579
52a11cbf
RH
4580 /* Make the label for return statements to jump to. Do not special
4581 case machines with special return instructions -- they will be
4582 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 4583 return_label = gen_label_rtx ();
6f086dfc
RS
4584
4585 /* Initialize rtx used to return the value. */
4586 /* Do this before assign_parms so that we copy the struct value address
4587 before any library calls that assign parms might generate. */
4588
4589 /* Decide whether to return the value in memory or in a register. */
61f71b34 4590 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
4591 {
4592 /* Returning something that won't go in a register. */
b3694847 4593 rtx value_address = 0;
6f086dfc
RS
4594
4595#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4596 if (cfun->returns_pcc_struct)
6f086dfc
RS
4597 {
4598 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4599 value_address = assemble_static_space (size);
4600 }
4601 else
4602#endif
4603 {
2225b57c 4604 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
6f086dfc
RS
4605 /* Expect to be passed the address of a place to store the value.
4606 If it is passed as an argument, assign_parms will take care of
4607 it. */
61f71b34 4608 if (sv)
6f086dfc
RS
4609 {
4610 value_address = gen_reg_rtx (Pmode);
61f71b34 4611 emit_move_insn (value_address, sv);
6f086dfc
RS
4612 }
4613 }
4614 if (value_address)
ccdecf58 4615 {
01c98570
JM
4616 rtx x = value_address;
4617 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4618 {
4619 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4620 set_mem_attributes (x, DECL_RESULT (subr), 1);
4621 }
abde42f7 4622 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 4623 }
6f086dfc
RS
4624 }
4625 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4626 /* If return mode is void, this decl rtl should not be used. */
19e7881c 4627 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 4628 else
a53e14c0 4629 {
d5bf1143
RH
4630 /* Compute the return values into a pseudo reg, which we will copy
4631 into the true return register after the cleanups are done. */
bef5d8b6
RS
4632 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4633 if (TYPE_MODE (return_type) != BLKmode
4634 && targetm.calls.return_in_msb (return_type))
4635 /* expand_function_end will insert the appropriate padding in
4636 this case. Use the return value's natural (unpadded) mode
4637 within the function proper. */
4638 SET_DECL_RTL (DECL_RESULT (subr),
4639 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 4640 else
0bccc606 4641 {
bef5d8b6
RS
4642 /* In order to figure out what mode to use for the pseudo, we
4643 figure out what the mode of the eventual return register will
4644 actually be, and use that. */
1d636cc6 4645 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
bef5d8b6
RS
4646
4647 /* Structures that are returned in registers are not
4648 aggregate_value_p, so we may see a PARALLEL or a REG. */
4649 if (REG_P (hard_reg))
4650 SET_DECL_RTL (DECL_RESULT (subr),
4651 gen_reg_rtx (GET_MODE (hard_reg)));
4652 else
4653 {
4654 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4655 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4656 }
0bccc606 4657 }
a53e14c0 4658
084a1106
JDA
4659 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4660 result to the real return register(s). */
4661 DECL_REGISTER (DECL_RESULT (subr)) = 1;
a53e14c0 4662 }
6f086dfc
RS
4663
4664 /* Initialize rtx for parameters and local variables.
4665 In some cases this requires emitting insns. */
0d1416c6 4666 assign_parms (subr);
6f086dfc 4667
6de9cd9a
DN
4668 /* If function gets a static chain arg, store it. */
4669 if (cfun->static_chain_decl)
4670 {
7e140280 4671 tree parm = cfun->static_chain_decl;
531ca746 4672 rtx local, chain, insn;
7e140280 4673
531ca746
RH
4674 local = gen_reg_rtx (Pmode);
4675 chain = targetm.calls.static_chain (current_function_decl, true);
4676
4677 set_decl_incoming_rtl (parm, chain, false);
7e140280 4678 SET_DECL_RTL (parm, local);
7e140280 4679 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 4680
531ca746
RH
4681 insn = emit_move_insn (local, chain);
4682
4683 /* Mark the register as eliminable, similar to parameters. */
4684 if (MEM_P (chain)
4685 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4686 set_unique_reg_note (insn, REG_EQUIV, chain);
6de9cd9a
DN
4687 }
4688
4689 /* If the function receives a non-local goto, then store the
4690 bits we need to restore the frame pointer. */
4691 if (cfun->nonlocal_goto_save_area)
4692 {
4693 tree t_save;
4694 rtx r_save;
4695
4696 /* ??? We need to do this save early. Unfortunately here is
4697 before the frame variable gets declared. Help out... */
4846b435
PB
4698 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4699 if (!DECL_RTL_SET_P (var))
4700 expand_decl (var);
6de9cd9a 4701
3244e67d
RS
4702 t_save = build4 (ARRAY_REF, ptr_type_node,
4703 cfun->nonlocal_goto_save_area,
4704 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 4705 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5e89a381 4706 r_save = convert_memory_address (Pmode, r_save);
f0c51a1e 4707
88280cf9 4708 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
6de9cd9a
DN
4709 update_nonlocal_goto_save_area ();
4710 }
f0c51a1e 4711
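/* An illustrative sketch, not part of this file: GNU C source that
   exercises both blocks above.  The nested function (a hypothetical
   'check') receives a static chain because it reads 'n' from the
   enclosing frame, and because it jumps to a label declared with
   __label__ in the enclosing function, the enclosing function gets a
   nonlocal_goto_save_area.  */
#if 0
static int
example_nonlocal_goto (int n)
{
  __label__ fail;			/* label visible to nested functions */
  void check (int i)			/* needs the static chain to read 'n' */
  {
    if (i > n)
      goto fail;			/* non-local goto */
  }
  check (n - 1);
  return 0;
 fail:
  return -1;
}
#endif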
6f086dfc
RS
4712 /* The following was moved from init_function_start.
4713 The move is supposed to make sdb output more accurate. */
4714 /* Indicate the beginning of the function body,
4715 as opposed to parm setup. */
2e040219 4716 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 4717
ede497cf
SB
4718 gcc_assert (NOTE_P (get_last_insn ()));
4719
6f086dfc
RS
4720 parm_birth_insn = get_last_insn ();
4721
e3b5732b 4722 if (crtl->profile)
f6f315fe 4723 {
f6f315fe 4724#ifdef PROFILE_HOOK
df696a75 4725 PROFILE_HOOK (current_function_funcdef_no);
411707f4 4726#endif
f6f315fe 4727 }
411707f4 4728
ede497cf
SB
4729 /* After the display initializations is where the stack checking
4730 probe should go. */
4731 if (flag_stack_check)
4732 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
6f086dfc 4733
6f086dfc
RS
4734 /* Make sure there is a line number after the function entry setup code. */
4735 force_next_line_note ();
4736}
4737\f
49ad7cfa
BS
4738/* Undo the effects of init_dummy_function_start. */
4739void
fa8db1f7 4740expand_dummy_function_end (void)
49ad7cfa 4741{
db2960f4
SL
4742 gcc_assert (in_dummy_function);
4743
49ad7cfa
BS
4744 /* End any sequences that failed to be closed due to syntax errors. */
4745 while (in_sequence_p ())
4746 end_sequence ();
4747
4748 /* Outside function body, can't compute type's actual size
4749 until next function's body starts. */
fa51b01b 4750
01d939e8
BS
4751 free_after_parsing (cfun);
4752 free_after_compilation (cfun);
db2960f4
SL
4753 pop_cfun ();
4754 in_dummy_function = false;
49ad7cfa
BS
4755}
4756
c13fde05
RH
4757/* Call DOIT for each hard register used as a return value from
4758 the current function. */
bd695e1e
RH
4759
4760void
fa8db1f7 4761diddle_return_value (void (*doit) (rtx, void *), void *arg)
bd695e1e 4762{
38173d38 4763 rtx outgoing = crtl->return_rtx;
c13fde05
RH
4764
4765 if (! outgoing)
4766 return;
bd695e1e 4767
f8cfc6aa 4768 if (REG_P (outgoing))
c13fde05
RH
4769 (*doit) (outgoing, arg);
4770 else if (GET_CODE (outgoing) == PARALLEL)
4771 {
4772 int i;
bd695e1e 4773
c13fde05
RH
4774 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4775 {
4776 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4777
f8cfc6aa 4778 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 4779 (*doit) (x, arg);
bd695e1e
RH
4780 }
4781 }
4782}
4783
c13fde05 4784static void
fa8db1f7 4785do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4786{
c41c1387 4787 emit_clobber (reg);
c13fde05
RH
4788}
4789
4790void
fa8db1f7 4791clobber_return_register (void)
c13fde05
RH
4792{
4793 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
4794
4795 /* In case we do use pseudo to return value, clobber it too. */
4796 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4797 {
4798 tree decl_result = DECL_RESULT (current_function_decl);
4799 rtx decl_rtl = DECL_RTL (decl_result);
4800 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4801 {
4802 do_clobber_return_reg (decl_rtl, NULL);
4803 }
4804 }
c13fde05
RH
4805}
4806
4807static void
fa8db1f7 4808do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4809{
c41c1387 4810 emit_use (reg);
c13fde05
RH
4811}
4812
0bf8477d 4813static void
fa8db1f7 4814use_return_register (void)
c13fde05
RH
4815{
4816 diddle_return_value (do_use_return_reg, NULL);
4817}
4818
902edd36
JH
4819/* Possibly warn about unused parameters. */
4820void
4821do_warn_unused_parameter (tree fn)
4822{
4823 tree decl;
4824
4825 for (decl = DECL_ARGUMENTS (fn);
910ad8de 4826 decl; decl = DECL_CHAIN (decl))
902edd36 4827 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
534fd534
DF
4828 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4829 && !TREE_NO_WARNING (decl))
b9b8dde3 4830 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
902edd36
JH
4831}
4832
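/* An illustrative sketch, not part of this file: with -Wunused-parameter,
   the loop above warns for 'b' below ("unused parameter 'b'"), since it
   is a named, non-artificial PARM_DECL that is never marked TREE_USED.  */
#if 0
static int
example_unused_parm (int a, int b)	/* 'b' triggers the warning */
{
  return a * 2;
}
#endif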
e2500fed
GK
4833static GTY(()) rtx initial_trampoline;
4834
71c0e7fc 4835/* Generate RTL for the end of the current function. */
6f086dfc
RS
4836
4837void
fa8db1f7 4838expand_function_end (void)
6f086dfc 4839{
932f0847 4840 rtx clobber_after;
6f086dfc 4841
964be02f
RH
4842 /* If arg_pointer_save_area was referenced only from a nested
4843 function, we will not have initialized it yet. Do that now. */
e3b5732b 4844 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
bd60bab2 4845 get_arg_pointer_save_area ();
964be02f 4846
b38f3813 4847 /* If we are doing generic stack checking and this function makes calls,
11044f66
RK
4848 do a stack probe at the start of the function to ensure we have enough
4849 space for another stack frame. */
b38f3813 4850 if (flag_stack_check == GENERIC_STACK_CHECK)
11044f66
RK
4851 {
4852 rtx insn, seq;
4853
4854 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 4855 if (CALL_P (insn))
11044f66 4856 {
c35af30f 4857 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
11044f66 4858 start_sequence ();
c35af30f
EB
4859 if (STACK_CHECK_MOVING_SP)
4860 anti_adjust_stack_and_probe (max_frame_size, true);
4861 else
4862 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
11044f66
RK
4863 seq = get_insns ();
4864 end_sequence ();
ede497cf 4865 emit_insn_before (seq, stack_check_probe_note);
11044f66
RK
4866 break;
4867 }
4868 }
4869
6f086dfc
RS
4870 /* End any sequences that failed to be closed due to syntax errors. */
4871 while (in_sequence_p ())
5f4f0e22 4872 end_sequence ();
6f086dfc 4873
6f086dfc
RS
4874 clear_pending_stack_adjust ();
4875 do_pending_stack_adjust ();
4876
6f086dfc
RS
4877 /* Output a line number for the end of the function.
4878 SDB depends on this. */
0cea056b 4879 force_next_line_note ();
55e092c4 4880 set_curr_insn_source_location (input_location);
6f086dfc 4881
fbffc70a 4882 /* Before the return label (if any), clobber the return
a1f300c0 4883 registers so that they are not propagated live to the rest of
fbffc70a
GK
4884 the function. This can only happen with functions that drop
4885 through; if there had been a return statement, there would
932f0847
JH
4886 have either been a return rtx, or a jump to the return label.
4887
4888 We delay actual code generation until after the current_function_value_rtx
4889 is computed. */
4890 clobber_after = get_last_insn ();
fbffc70a 4891
526c334b
KH
4892 /* Output the label for the actual return from the function. */
4893 emit_label (return_label);
6f086dfc 4894
815eb8f0
AM
4895 if (USING_SJLJ_EXCEPTIONS)
4896 {
4897 /* Let except.c know where it should emit the call to unregister
4898 the function context for sjlj exceptions. */
4899 if (flag_exceptions)
4900 sjlj_emit_function_exit_after (get_last_insn ());
4901 }
6fb5fa3c
DB
4902 else
4903 {
4904 /* We want to ensure that instructions that may trap are not
4905 moved into the epilogue by scheduling, because we don't
4906 always emit unwind information for the epilogue. */
8f4f502f 4907 if (cfun->can_throw_non_call_exceptions)
6fb5fa3c
DB
4908 emit_insn (gen_blockage ());
4909 }
0b59e81e 4910
652b0932
RH
4911 /* If this is an implementation of throw, do what's necessary to
4912 communicate between __builtin_eh_return and the epilogue. */
4913 expand_eh_return ();
4914
3e4eac3f
RH
4915 /* If scalar return value was computed in a pseudo-reg, or was a named
4916 return value that got dumped to the stack, copy that to the hard
4917 return register. */
19e7881c 4918 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 4919 {
3e4eac3f
RH
4920 tree decl_result = DECL_RESULT (current_function_decl);
4921 rtx decl_rtl = DECL_RTL (decl_result);
4922
4923 if (REG_P (decl_rtl)
4924 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4925 : DECL_REGISTER (decl_result))
4926 {
38173d38 4927 rtx real_decl_rtl = crtl->return_rtx;
6f086dfc 4928
ce5e43d0 4929 /* This should be set in assign_parms. */
0bccc606 4930 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
4931
4932 /* If this is a BLKmode structure being returned in registers,
4933 then use the mode computed in expand_return. Note that if
797a6ac1 4934 decl_rtl is memory, then its mode may have been changed,
38173d38 4935 but that of crtl->return_rtx has not. */
3e4eac3f 4936 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 4937 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 4938
bef5d8b6
RS
4939 /* If a non-BLKmode return value should be padded at the least
4940 significant end of the register, shift it left by the appropriate
4941 amount. BLKmode results are handled using the group load/store
4942 machinery. */
4943 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4944 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4945 {
4946 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4947 REGNO (real_decl_rtl)),
4948 decl_rtl);
4949 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4950 }
3e4eac3f 4951 /* If a named return value was dumped to memory, then
797a6ac1 4952 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 4953 extension. */
bef5d8b6 4954 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 4955 {
8df83eae 4956 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
cde0f3fd
PB
4957 promote_function_mode (TREE_TYPE (decl_result),
4958 GET_MODE (decl_rtl), &unsignedp,
4959 TREE_TYPE (current_function_decl), 1);
3e4eac3f
RH
4960
4961 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4962 }
aa570f54 4963 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
4964 {
4965 /* If expand_function_start has created a PARALLEL for decl_rtl,
4966 move the result to the real return registers. Otherwise, do
4967 a group load from decl_rtl for a named return. */
4968 if (GET_CODE (decl_rtl) == PARALLEL)
4969 emit_group_move (real_decl_rtl, decl_rtl);
4970 else
4971 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 4972 TREE_TYPE (decl_result),
084a1106
JDA
4973 int_size_in_bytes (TREE_TYPE (decl_result)));
4974 }
652b0932
RH
4975 /* In the case of complex integer modes smaller than a word, we'll
4976 need to generate some non-trivial bitfield insertions. Do that
4977 on a pseudo and not the hard register. */
4978 else if (GET_CODE (decl_rtl) == CONCAT
4979 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4980 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4981 {
4982 int old_generating_concat_p;
4983 rtx tmp;
4984
4985 old_generating_concat_p = generating_concat_p;
4986 generating_concat_p = 0;
4987 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4988 generating_concat_p = old_generating_concat_p;
4989
4990 emit_move_insn (tmp, decl_rtl);
4991 emit_move_insn (real_decl_rtl, tmp);
4992 }
3e4eac3f
RH
4993 else
4994 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 4995 }
6f086dfc
RS
4996 }
4997
4998 /* If returning a structure, arrange to return the address of the value
4999 in a place where debuggers expect to find it.
5000
5001 If returning a structure PCC style,
5002 the caller also depends on this value.
e3b5732b
JH
5003 And cfun->returns_pcc_struct is not necessarily set. */
5004 if (cfun->returns_struct
5005 || cfun->returns_pcc_struct)
6f086dfc 5006 {
cc77ae10 5007 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 5008 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
5009 rtx outgoing;
5010
5011 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5012 type = TREE_TYPE (type);
5013 else
5014 value_address = XEXP (value_address, 0);
5015
1d636cc6
RG
5016 outgoing = targetm.calls.function_value (build_pointer_type (type),
5017 current_function_decl, true);
6f086dfc
RS
5018
5019 /* Mark this as a function return value so integrate will delete the
5020 assignment and USE below when inlining this function. */
5021 REG_FUNCTION_VALUE_P (outgoing) = 1;
5022
d1608933 5023 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
5024 value_address = convert_memory_address (GET_MODE (outgoing),
5025 value_address);
d1608933 5026
6f086dfc 5027 emit_move_insn (outgoing, value_address);
d1608933
RK
5028
5029 /* Show return register used to hold result (in this case the address
5030 of the result). */
38173d38 5031 crtl->return_rtx = outgoing;
6f086dfc
RS
5032 }
5033
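/* An illustrative sketch, not part of this file: a function for which
   cfun->returns_struct is set on typical targets.  The caller passes the
   address of the result object, and the code above copies that address
   into the pointer-valued return register where debuggers (and PCC style
   callers) expect to find it.  */
#if 0
struct big { int a[16]; };

static struct big
example_struct_return (void)
{
  struct big b = { { 0 } };
  return b;			/* returned via a hidden result pointer */
}
#endif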
932f0847
JH
5034 /* Emit the actual code to clobber return register. */
5035 {
609c3937 5036 rtx seq;
797a6ac1 5037
932f0847
JH
5038 start_sequence ();
5039 clobber_return_register ();
2f937369 5040 seq = get_insns ();
932f0847
JH
5041 end_sequence ();
5042
609c3937 5043 emit_insn_after (seq, clobber_after);
932f0847
JH
5044 }
5045
609c3937 5046 /* Output the label for the naked return from the function. */
4c33221c
UW
5047 if (naked_return_label)
5048 emit_label (naked_return_label);
6e3077c6 5049
25108646
AH
5050 /* @@@ This is a kludge. We want to ensure that instructions that
5051 may trap are not moved into the epilogue by scheduling, because
56d17681 5052 we don't always emit unwind information for the epilogue. */
8f4f502f 5053 if (!USING_SJLJ_EXCEPTIONS && cfun->can_throw_non_call_exceptions)
56d17681 5054 emit_insn (gen_blockage ());
25108646 5055
7d69de61 5056 /* If stack protection is enabled for this function, check the guard. */
cb91fab0 5057 if (crtl->stack_protect_guard)
7d69de61
RH
5058 stack_protect_epilogue ();
5059
40184445
BS
5060 /* If we had calls to alloca, and this machine needs
5061 an accurate stack pointer to exit the function,
5062 insert some code to save and restore the stack pointer. */
5063 if (! EXIT_IGNORE_STACK
e3b5732b 5064 && cfun->calls_alloca)
40184445
BS
5065 {
5066 rtx tem = 0;
5067
5068 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5069 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5070 }
5071
c13fde05
RH
5072 /* ??? This should no longer be necessary since the old "stupid" register
5073 allocator is no longer with us, but there are some parts of the compiler
5074 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
5075 their own lifetime info instead of using the general framework. */
5076 use_return_register ();
6f086dfc 5077}
278ed218
RH
5078
5079rtx
bd60bab2 5080get_arg_pointer_save_area (void)
278ed218 5081{
bd60bab2 5082 rtx ret = arg_pointer_save_area;
278ed218
RH
5083
5084 if (! ret)
5085 {
bd60bab2
JH
5086 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5087 arg_pointer_save_area = ret;
964be02f
RH
5088 }
5089
e3b5732b 5090 if (! crtl->arg_pointer_save_area_init)
964be02f
RH
5091 {
5092 rtx seq;
278ed218 5093
797a6ac1 5094 /* Save the arg pointer at the beginning of the function. The
964be02f 5095 generated stack slot may not be a valid memory address, so we
278ed218
RH
5096 have to check it and fix it if necessary. */
5097 start_sequence ();
2e3f842f
L
5098 emit_move_insn (validize_mem (ret),
5099 crtl->args.internal_arg_pointer);
2f937369 5100 seq = get_insns ();
278ed218
RH
5101 end_sequence ();
5102
964be02f 5103 push_topmost_sequence ();
1cb2fc7b 5104 emit_insn_after (seq, entry_of_function ());
964be02f 5105 pop_topmost_sequence ();
278ed218
RH
5106 }
5107
5108 return ret;
5109}
bdac5f58 5110\f
cd9c1ca8
RH
5111/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5112 for the first time. */
bdac5f58 5113
0a1c58a2 5114static void
cd9c1ca8 5115record_insns (rtx insns, rtx end, htab_t *hashp)
bdac5f58 5116{
2f937369 5117 rtx tmp;
cd9c1ca8 5118 htab_t hash = *hashp;
0a1c58a2 5119
cd9c1ca8
RH
5120 if (hash == NULL)
5121 *hashp = hash
5122 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5123
5124 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5125 {
5126 void **slot = htab_find_slot (hash, tmp, INSERT);
5127 gcc_assert (*slot == NULL);
5128 *slot = tmp;
5129 }
5130}
5131
5132/* INSN has been duplicated as COPY, as part of duplicating a basic block.
5133 If INSN is an epilogue insn, then record COPY as epilogue as well. */
5134
5135void
5136maybe_copy_epilogue_insn (rtx insn, rtx copy)
5137{
5138 void **slot;
5139
5140 if (epilogue_insn_hash == NULL
5141 || htab_find (epilogue_insn_hash, insn) == NULL)
5142 return;
5143
5144 slot = htab_find_slot (epilogue_insn_hash, copy, INSERT);
5145 gcc_assert (*slot == NULL);
5146 *slot = copy;
bdac5f58
TW
5147}
5148
589fe865 5149/* Set the locator of the insn chain starting at INSN to LOC. */
0435312e 5150static void
fa8db1f7 5151set_insn_locators (rtx insn, int loc)
0435312e
JH
5152{
5153 while (insn != NULL_RTX)
5154 {
5155 if (INSN_P (insn))
5156 INSN_LOCATOR (insn) = loc;
5157 insn = NEXT_INSN (insn);
5158 }
5159}
5160
cd9c1ca8
RH
5161/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5162 we can be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 5163
cd9c1ca8
RH
5164static bool
5165contains (const_rtx insn, htab_t hash)
bdac5f58 5166{
cd9c1ca8
RH
5167 if (hash == NULL)
5168 return false;
bdac5f58 5169
cd9c1ca8 5170 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
bdac5f58 5171 {
cd9c1ca8 5172 int i;
bdac5f58 5173 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
cd9c1ca8
RH
5174 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5175 return true;
5176 return false;
bdac5f58 5177 }
cd9c1ca8
RH
5178
5179 return htab_find (hash, insn) != NULL;
bdac5f58 5180}
5c7675e9
RH
5181
5182int
4f588890 5183prologue_epilogue_contains (const_rtx insn)
5c7675e9 5184{
cd9c1ca8 5185 if (contains (insn, prologue_insn_hash))
5c7675e9 5186 return 1;
cd9c1ca8 5187 if (contains (insn, epilogue_insn_hash))
5c7675e9
RH
5188 return 1;
5189 return 0;
5190}
bdac5f58 5191
73ef99fb 5192#ifdef HAVE_return
69732dcb
RH
5193/* Insert gen_return at the end of block BB. This also means updating
5194 block_for_insn appropriately. */
5195
5196static void
6039a0c7 5197emit_return_into_block (basic_block bb)
69732dcb 5198{
a813c111 5199 emit_jump_insn_after (gen_return (), BB_END (bb));
69732dcb 5200}
73ef99fb 5201#endif /* HAVE_return */
69732dcb 5202
9faa82d8 5203/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58
TW
5204 this into place with notes indicating where the prologue ends and where
5205 the epilogue begins. Update the basic block information when possible. */
5206
6fb5fa3c
DB
5207static void
5208thread_prologue_and_epilogue_insns (void)
bdac5f58 5209{
ca1117cc 5210 int inserted = 0;
19d3c25c 5211 edge e;
91ea4f8d 5212#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
19d3c25c 5213 rtx seq;
91ea4f8d 5214#endif
86c82654
RH
5215#if defined (HAVE_epilogue) || defined(HAVE_return)
5216 rtx epilogue_end = NULL_RTX;
5217#endif
628f6a4e 5218 edge_iterator ei;
e881bb1b 5219
a8ba47cb 5220 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
bdac5f58
TW
5221#ifdef HAVE_prologue
5222 if (HAVE_prologue)
5223 {
e881bb1b 5224 start_sequence ();
718fe406 5225 seq = gen_prologue ();
e881bb1b 5226 emit_insn (seq);
bdac5f58 5227
b8698a0f 5228 /* Insert an explicit USE for the frame pointer
6fb5fa3c 5229 if the profiling is on and the frame pointer is required. */
e3b5732b 5230 if (crtl->profile && frame_pointer_needed)
c41c1387 5231 emit_use (hard_frame_pointer_rtx);
6fb5fa3c 5232
bdac5f58 5233 /* Retain a map of the prologue insns. */
cd9c1ca8 5234 record_insns (seq, NULL, &prologue_insn_hash);
56d17681 5235 emit_note (NOTE_INSN_PROLOGUE_END);
b8698a0f 5236
56d17681
UB
5237 /* Ensure that instructions are not moved into the prologue when
5238 profiling is on. The call to the profiling routine can be
5239 emitted within the live range of a call-clobbered register. */
3c5273a9 5240 if (!targetm.profile_before_prologue () && crtl->profile)
56d17681 5241 emit_insn (gen_blockage ());
9185a8d5 5242
2f937369 5243 seq = get_insns ();
e881bb1b 5244 end_sequence ();
0435312e 5245 set_insn_locators (seq, prologue_locator);
e881bb1b 5246
d6a7951f 5247 /* Can't deal with multiple successors of the entry block
75540af0
JH
5248 at the moment. Function should always have at least one
5249 entry point. */
c5cbcccf 5250 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
e881bb1b 5251
c5cbcccf 5252 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
75540af0 5253 inserted = 1;
bdac5f58 5254 }
bdac5f58 5255#endif
bdac5f58 5256
19d3c25c
RH
5257 /* If the exit block has no non-fake predecessors, we don't need
5258 an epilogue. */
628f6a4e 5259 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
19d3c25c
RH
5260 if ((e->flags & EDGE_FAKE) == 0)
5261 break;
5262 if (e == NULL)
5263 goto epilogue_done;
5264
a8ba47cb 5265 rtl_profile_for_bb (EXIT_BLOCK_PTR);
69732dcb
RH
5266#ifdef HAVE_return
5267 if (optimize && HAVE_return)
5268 {
5269 /* If we're allowed to generate a simple return instruction,
5270 then by definition we don't need a full epilogue. Examine
718fe406
KH
5271 the block that falls through to EXIT. If it does not
5272 contain any code, examine its predecessors and try to
69732dcb
RH
5273 emit (conditional) return instructions. */
5274
5275 basic_block last;
69732dcb
RH
5276 rtx label;
5277
628f6a4e 5278 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
69732dcb
RH
5279 if (e->flags & EDGE_FALLTHRU)
5280 break;
5281 if (e == NULL)
5282 goto epilogue_done;
5283 last = e->src;
5284
5285 /* Verify that there are no active instructions in the last block. */
a813c111 5286 label = BB_END (last);
4b4bf941 5287 while (label && !LABEL_P (label))
69732dcb
RH
5288 {
5289 if (active_insn_p (label))
5290 break;
5291 label = PREV_INSN (label);
5292 }
5293
4b4bf941 5294 if (BB_HEAD (last) == label && LABEL_P (label))
69732dcb 5295 {
628f6a4e 5296 edge_iterator ei2;
86c82654 5297
628f6a4e 5298 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
69732dcb
RH
5299 {
5300 basic_block bb = e->src;
5301 rtx jump;
5302
69732dcb 5303 if (bb == ENTRY_BLOCK_PTR)
628f6a4e
BE
5304 {
5305 ei_next (&ei2);
5306 continue;
5307 }
69732dcb 5308
a813c111 5309 jump = BB_END (bb);
4b4bf941 5310 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
628f6a4e
BE
5311 {
5312 ei_next (&ei2);
5313 continue;
5314 }
69732dcb
RH
5315
5316 /* If we have an unconditional jump, we can replace that
5317 with a simple return instruction. */
5318 if (simplejump_p (jump))
5319 {
6039a0c7 5320 emit_return_into_block (bb);
53c17031 5321 delete_insn (jump);
69732dcb
RH
5322 }
5323
5324 /* If we have a conditional jump, we can try to replace
5325 that with a conditional return instruction. */
5326 else if (condjump_p (jump))
5327 {
47009d11 5328 if (! redirect_jump (jump, 0, 0))
628f6a4e
BE
5329 {
5330 ei_next (&ei2);
5331 continue;
5332 }
718fe406 5333
3a75e42e
CP
5334 /* If this block has only one successor, it both jumps
5335 and falls through to the fallthru block, so we can't
5336 delete the edge. */
c5cbcccf 5337 if (single_succ_p (bb))
628f6a4e
BE
5338 {
5339 ei_next (&ei2);
5340 continue;
5341 }
69732dcb
RH
5342 }
5343 else
628f6a4e
BE
5344 {
5345 ei_next (&ei2);
5346 continue;
5347 }
69732dcb
RH
5348
5349 /* Fix up the CFG for the successful change we just made. */
86c82654 5350 redirect_edge_succ (e, EXIT_BLOCK_PTR);
69732dcb 5351 }
69732dcb 5352
2dd8bc01
GK
5353 /* Emit a return insn for the exit fallthru block. Whether
5354 this is still reachable will be determined later. */
69732dcb 5355
a813c111 5356 emit_barrier_after (BB_END (last));
6039a0c7 5357 emit_return_into_block (last);
a813c111 5358 epilogue_end = BB_END (last);
c5cbcccf 5359 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
718fe406 5360 goto epilogue_done;
2dd8bc01 5361 }
69732dcb
RH
5362 }
5363#endif
cd9c1ca8
RH
5364
5365 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5366 this marker for the splits of EH_RETURN patterns, and nothing else
5367 uses the flag in the meantime. */
5368 epilogue_completed = 1;
5369
5370#ifdef HAVE_eh_return
5371 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5372 some targets, these get split to a special version of the epilogue
5373 code. In order to be able to properly annotate these with unwind
5374 info, try to split them now. If we get a valid split, drop an
5375 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5376 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5377 {
5378 rtx prev, last, trial;
5379
5380 if (e->flags & EDGE_FALLTHRU)
5381 continue;
5382 last = BB_END (e->src);
5383 if (!eh_returnjump_p (last))
5384 continue;
5385
5386 prev = PREV_INSN (last);
5387 trial = try_split (PATTERN (last), last, 1);
5388 if (trial == last)
5389 continue;
5390
5391 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5392 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5393 }
5394#endif
5395
623a66fa
R
5396 /* Find the edge that falls through to EXIT. Other edges may exist
5397 due to RETURN instructions, but those don't need epilogues.
5398 There really shouldn't be a mixture -- either all should have
5399 been converted or none, however... */
5400
628f6a4e 5401 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
623a66fa
R
5402 if (e->flags & EDGE_FALLTHRU)
5403 break;
5404 if (e == NULL)
5405 goto epilogue_done;
5406
bdac5f58
TW
5407#ifdef HAVE_epilogue
5408 if (HAVE_epilogue)
5409 {
19d3c25c 5410 start_sequence ();
2e040219 5411 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
19d3c25c
RH
5412 seq = gen_epilogue ();
5413 emit_jump_insn (seq);
bdac5f58 5414
19d3c25c 5415 /* Retain a map of the epilogue insns. */
cd9c1ca8 5416 record_insns (seq, NULL, &epilogue_insn_hash);
0435312e 5417 set_insn_locators (seq, epilogue_locator);
bdac5f58 5418
2f937369 5419 seq = get_insns ();
718fe406 5420 end_sequence ();
e881bb1b 5421
19d3c25c 5422 insert_insn_on_edge (seq, e);
ca1117cc 5423 inserted = 1;
bdac5f58 5424 }
623a66fa 5425 else
bdac5f58 5426#endif
623a66fa
R
5427 {
5428 basic_block cur_bb;
5429
5430 if (! next_active_insn (BB_END (e->src)))
5431 goto epilogue_done;
5432 /* We have a fall-through edge to the exit block, the source is not
5433 at the end of the function, and there will be an assembler epilogue
5434 at the end of the function.
5435 We can't use force_nonfallthru here, because that would try to
5436 use return. Inserting a jump 'by hand' is extremely messy, so
5437 we take advantage of cfg_layout_finalize using
5438 fixup_fallthru_exit_predecessor. */
35b6b437 5439 cfg_layout_initialize (0);
623a66fa 5440 FOR_EACH_BB (cur_bb)
24bd1a0b
DB
5441 if (cur_bb->index >= NUM_FIXED_BLOCKS
5442 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 5443 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
5444 cfg_layout_finalize ();
5445 }
19d3c25c 5446epilogue_done:
a8ba47cb 5447 default_rtl_profile ();
e881bb1b 5448
ca1117cc 5449 if (inserted)
30a873c3
ZD
5450 {
5451 commit_edge_insertions ();
5452
5453 /* The epilogue insns we inserted may cause the exit edge to no longer
5454 be fallthru. */
5455 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5456 {
5457 if (((e->flags & EDGE_FALLTHRU) != 0)
5458 && returnjump_p (BB_END (e->src)))
5459 e->flags &= ~EDGE_FALLTHRU;
5460 }
5461 }
0a1c58a2
JL
5462
5463#ifdef HAVE_sibcall_epilogue
5464 /* Emit sibling epilogues before any sibling call sites. */
628f6a4e 5465 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
0a1c58a2
JL
5466 {
5467 basic_block bb = e->src;
a813c111 5468 rtx insn = BB_END (bb);
0a1c58a2 5469
4b4bf941 5470 if (!CALL_P (insn)
0a1c58a2 5471 || ! SIBLING_CALL_P (insn))
628f6a4e
BE
5472 {
5473 ei_next (&ei);
5474 continue;
5475 }
0a1c58a2
JL
5476
5477 start_sequence ();
cd9c1ca8 5478 emit_note (NOTE_INSN_EPILOGUE_BEG);
0af5c896
RE
5479 emit_insn (gen_sibcall_epilogue ());
5480 seq = get_insns ();
0a1c58a2
JL
5481 end_sequence ();
5482
2f937369
DM
5483 /* Retain a map of the epilogue insns. Used in life analysis to
5484 avoid getting rid of sibcall epilogue insns. Do this before we
5485 actually emit the sequence. */
cd9c1ca8 5486 record_insns (seq, NULL, &epilogue_insn_hash);
0435312e 5487 set_insn_locators (seq, epilogue_locator);
2f937369 5488
5e35992a 5489 emit_insn_before (seq, insn);
628f6a4e 5490 ei_next (&ei);
0a1c58a2
JL
5491 }
5492#endif
ca1117cc 5493
86c82654
RH
5494#ifdef HAVE_epilogue
5495 if (epilogue_end)
5496 {
5497 rtx insn, next;
5498
5499 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 5500 There is no need, however, to be quite so strict about the existence
071a42f9 5501 of such a note. Also possibly move
84c1fa24
UW
5502 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5503 info generation. */
718fe406 5504 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
5505 {
5506 next = NEXT_INSN (insn);
b8698a0f 5507 if (NOTE_P (insn)
a38e7aa5 5508 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
86c82654
RH
5509 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5510 }
5511 }
5512#endif
6fb5fa3c
DB
5513
5514 /* Threading the prologue and epilogue changes the artificial refs
5515 in the entry and exit blocks. */
5516 epilogue_completed = 1;
5517 df_update_entry_exit_and_calls ();
bdac5f58
TW
5518}
5519
cd9c1ca8
RH
5520/* Reposition the prologue-end and epilogue-begin notes after
5521 instruction scheduling. */
bdac5f58
TW
5522
5523void
6fb5fa3c 5524reposition_prologue_and_epilogue_notes (void)
bdac5f58 5525{
cd9c1ca8
RH
5526#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5527 || defined (HAVE_sibcall_epilogue)
cd9c1ca8
RH
5528 /* Since the hash table is created on demand, the fact that it is
5529 non-null is a signal that it is non-empty. */
5530 if (prologue_insn_hash != NULL)
bdac5f58 5531 {
cd9c1ca8 5532 size_t len = htab_elements (prologue_insn_hash);
997704f1 5533 rtx insn, last = NULL, note = NULL;
bdac5f58 5534
cd9c1ca8
RH
5535 /* Scan from the beginning until we reach the last prologue insn. */
5536 /* ??? While we do have the CFG intact, there are two problems:
5537 (1) The prologue can contain loops (typically probing the stack),
5538 which means that the end of the prologue isn't in the first bb.
5539 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6fb5fa3c 5540 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
bdac5f58 5541 {
4b4bf941 5542 if (NOTE_P (insn))
9392c110 5543 {
a38e7aa5 5544 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
0a1c58a2
JL
5545 note = insn;
5546 }
cd9c1ca8 5547 else if (contains (insn, prologue_insn_hash))
0a1c58a2 5548 {
9f53e965
RH
5549 last = insn;
5550 if (--len == 0)
5551 break;
5552 }
5553 }
797a6ac1 5554
9f53e965
RH
5555 if (last)
5556 {
cd9c1ca8 5557 if (note == NULL)
9f53e965 5558 {
cd9c1ca8
RH
5559 /* Scan forward looking for the PROLOGUE_END note. It should
5560 be right at the beginning of the block, possibly with other
5561 insn notes that got moved there. */
5562 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
5563 {
5564 if (NOTE_P (note)
5565 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5566 break;
5567 }
9f53e965 5568 }
c93b03c2 5569
9f53e965 5570 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 5571 if (LABEL_P (last))
9f53e965
RH
5572 last = NEXT_INSN (last);
5573 reorder_insns (note, note, last);
bdac5f58 5574 }
0a1c58a2
JL
5575 }
5576
cd9c1ca8 5577 if (epilogue_insn_hash != NULL)
0a1c58a2 5578 {
cd9c1ca8
RH
5579 edge_iterator ei;
5580 edge e;
bdac5f58 5581
cd9c1ca8 5582 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
bdac5f58 5583 {
997704f1
RH
5584 rtx insn, first = NULL, note = NULL;
5585 basic_block bb = e->src;
c93b03c2 5586
997704f1 5587 /* Scan from the beginning until we reach the first epilogue insn. */
cd9c1ca8 5588 FOR_BB_INSNS (bb, insn)
9f53e965 5589 {
cd9c1ca8
RH
5590 if (NOTE_P (insn))
5591 {
5592 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5593 {
5594 note = insn;
997704f1 5595 if (first != NULL)
cd9c1ca8
RH
5596 break;
5597 }
5598 }
997704f1 5599 else if (first == NULL && contains (insn, epilogue_insn_hash))
cd9c1ca8 5600 {
997704f1 5601 first = insn;
cd9c1ca8
RH
5602 if (note != NULL)
5603 break;
5604 }
9392c110 5605 }
997704f1
RH
5606
5607 if (note)
5608 {
5609 /* If the function has a single basic block, and no real
b8698a0f 5610 epilogue insns (e.g. sibcall with no cleanup), the
997704f1
RH
5611 epilogue note can get scheduled before the prologue
5612 note. If we have frame related prologue insns, having
5613 them scanned during the epilogue will result in a crash.
5614 In this case re-order the epilogue note to just before
5615 the last insn in the block. */
5616 if (first == NULL)
5617 first = BB_END (bb);
5618
5619 if (PREV_INSN (first) != note)
5620 reorder_insns (note, note, PREV_INSN (first));
5621 }
bdac5f58
TW
5622 }
5623 }
5624#endif /* HAVE_prologue or HAVE_epilogue */
5625}
87ff9c8e 5626
faed5cc3
SB
5627/* Returns the name of the current function. */
5628const char *
5629current_function_name (void)
5630{
c7ac4fb5
NC
5631 if (cfun == NULL)
5632 return "<none>";
ae2bcd98 5633 return lang_hooks.decl_printable_name (cfun->decl, 2);
faed5cc3 5634}
ef330312
PB
5635\f
5636
c2924966 5637static unsigned int
ef330312
PB
5638rest_of_handle_check_leaf_regs (void)
5639{
5640#ifdef LEAF_REGISTERS
5641 current_function_uses_only_leaf_regs
5642 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5643#endif
c2924966 5644 return 0;
ef330312
PB
5645}
5646
8d8d1a28 5647/* Insert a TYPE into the used types hash table of CFUN. */
b646ba3f 5648
8d8d1a28
AH
5649static void
5650used_types_insert_helper (tree type, struct function *func)
33c9159e 5651{
8d8d1a28 5652 if (type != NULL && func != NULL)
33c9159e
AH
5653 {
5654 void **slot;
5655
5656 if (func->used_types_hash == NULL)
5657 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
8d8d1a28
AH
5658 htab_eq_pointer, NULL);
5659 slot = htab_find_slot (func->used_types_hash, type, INSERT);
33c9159e 5660 if (*slot == NULL)
8d8d1a28 5661 *slot = type;
33c9159e
AH
5662 }
5663}
5664
8d8d1a28
AH
5665 /* Given a type, insert it into the used types hash table in cfun. */
5666void
5667used_types_insert (tree t)
5668{
5669 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
095c7b3c
JJ
5670 if (TYPE_NAME (t))
5671 break;
5672 else
5673 t = TREE_TYPE (t);
5674 if (TYPE_NAME (t) == NULL_TREE
5675 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
5676 t = TYPE_MAIN_VARIANT (t);
8d8d1a28 5677 if (debug_info_level > DINFO_LEVEL_NONE)
b646ba3f
DS
5678 {
5679 if (cfun)
5680 used_types_insert_helper (t, cfun);
5681 else
5682 /* So this might be a type referenced by a global variable.
5683 Record that type so that we can later decide to emit its debug
5684 information. */
bc87224e 5685 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
b646ba3f
DS
5686 }
5687}
5688
5689/* Helper to hash a struct types_used_by_vars_entry. */
5690
5691static hashval_t
5692hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
5693{
5694 gcc_assert (entry && entry->var_decl && entry->type);
5695
5696 return iterative_hash_object (entry->type,
5697 iterative_hash_object (entry->var_decl, 0));
5698}
5699
5700/* Hash function of the types_used_by_vars_entry hash table. */
5701
5702hashval_t
5703types_used_by_vars_do_hash (const void *x)
5704{
5705 const struct types_used_by_vars_entry *entry =
5706 (const struct types_used_by_vars_entry *) x;
5707
5708 return hash_types_used_by_vars_entry (entry);
5709}
5710
5711/* Equality function of the types_used_by_vars_entry hash table. */
5712
5713int
5714types_used_by_vars_eq (const void *x1, const void *x2)
5715{
5716 const struct types_used_by_vars_entry *e1 =
5717 (const struct types_used_by_vars_entry *) x1;
5718 const struct types_used_by_vars_entry *e2 =
5719 (const struct types_used_by_vars_entry *)x2;
5720
5721 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
5722}
5723
5724/* Inserts an entry into the types_used_by_vars_hash hash table. */
5725
5726void
5727types_used_by_var_decl_insert (tree type, tree var_decl)
5728{
5729 if (type != NULL && var_decl != NULL)
5730 {
5731 void **slot;
5732 struct types_used_by_vars_entry e;
5733 e.var_decl = var_decl;
5734 e.type = type;
5735 if (types_used_by_vars_hash == NULL)
5736 types_used_by_vars_hash =
5737 htab_create_ggc (37, types_used_by_vars_do_hash,
5738 types_used_by_vars_eq, NULL);
5739 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
5740 hash_types_used_by_vars_entry (&e), INSERT);
5741 if (*slot == NULL)
5742 {
5743 struct types_used_by_vars_entry *entry;
a9429e29 5744 entry = ggc_alloc_types_used_by_vars_entry ();
b646ba3f
DS
5745 entry->type = type;
5746 entry->var_decl = var_decl;
5747 *slot = entry;
5748 }
5749 }
8d8d1a28
AH
5750}
5751
8ddbbcae 5752struct rtl_opt_pass pass_leaf_regs =
ef330312 5753{
8ddbbcae
JH
5754 {
5755 RTL_PASS,
e0a42b0f 5756 "*leaf_regs", /* name */
ef330312
PB
5757 NULL, /* gate */
5758 rest_of_handle_check_leaf_regs, /* execute */
5759 NULL, /* sub */
5760 NULL, /* next */
5761 0, /* static_pass_number */
7072a650 5762 TV_NONE, /* tv_id */
ef330312
PB
5763 0, /* properties_required */
5764 0, /* properties_provided */
5765 0, /* properties_destroyed */
5766 0, /* todo_flags_start */
8ddbbcae
JH
5767 0 /* todo_flags_finish */
5768 }
ef330312
PB
5769};
5770
6fb5fa3c
DB
5771static unsigned int
5772rest_of_handle_thread_prologue_and_epilogue (void)
5773{
5774 if (optimize)
5775 cleanup_cfg (CLEANUP_EXPENSIVE);
d3c12306 5776
6fb5fa3c
DB
5777 /* On some machines, the prologue and epilogue code, or parts thereof,
5778 can be represented as RTL. Doing so lets us schedule insns between
5779 it and the rest of the code and also allows delayed branch
5780 scheduling to operate in the epilogue. */
6fb5fa3c 5781 thread_prologue_and_epilogue_insns ();
d3c12306
EB
5782
5783 /* The stack usage info is finalized during prologue expansion. */
5784 if (flag_stack_usage)
5785 output_stack_usage ();
5786
6fb5fa3c
DB
5787 return 0;
5788}
5789
8ddbbcae 5790struct rtl_opt_pass pass_thread_prologue_and_epilogue =
6fb5fa3c 5791{
8ddbbcae
JH
5792 {
5793 RTL_PASS,
6fb5fa3c
DB
5794 "pro_and_epilogue", /* name */
5795 NULL, /* gate */
5796 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5797 NULL, /* sub */
5798 NULL, /* next */
5799 0, /* static_pass_number */
5800 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5801 0, /* properties_required */
5802 0, /* properties_provided */
5803 0, /* properties_destroyed */
5804 TODO_verify_flow, /* todo_flags_start */
5805 TODO_dump_func |
0d475361 5806 TODO_df_verify |
a36b8a1e 5807 TODO_df_finish | TODO_verify_rtl_sharing |
8ddbbcae
JH
5808 TODO_ggc_collect /* todo_flags_finish */
5809 }
6fb5fa3c 5810};
d8d72314
PB
5811\f
5812
5813/* This mini-pass fixes fall-out from SSA in asm statements that have
b8698a0f 5814 in-out constraints. Say you start with
d8d72314
PB
5815
5816 orig = inout;
5817 asm ("": "+mr" (inout));
5818 use (orig);
5819
5820 which is transformed very early to use explicit output and match operands:
5821
5822 orig = inout;
5823 asm ("": "=mr" (inout) : "0" (inout));
5824 use (orig);
5825
5826 Or, after SSA and copyprop,
5827
5828 asm ("": "=mr" (inout_2) : "0" (inout_1));
5829 use (inout_1);
5830
5831 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5832 they represent two separate values, so they will get different pseudo
5833 registers during expansion. Then, since the two operands need to match
5834 per the constraints, but use different pseudo registers, reload can
5835 only register a reload for these operands. But reloads can only be
5836 satisfied by hardregs, not by memory, so we need a register for this
5837 reload, just because we are presented with non-matching operands.
5838 So, even though we allow memory for this operand, no memory can be
5839 used for it, just because the two operands don't match. This can
5840 cause reload failures on register-starved targets.
5841
5842 So it's a symptom of reload not being able to use memory for reloads
5843 or, alternatively it's also a symptom of both operands not coming into
5844 reload as matching (in which case the pseudo could go to memory just
5845 fine, as the alternative allows it, and no reload would be necessary).
5846 We fix the latter problem here, by transforming
5847
5848 asm ("": "=mr" (inout_2) : "0" (inout_1));
5849
5850 back to
5851
5852 inout_2 = inout_1;
5853 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5854
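/* An illustrative sketch, not part of this file: user source that feeds
   this pass.  The "+mr" operand is split early into an output plus a
   matching input; after SSA copy propagation the two may land in
   different pseudos, which is exactly the situation undone below.  */
#if 0
static int
example_asm_inout (int inout)
{
  int orig = inout;
  asm ("" : "+mr" (inout));	/* becomes "=mr" (inout) : "0" (inout) */
  return orig + inout;
}
#endif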
5855static void
5856match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5857{
5858 int i;
5859 bool changed = false;
5860 rtx op = SET_SRC (p_sets[0]);
5861 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5862 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
1b4572a8 5863 bool *output_matched = XALLOCAVEC (bool, noutputs);
d8d72314 5864
d7b8033f 5865 memset (output_matched, 0, noutputs * sizeof (bool));
d8d72314
PB
5866 for (i = 0; i < ninputs; i++)
5867 {
5868 rtx input, output, insns;
5869 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5870 char *end;
53220215 5871 int match, j;
d8d72314 5872
70f16287
JJ
5873 if (*constraint == '%')
5874 constraint++;
5875
d8d72314
PB
5876 match = strtoul (constraint, &end, 10);
5877 if (end == constraint)
5878 continue;
5879
5880 gcc_assert (match < noutputs);
5881 output = SET_DEST (p_sets[match]);
5882 input = RTVEC_ELT (inputs, i);
53220215
MM
5883 /* Only do the transformation for pseudos. */
5884 if (! REG_P (output)
5885 || rtx_equal_p (output, input)
d8d72314
PB
5886 || (GET_MODE (input) != VOIDmode
5887 && GET_MODE (input) != GET_MODE (output)))
5888 continue;
5889
53220215
MM
5890 /* We can't do anything if the output is also used as input,
5891 as we're going to overwrite it. */
5892 for (j = 0; j < ninputs; j++)
5893 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5894 break;
5895 if (j != ninputs)
5896 continue;
5897
d7b8033f
JJ
5898 /* Avoid changing the same input several times. For
5899 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5900 only change 'in' once (to out1), rather than changing it
5901 first to out1 and afterwards to out2. */
5902 if (i > 0)
5903 {
5904 for (j = 0; j < noutputs; j++)
5905 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5906 break;
5907 if (j != noutputs)
5908 continue;
5909 }
5910 output_matched[match] = true;
5911
d8d72314 5912 start_sequence ();
53220215 5913 emit_move_insn (output, input);
d8d72314
PB
5914 insns = get_insns ();
5915 end_sequence ();
d8d72314 5916 emit_insn_before (insns, insn);
53220215
MM
5917
5918 /* Now replace all mentions of the input with output. We can't
fa10beec 5919 just replace the occurrence in inputs[i], as the register might
53220215
MM
5920 also be used in some other input (or even in an address of an
5921 output), which would mean possibly increasing the number of
5922 inputs by one (namely 'output' in addition), which might pose
5923 too complicated a problem for reload to solve. E.g. this situation:
5924
5925 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5926
84fbffb2 5927 Here 'input' is used in two occurrences as input (once for the
53220215 5928 input operand, once for the address in the second output operand).
fa10beec 5929 If we replaced only the occurrence of the input operand (to
53220215
MM
5930 make the matching) we would be left with this:
5931
5932 output = input
5933 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5934
5935 Now we suddenly have two different input values (containing the same
5936 value, but different pseudos) where we formerly had only one.
5937 With more complicated asms this might lead to reload failures
5938 which wouldn't have happened without this pass. So, iterate over
84fbffb2 5939 all operands and replace all occurrences of the register used. */
53220215 5940 for (j = 0; j < noutputs; j++)
1596d61e 5941 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
53220215
MM
5942 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5943 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5944 input, output);
5945 for (j = 0; j < ninputs; j++)
5946 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5947 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5948 input, output);
5949
d8d72314
PB
5950 changed = true;
5951 }
5952
5953 if (changed)
5954 df_insn_rescan (insn);
5955}
5956
5957static unsigned
5958rest_of_match_asm_constraints (void)
5959{
5960 basic_block bb;
5961 rtx insn, pat, *p_sets;
5962 int noutputs;
5963
e3b5732b 5964 if (!crtl->has_asm_statement)
d8d72314
PB
5965 return 0;
5966
5967 df_set_flags (DF_DEFER_INSN_RESCAN);
5968 FOR_EACH_BB (bb)
5969 {
5970 FOR_BB_INSNS (bb, insn)
5971 {
5972 if (!INSN_P (insn))
5973 continue;
5974
5975 pat = PATTERN (insn);
5976 if (GET_CODE (pat) == PARALLEL)
5977 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5978 else if (GET_CODE (pat) == SET)
5979 p_sets = &PATTERN (insn), noutputs = 1;
5980 else
5981 continue;
5982
5983 if (GET_CODE (*p_sets) == SET
5984 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5985 match_asm_constraints_1 (insn, p_sets, noutputs);
5986 }
5987 }
5988
5989 return TODO_df_finish;
5990}
5991
8ddbbcae 5992struct rtl_opt_pass pass_match_asm_constraints =
d8d72314 5993{
8ddbbcae
JH
5994 {
5995 RTL_PASS,
d8d72314
PB
5996 "asmcons", /* name */
5997 NULL, /* gate */
5998 rest_of_match_asm_constraints, /* execute */
5999 NULL, /* sub */
6000 NULL, /* next */
6001 0, /* static_pass_number */
7072a650 6002 TV_NONE, /* tv_id */
d8d72314
PB
6003 0, /* properties_required */
6004 0, /* properties_provided */
6005 0, /* properties_destroyed */
6006 0, /* todo_flags_start */
8ddbbcae
JH
6007 TODO_dump_func /* todo_flags_finish */
6008 }
d8d72314 6009};
6fb5fa3c 6010
faed5cc3 6011
e2500fed 6012#include "gt-function.h"