/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
0cbd9993 40#include "rtl-error.h"
6f086dfc
RS
41#include "tree.h"
42#include "flags.h"
1ef08c63 43#include "except.h"
6f086dfc 44#include "function.h"
6f086dfc 45#include "expr.h"
c6b97fac 46#include "optabs.h"
e78d8e51 47#include "libfuncs.h"
6f086dfc
RS
48#include "regs.h"
49#include "hard-reg-set.h"
50#include "insn-config.h"
51#include "recog.h"
52#include "output.h"
bdac5f58 53#include "basic-block.h"
e2500fed 54#include "hashtab.h"
87ff9c8e 55#include "ggc.h"
b1474bb7 56#include "tm_p.h"
7afff7cf 57#include "langhooks.h"
61f71b34 58#include "target.h"
677f3fa8 59#include "common/common-target.h"
726a989a 60#include "gimple.h"
ef330312 61#include "tree-pass.h"
7d69de61 62#include "predict.h"
6fb5fa3c
DB
63#include "df.h"
64#include "timevar.h"
e3df376d 65#include "vecprim.h"
ffe14686
AM
66#include "params.h"
67#include "bb-reorder.h"
7d69de61 68
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

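/* For instance, with an 8-byte alignment, CEIL_ROUND (13, 8) is 16 and
   FLOOR_ROUND (-13, 8) is -16; the mask ~((ALIGN) - 1) keeps the result a
   multiple of the alignment even for the negative offsets produced when
   FRAME_GROWS_DOWNWARD.  */
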
/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
VEC(tree,gc) *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

DEF_VEC_P(function_p);
DEF_VEC_ALLOC_P(function_p,heap);
static VEC(function_p,heap) *function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  VEC_safe_push (function_p, heap, function_context_stack, cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = VEC_pop (function_p, function_context_stack);
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
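/* A language front end that expands a nested function in the middle of its
   parent typically brackets the nested expansion with these calls, e.g.

     push_function_context ();
     ... set up and expand the nested function ...
     pop_function_context ();

   so that cfun and the associated per-function state are restored when the
   nested function is finished.  */
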
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
  insn_locators_free ();
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	      /* Leave room for the fixed part of the frame.  */
	      - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
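/* As an illustration: with FRAME_GROWS_DOWNWARD, a frame phase of 0 and a
   free area at START == -16 of LENGTH == 16, a request for SIZE == 8 with
   ALIGNMENT == 8 computes FLOOR_ROUND (-16 + 16 - 8, 8) == -8, which lies
   within the area, so *POFFSET becomes -8 and the slot reuses the hole.  */
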
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
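/* For example, a caller that needs a mode-aligned DImode scratch slot in the
   current function's frame might simply do

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   The returned MEM is addressed off virtual_stack_vars_rtx until virtual
   register instantiation rewrites it in terms of the frame pointer.  */
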
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    htab_traverse (temp_slot_address_table,
		   remove_unused_temp_slot_addresses_1,
		   NULL);
  else
    htab_empty (temp_slot_address_table);
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  {
	    if (INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	      return p;
	  }
    }

  return NULL;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate an object of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
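/* For example, expansion code that needs addressable scratch storage for an
   object of tree type TYPE can use something like

     rtx tmp = assign_temp (type, 1, 0);

   which yields a stack MEM; with MEMORY_REQUIRED of 0 and a non-BLKmode
   type, a pseudo register may be returned instead.  */
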

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}

/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
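/* Expansion code that may create temporaries for a self-contained piece of
   code usually brackets it with the nesting calls, e.g.

     push_temp_slots ();
     ... expand code that may call assign_temp / assign_stack_temp ...
     pop_temp_slots ();

   Temporaries created in between are released when the level is popped,
   unless preserve_temp_slots moved them to an outer level.  */
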

/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ??? This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc_initial_value_struct ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc_vec_initial_value_pair (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
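/* A back end that needs the value some hard register had on function entry,
   say an incoming link register (LINK_REGNUM here standing for whatever
   register number the port actually uses), might request it along these
   lines:

     rtx entry_value = get_hard_reg_initial_val (Pmode, LINK_REGNUM);

   The copy from the hard register into the returned pseudo is emitted later
   by emit_initial_value_sets.  */
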

/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}

unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}

/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}

/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
 + (STACK_POINTER_OFFSET))
#endif
#endif

/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
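/* For instance, once var_offset is known, a use of
   (plus (reg virtual-stack-vars) (const_int 8)) is rewritten by the code
   below as (plus (reg frame-pointer) (const_int var_offset + 8)), the offset
   returned here being folded into the constant by the callers.  */
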

/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (GET_MODE (x), new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
5a73491b 1519
bbf9b913
RH
1520/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1521 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1522
1523static void
bbf9b913 1524instantiate_virtual_regs_in_insn (rtx insn)
5a73491b 1525{
bbf9b913
RH
1526 HOST_WIDE_INT offset;
1527 int insn_code, i;
9325973e 1528 bool any_change = false;
82d6e6fc 1529 rtx set, new_rtx, x, seq;
32e66afd 1530
bbf9b913
RH
1531 /* There are some special cases to be handled first. */
1532 set = single_set (insn);
1533 if (set)
32e66afd 1534 {
bbf9b913
RH
1535 /* We're allowed to assign to a virtual register. This is interpreted
1536 to mean that the underlying register gets assigned the inverse
1537 transformation. This is used, for example, in the handling of
1538 non-local gotos. */
82d6e6fc
KG
1539 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1540 if (new_rtx)
bbf9b913
RH
1541 {
1542 start_sequence ();
32e66afd 1543
bbf9b913 1544 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
82d6e6fc 1545 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
bbf9b913 1546 GEN_INT (-offset));
82d6e6fc
KG
1547 x = force_operand (x, new_rtx);
1548 if (x != new_rtx)
1549 emit_move_insn (new_rtx, x);
5a73491b 1550
bbf9b913
RH
1551 seq = get_insns ();
1552 end_sequence ();
5a73491b 1553
bbf9b913
RH
1554 emit_insn_before (seq, insn);
1555 delete_insn (insn);
1556 return;
1557 }
5a73491b 1558
bbf9b913
RH
1559 /* Handle a straight copy from a virtual register by generating a
1560 new add insn. The difference between this and falling through
1561 to the generic case is avoiding a new pseudo and eliminating a
1562 move insn in the initial rtl stream. */
82d6e6fc
KG
1563 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1564 if (new_rtx && offset != 0
bbf9b913
RH
1565 && REG_P (SET_DEST (set))
1566 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1567 {
1568 start_sequence ();
5a73491b 1569
bbf9b913 1570 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
82d6e6fc 1571 new_rtx, GEN_INT (offset), SET_DEST (set),
bbf9b913
RH
1572 1, OPTAB_LIB_WIDEN);
1573 if (x != SET_DEST (set))
1574 emit_move_insn (SET_DEST (set), x);
770ae6cc 1575
bbf9b913
RH
1576 seq = get_insns ();
1577 end_sequence ();
87ce34d6 1578
bbf9b913
RH
1579 emit_insn_before (seq, insn);
1580 delete_insn (insn);
87ce34d6 1581 return;
bbf9b913 1582 }
5a73491b 1583
bbf9b913 1584 extract_insn (insn);
9325973e 1585 insn_code = INSN_CODE (insn);
5a73491b 1586
bbf9b913
RH
1587 /* Handle a plus involving a virtual register by determining if the
1588 operands remain valid if they're modified in place. */
1589 if (GET_CODE (SET_SRC (set)) == PLUS
1590 && recog_data.n_operands >= 3
1591 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1592 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
481683e1 1593 && CONST_INT_P (recog_data.operand[2])
82d6e6fc 1594 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
bbf9b913
RH
1595 {
1596 offset += INTVAL (recog_data.operand[2]);
5a73491b 1597
bbf9b913 1598 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1599 if (offset == 0
1600 && REG_P (SET_DEST (set))
1601 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1602 {
1603 start_sequence ();
82d6e6fc 1604 emit_move_insn (SET_DEST (set), new_rtx);
bbf9b913
RH
1605 seq = get_insns ();
1606 end_sequence ();
d1405722 1607
bbf9b913
RH
1608 emit_insn_before (seq, insn);
1609 delete_insn (insn);
1610 return;
1611 }
d1405722 1612
bbf9b913 1613 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1614
1615 /* Using validate_change and apply_change_group here leaves
1616 recog_data in an invalid state. Since we know exactly what
1617 we want to check, do those two by hand. */
82d6e6fc 1618 if (safe_insn_predicate (insn_code, 1, new_rtx)
bbf9b913
RH
1619 && safe_insn_predicate (insn_code, 2, x))
1620 {
82d6e6fc 1621 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
bbf9b913
RH
1622 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1623 any_change = true;
9325973e
RH
1624
1625 /* Fall through into the regular operand fixup loop in
1626 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1627 }
1628 }
1629 }
d1405722 1630 else
9325973e
RH
1631 {
1632 extract_insn (insn);
1633 insn_code = INSN_CODE (insn);
1634 }
5dc96d60 1635
bbf9b913
RH
1636 /* In the general case, we expect virtual registers to appear only in
1637 operands, and then only as either bare registers or inside memories. */
1638 for (i = 0; i < recog_data.n_operands; ++i)
1639 {
1640 x = recog_data.operand[i];
1641 switch (GET_CODE (x))
1642 {
1643 case MEM:
1644 {
1645 rtx addr = XEXP (x, 0);
1646 bool changed = false;
1647
1648 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1649 if (!changed)
1650 continue;
1651
1652 start_sequence ();
1653 x = replace_equiv_address (x, addr);
a5bfb13a
MM
1654 /* It may happen that the address with the virtual reg
1655 was valid (e.g. based on the virtual stack reg, which might
1656 be acceptable to the predicates with all offsets), whereas
 1657	       the address now isn't valid anymore, for instance when the address
 1658	       is still offset, but the base reg isn't virtual-stack-reg
1659 anymore. Below we would do a force_reg on the whole operand,
1660 but this insn might actually only accept memory. Hence,
1661 before doing that last resort, try to reload the address into
1662 a register, so this operand stays a MEM. */
1663 if (!safe_insn_predicate (insn_code, i, x))
1664 {
1665 addr = force_reg (GET_MODE (addr), addr);
1666 x = replace_equiv_address (x, addr);
1667 }
bbf9b913
RH
1668 seq = get_insns ();
1669 end_sequence ();
1670 if (seq)
1671 emit_insn_before (seq, insn);
1672 }
1673 break;
1674
1675 case REG:
82d6e6fc
KG
1676 new_rtx = instantiate_new_reg (x, &offset);
1677 if (new_rtx == NULL)
bbf9b913
RH
1678 continue;
1679 if (offset == 0)
82d6e6fc 1680 x = new_rtx;
bbf9b913
RH
1681 else
1682 {
1683 start_sequence ();
6f086dfc 1684
bbf9b913
RH
1685 /* Careful, special mode predicates may have stuff in
1686 insn_data[insn_code].operand[i].mode that isn't useful
1687 to us for computing a new value. */
1688 /* ??? Recognize address_operand and/or "p" constraints
 1689	       to see if (plus new offset) is a valid address before we put
1690 this through expand_simple_binop. */
82d6e6fc 1691 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
bbf9b913
RH
1692 GEN_INT (offset), NULL_RTX,
1693 1, OPTAB_LIB_WIDEN);
1694 seq = get_insns ();
1695 end_sequence ();
1696 emit_insn_before (seq, insn);
1697 }
1698 break;
6f086dfc 1699
bbf9b913 1700 case SUBREG:
82d6e6fc
KG
1701 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1702 if (new_rtx == NULL)
bbf9b913
RH
1703 continue;
1704 if (offset != 0)
1705 {
1706 start_sequence ();
82d6e6fc 1707 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
bbf9b913
RH
1708 GEN_INT (offset), NULL_RTX,
1709 1, OPTAB_LIB_WIDEN);
1710 seq = get_insns ();
1711 end_sequence ();
1712 emit_insn_before (seq, insn);
1713 }
82d6e6fc
KG
1714 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1715 GET_MODE (new_rtx), SUBREG_BYTE (x));
7314c7dd 1716 gcc_assert (x);
bbf9b913 1717 break;
6f086dfc 1718
bbf9b913
RH
1719 default:
1720 continue;
1721 }
6f086dfc 1722
bbf9b913
RH
1723 /* At this point, X contains the new value for the operand.
1724 Validate the new value vs the insn predicate. Note that
1725 asm insns will have insn_code -1 here. */
1726 if (!safe_insn_predicate (insn_code, i, x))
6ba1bd36
JM
1727 {
1728 start_sequence ();
f7ce0951
SE
1729 if (REG_P (x))
1730 {
1731 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1732 x = copy_to_reg (x);
1733 }
1734 else
1735 x = force_reg (insn_data[insn_code].operand[i].mode, x);
6ba1bd36
JM
1736 seq = get_insns ();
1737 end_sequence ();
1738 if (seq)
1739 emit_insn_before (seq, insn);
1740 }
6f086dfc 1741
bbf9b913
RH
1742 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1743 any_change = true;
1744 }
6f086dfc 1745
bbf9b913
RH
1746 if (any_change)
1747 {
1748 /* Propagate operand changes into the duplicates. */
1749 for (i = 0; i < recog_data.n_dups; ++i)
1750 *recog_data.dup_loc[i]
3e916873 1751 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
5dc96d60 1752
bbf9b913
RH
1753 /* Force re-recognition of the instruction for validation. */
1754 INSN_CODE (insn) = -1;
1755 }
6f086dfc 1756
bbf9b913 1757 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1758 {
bbf9b913 1759 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1760 {
bbf9b913 1761 error_for_asm (insn, "impossible constraint in %<asm%>");
229bfbcf 1762 delete_insn_and_edges (insn);
bbf9b913
RH
1763 }
1764 }
1765 else
1766 {
1767 if (recog_memoized (insn) < 0)
1768 fatal_insn_not_found (insn);
1769 }
1770}
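/* A sketch of the "straight copy" special case handled above; the offset
   is hypothetical.  An insn such as

	(set (reg 130) (reg/f virtual-incoming-args))

   with in_arg_offset == 8 is not patched operand by operand; the whole
   insn is instead replaced by the equivalent addition

	(set (reg 130) (plus (reg/f arg-pointer) (const_int 8)))

   (or whatever sequence expand_simple_binop must emit for the target),
   which avoids creating a new pseudo and an extra move.  */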
14a774a9 1771
bbf9b913
RH
1772/* Subroutine of instantiate_decls. Given RTL representing a decl,
1773 do any instantiation required. */
14a774a9 1774
e41b2a33
PB
1775void
1776instantiate_decl_rtl (rtx x)
bbf9b913
RH
1777{
1778 rtx addr;
6f086dfc 1779
bbf9b913
RH
1780 if (x == 0)
1781 return;
6f086dfc 1782
bbf9b913
RH
1783 /* If this is a CONCAT, recurse for the pieces. */
1784 if (GET_CODE (x) == CONCAT)
1785 {
e41b2a33
PB
1786 instantiate_decl_rtl (XEXP (x, 0));
1787 instantiate_decl_rtl (XEXP (x, 1));
bbf9b913
RH
1788 return;
1789 }
6f086dfc 1790
bbf9b913
RH
1791 /* If this is not a MEM, no need to do anything. Similarly if the
1792 address is a constant or a register that is not a virtual register. */
1793 if (!MEM_P (x))
1794 return;
6f086dfc 1795
bbf9b913
RH
1796 addr = XEXP (x, 0);
1797 if (CONSTANT_P (addr)
1798 || (REG_P (addr)
1799 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1800 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1801 return;
6f086dfc 1802
bbf9b913
RH
1803 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1804}
6f086dfc 1805
434eba35
JJ
1806/* Helper for instantiate_decls called via walk_tree: Process all decls
1807 in the given DECL_VALUE_EXPR. */
1808
1809static tree
1810instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1811{
1812 tree t = *tp;
726a989a 1813 if (! EXPR_P (t))
434eba35
JJ
1814 {
1815 *walk_subtrees = 0;
37d6a488
AO
1816 if (DECL_P (t))
1817 {
1818 if (DECL_RTL_SET_P (t))
1819 instantiate_decl_rtl (DECL_RTL (t));
1820 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1821 && DECL_INCOMING_RTL (t))
1822 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1823 if ((TREE_CODE (t) == VAR_DECL
1824 || TREE_CODE (t) == RESULT_DECL)
1825 && DECL_HAS_VALUE_EXPR_P (t))
1826 {
1827 tree v = DECL_VALUE_EXPR (t);
1828 walk_tree (&v, instantiate_expr, NULL, NULL);
1829 }
1830 }
434eba35
JJ
1831 }
1832 return NULL;
1833}
1834
bbf9b913
RH
1835/* Subroutine of instantiate_decls: Process all decls in the given
1836 BLOCK node and all its subblocks. */
6f086dfc 1837
bbf9b913
RH
1838static void
1839instantiate_decls_1 (tree let)
1840{
1841 tree t;
6f086dfc 1842
910ad8de 1843 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
434eba35
JJ
1844 {
1845 if (DECL_RTL_SET_P (t))
e41b2a33 1846 instantiate_decl_rtl (DECL_RTL (t));
434eba35
JJ
1847 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1848 {
1849 tree v = DECL_VALUE_EXPR (t);
1850 walk_tree (&v, instantiate_expr, NULL, NULL);
1851 }
1852 }
6f086dfc 1853
bbf9b913 1854 /* Process all subblocks. */
87caf699 1855 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
bbf9b913
RH
1856 instantiate_decls_1 (t);
1857}
6f086dfc 1858
bbf9b913
RH
1859/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1860 all virtual registers in their DECL_RTL's. */
6f086dfc 1861
bbf9b913
RH
1862static void
1863instantiate_decls (tree fndecl)
1864{
c021f10b
NF
1865 tree decl;
1866 unsigned ix;
6f086dfc 1867
bbf9b913 1868 /* Process all parameters of the function. */
910ad8de 1869 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
bbf9b913 1870 {
e41b2a33
PB
1871 instantiate_decl_rtl (DECL_RTL (decl));
1872 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1873 if (DECL_HAS_VALUE_EXPR_P (decl))
1874 {
1875 tree v = DECL_VALUE_EXPR (decl);
1876 walk_tree (&v, instantiate_expr, NULL, NULL);
1877 }
bbf9b913 1878 }
4fd796bb 1879
37d6a488
AO
1880 if ((decl = DECL_RESULT (fndecl))
1881 && TREE_CODE (decl) == RESULT_DECL)
1882 {
1883 if (DECL_RTL_SET_P (decl))
1884 instantiate_decl_rtl (DECL_RTL (decl));
1885 if (DECL_HAS_VALUE_EXPR_P (decl))
1886 {
1887 tree v = DECL_VALUE_EXPR (decl);
1888 walk_tree (&v, instantiate_expr, NULL, NULL);
1889 }
1890 }
1891
bbf9b913
RH
1892 /* Now process all variables defined in the function or its subblocks. */
1893 instantiate_decls_1 (DECL_INITIAL (fndecl));
802e9f8e 1894
c021f10b
NF
1895 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1896 if (DECL_RTL_SET_P (decl))
1897 instantiate_decl_rtl (DECL_RTL (decl));
1898 VEC_free (tree, gc, cfun->local_decls);
bbf9b913 1899}
6f086dfc 1900
bbf9b913
RH
1901/* Pass through the INSNS of function FNDECL and convert virtual register
1902 references to hard register references. */
6f086dfc 1903
c2924966 1904static unsigned int
bbf9b913
RH
1905instantiate_virtual_regs (void)
1906{
45dbce1b 1907 rtx insn;
6f086dfc 1908
bbf9b913
RH
1909 /* Compute the offsets to use for this function. */
1910 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1911 var_offset = STARTING_FRAME_OFFSET;
1912 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1913 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1914#ifdef FRAME_POINTER_CFA_OFFSET
1915 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1916#else
bbf9b913 1917 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1918#endif
e9a25f70 1919
bbf9b913
RH
1920 /* Initialize recognition, indicating that volatile is OK. */
1921 init_recog ();
6f086dfc 1922
bbf9b913
RH
1923 /* Scan through all the insns, instantiating every virtual register still
1924 present. */
45dbce1b
NF
1925 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1926 if (INSN_P (insn))
1927 {
1928 /* These patterns in the instruction stream can never be recognized.
1929 Fortunately, they shouldn't contain virtual registers either. */
1930 if (GET_CODE (PATTERN (insn)) == USE
1931 || GET_CODE (PATTERN (insn)) == CLOBBER
1932 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1933 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1934 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1935 continue;
1936 else if (DEBUG_INSN_P (insn))
1937 for_each_rtx (&INSN_VAR_LOCATION (insn),
1938 instantiate_virtual_regs_in_rtx, NULL);
1939 else
1940 instantiate_virtual_regs_in_insn (insn);
ba4807a0 1941
45dbce1b
NF
1942 if (INSN_DELETED_P (insn))
1943 continue;
7114321e 1944
45dbce1b 1945 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
ba4807a0 1946
45dbce1b
NF
1947 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1948 if (CALL_P (insn))
1949 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1950 instantiate_virtual_regs_in_rtx, NULL);
1951 }
6f086dfc 1952
bbf9b913
RH
1953 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1954 instantiate_decls (current_function_decl);
1955
e41b2a33
PB
1956 targetm.instantiate_decls ();
1957
bbf9b913
RH
1958 /* Indicate that, from now on, assign_stack_local should use
1959 frame_pointer_rtx. */
1960 virtuals_instantiated = 1;
d3c12306 1961
c2924966 1962 return 0;
6f086dfc 1963}
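/* For reference, the replacements performed by this pass map each virtual
   register onto a hard register plus one of the offsets computed above,
   roughly:

	virtual-incoming-args	-> arg pointer   + in_arg_offset
	virtual-stack-vars	-> frame pointer + var_offset
	virtual-stack-dynamic	-> stack pointer + dynamic_offset
	virtual-outgoing-args	-> stack pointer + out_arg_offset
	virtual-cfa		-> CFA base reg  + cfa_offset

   The exact base registers and offsets are target-dependent; see
   instantiate_new_reg.  */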
ef330312 1964
8ddbbcae 1965struct rtl_opt_pass pass_instantiate_virtual_regs =
ef330312 1966{
8ddbbcae
JH
1967 {
1968 RTL_PASS,
defb77dc 1969 "vregs", /* name */
ef330312
PB
1970 NULL, /* gate */
1971 instantiate_virtual_regs, /* execute */
1972 NULL, /* sub */
1973 NULL, /* next */
1974 0, /* static_pass_number */
7072a650 1975 TV_NONE, /* tv_id */
45dbce1b 1976 0, /* properties_required */
ef330312
PB
1977 0, /* properties_provided */
1978 0, /* properties_destroyed */
1979 0, /* todo_flags_start */
22c5fa5f 1980 0 /* todo_flags_finish */
8ddbbcae 1981 }
ef330312
PB
1982};
1983
6f086dfc 1984\f
d181c154
RS
1985/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1986 This means a type for which function calls must pass an address to the
1987 function or get an address back from the function.
1988 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
1989
1990int
586de218 1991aggregate_value_p (const_tree exp, const_tree fntype)
6f086dfc 1992{
d47d0a8d 1993 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
9d790a4f
RS
1994 int i, regno, nregs;
1995 rtx reg;
2f939d94 1996
61f71b34
DD
1997 if (fntype)
1998 switch (TREE_CODE (fntype))
1999 {
2000 case CALL_EXPR:
d47d0a8d
EB
2001 {
2002 tree fndecl = get_callee_fndecl (fntype);
2003 fntype = (fndecl
2004 ? TREE_TYPE (fndecl)
2005 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2006 }
61f71b34
DD
2007 break;
2008 case FUNCTION_DECL:
d47d0a8d 2009 fntype = TREE_TYPE (fntype);
61f71b34
DD
2010 break;
2011 case FUNCTION_TYPE:
2012 case METHOD_TYPE:
2013 break;
2014 case IDENTIFIER_NODE:
d47d0a8d 2015 fntype = NULL_TREE;
61f71b34
DD
2016 break;
2017 default:
d47d0a8d 2018 /* We don't expect other tree types here. */
0bccc606 2019 gcc_unreachable ();
61f71b34
DD
2020 }
2021
d47d0a8d 2022 if (VOID_TYPE_P (type))
d7bf8ada 2023 return 0;
500c353d 2024
ebf0bf7f
JJ
2025 /* If a record should be passed the same as its first (and only) member
2026 don't pass it as an aggregate. */
2027 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2028 return aggregate_value_p (first_field (type), fntype);
2029
cc77ae10
JM
2030 /* If the front end has decided that this needs to be passed by
2031 reference, do so. */
2032 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2033 && DECL_BY_REFERENCE (exp))
2034 return 1;
500c353d 2035
d47d0a8d
EB
2036 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2037 if (fntype && TREE_ADDRESSABLE (fntype))
500c353d 2038 return 1;
b8698a0f 2039
956d6950 2040 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
2041 and thus can't be returned in registers. */
2042 if (TREE_ADDRESSABLE (type))
2043 return 1;
d47d0a8d 2044
05e3bdb9 2045 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 2046 return 1;
d47d0a8d
EB
2047
2048 if (targetm.calls.return_in_memory (type, fntype))
2049 return 1;
2050
9d790a4f
RS
2051 /* Make sure we have suitable call-clobbered regs to return
2052 the value in; if not, we must return it in memory. */
1d636cc6 2053 reg = hard_function_value (type, 0, fntype, 0);
e71f7aa5
JW
2054
2055 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2056 it is OK. */
f8cfc6aa 2057 if (!REG_P (reg))
e71f7aa5
JW
2058 return 0;
2059
9d790a4f 2060 regno = REGNO (reg);
66fd46b6 2061 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
2062 for (i = 0; i < nregs; i++)
2063 if (! call_used_regs[regno + i])
2064 return 1;
d47d0a8d 2065
6f086dfc
RS
2066 return 0;
2067}
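/* Two illustrative cases (hypothetical types; the outcome is ABI
   dependent).  A small aggregate such as "struct { long a, b; }" usually
   fits in the call-clobbered return registers, so aggregate_value_p
   returns 0, whereas "struct { long a[16]; }" does not, and either
   targetm.calls.return_in_memory or the register check above forces a
   result of 1.  A C++ class with a non-trivial copy constructor is marked
   TREE_ADDRESSABLE by the front end and is therefore always returned in
   memory.  */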
2068\f
8fff4fc1
RH
2069/* Return true if we should assign DECL a pseudo register; false if it
2070 should live on the local stack. */
2071
2072bool
fa233e34 2073use_register_for_decl (const_tree decl)
8fff4fc1 2074{
007e61c2
PB
 2075   if (!targetm.calls.allocate_stack_slots_for_args ())
2076 return true;
b8698a0f 2077
8fff4fc1
RH
2078 /* Honor volatile. */
2079 if (TREE_SIDE_EFFECTS (decl))
2080 return false;
2081
2082 /* Honor addressability. */
2083 if (TREE_ADDRESSABLE (decl))
2084 return false;
2085
2086 /* Only register-like things go in registers. */
2087 if (DECL_MODE (decl) == BLKmode)
2088 return false;
2089
2090 /* If -ffloat-store specified, don't put explicit float variables
2091 into registers. */
2092 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2093 propagates values across these stores, and it probably shouldn't. */
2094 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2095 return false;
2096
78e0d62b
RH
2097 /* If we're not interested in tracking debugging information for
2098 this decl, then we can certainly put it in a register. */
2099 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
2100 return true;
2101
d130d647
JJ
2102 if (optimize)
2103 return true;
2104
2105 if (!DECL_REGISTER (decl))
2106 return false;
2107
2108 switch (TREE_CODE (TREE_TYPE (decl)))
2109 {
2110 case RECORD_TYPE:
2111 case UNION_TYPE:
2112 case QUAL_UNION_TYPE:
2113 /* When not optimizing, disregard register keyword for variables with
2114 types containing methods, otherwise the methods won't be callable
2115 from the debugger. */
2116 if (TYPE_METHODS (TREE_TYPE (decl)))
2117 return false;
2118 break;
2119 default:
2120 break;
2121 }
2122
2123 return true;
8fff4fc1
RH
2124}
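/* Illustrative examples (hypothetical declarations).  "volatile int v",
   a variable whose address is taken, and a BLKmode object such as
   "char buf[32]" all stay on the stack.  A plain "int i" is given a
   pseudo when optimizing; at -O0 it additionally needs the "register"
   keyword (DECL_REGISTER), and even then a "register" variable whose
   class type has methods is kept on the stack so the methods remain
   callable from the debugger.  */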
2125
0976078c
RH
2126/* Return true if TYPE should be passed by invisible reference. */
2127
2128bool
8cd5a4e0
RH
2129pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2130 tree type, bool named_arg)
0976078c
RH
2131{
2132 if (type)
2133 {
2134 /* If this type contains non-trivial constructors, then it is
2135 forbidden for the middle-end to create any new copies. */
2136 if (TREE_ADDRESSABLE (type))
2137 return true;
2138
d58247a3
RH
2139 /* GCC post 3.4 passes *all* variable sized types by reference. */
2140 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c 2141 return true;
ebf0bf7f
JJ
2142
2143 /* If a record type should be passed the same as its first (and only)
2144 member, use the type and mode of that member. */
2145 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2146 {
2147 type = TREE_TYPE (first_field (type));
2148 mode = TYPE_MODE (type);
2149 }
0976078c
RH
2150 }
2151
d5cc9181
JR
2152 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2153 type, named_arg);
0976078c
RH
2154}
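/* For illustration (hypothetical types).  A type with a non-trivial copy
   constructor (TREE_ADDRESSABLE) and a C99 variable-length array type
   such as "int vla[n]" (whose TYPE_SIZE is not an INTEGER_CST) are always
   passed by invisible reference; everything else is left to
   targetm.calls.pass_by_reference, which on some ABIs also forces large
   aggregates through a pointer.  */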
2155
6cdd5672
RH
2156/* Return true if TYPE, which is passed by reference, should be callee
2157 copied instead of caller copied. */
2158
2159bool
2160reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2161 tree type, bool named_arg)
2162{
2163 if (type && TREE_ADDRESSABLE (type))
2164 return false;
d5cc9181
JR
2165 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2166 named_arg);
6cdd5672
RH
2167}
2168
6071dc7f
RH
2169/* Structures to communicate between the subroutines of assign_parms.
2170 The first holds data persistent across all parameters, the second
2171 is cleared out for each parameter. */
6f086dfc 2172
6071dc7f 2173struct assign_parm_data_all
6f086dfc 2174{
d5cc9181
JR
2175 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2176 should become a job of the target or otherwise encapsulated. */
2177 CUMULATIVE_ARGS args_so_far_v;
2178 cumulative_args_t args_so_far;
6f086dfc 2179 struct args_size stack_args_size;
6071dc7f
RH
2180 tree function_result_decl;
2181 tree orig_fnargs;
bb27eeda
SE
2182 rtx first_conversion_insn;
2183 rtx last_conversion_insn;
6071dc7f
RH
2184 HOST_WIDE_INT pretend_args_size;
2185 HOST_WIDE_INT extra_pretend_bytes;
2186 int reg_parm_stack_space;
2187};
6f086dfc 2188
6071dc7f
RH
2189struct assign_parm_data_one
2190{
2191 tree nominal_type;
2192 tree passed_type;
2193 rtx entry_parm;
2194 rtx stack_parm;
2195 enum machine_mode nominal_mode;
2196 enum machine_mode passed_mode;
2197 enum machine_mode promoted_mode;
2198 struct locate_and_pad_arg_data locate;
2199 int partial;
2200 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
2201 BOOL_BITFIELD passed_pointer : 1;
2202 BOOL_BITFIELD on_stack : 1;
2203 BOOL_BITFIELD loaded_in_reg : 1;
2204};
ebb904cb 2205
6071dc7f 2206/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 2207
6071dc7f
RH
2208static void
2209assign_parms_initialize_all (struct assign_parm_data_all *all)
2210{
fc2f1f53 2211 tree fntype ATTRIBUTE_UNUSED;
6f086dfc 2212
6071dc7f
RH
2213 memset (all, 0, sizeof (*all));
2214
2215 fntype = TREE_TYPE (current_function_decl);
2216
2217#ifdef INIT_CUMULATIVE_INCOMING_ARGS
d5cc9181 2218 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
6071dc7f 2219#else
d5cc9181 2220 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
6071dc7f
RH
2221 current_function_decl, -1);
2222#endif
d5cc9181 2223 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
6071dc7f
RH
2224
2225#ifdef REG_PARM_STACK_SPACE
2226 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2227#endif
2228}
6f086dfc 2229
6071dc7f
RH
 2230/* If ARGS contains entries with complex types, split each such entry into
 2231   two entries of the component type.  The vector ARGS is modified in
 2232   place.  */
2233
3b3f318a
RG
2234static void
2235split_complex_args (VEC(tree, heap) **args)
6071dc7f 2236{
3b3f318a 2237 unsigned i;
6071dc7f
RH
2238 tree p;
2239
ac47786e 2240 FOR_EACH_VEC_ELT (tree, *args, i, p)
6071dc7f
RH
2241 {
2242 tree type = TREE_TYPE (p);
2243 if (TREE_CODE (type) == COMPLEX_TYPE
2244 && targetm.calls.split_complex_arg (type))
2245 {
2246 tree decl;
2247 tree subtype = TREE_TYPE (type);
6ccd356e 2248 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
2249
2250 /* Rewrite the PARM_DECL's type with its component. */
3b3f318a 2251 p = copy_node (p);
6071dc7f
RH
2252 TREE_TYPE (p) = subtype;
2253 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2254 DECL_MODE (p) = VOIDmode;
2255 DECL_SIZE (p) = NULL;
2256 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
2257 /* If this arg must go in memory, put it in a pseudo here.
2258 We can't allow it to go in memory as per normal parms,
2259 because the usual place might not have the imag part
2260 adjacent to the real part. */
2261 DECL_ARTIFICIAL (p) = addressable;
2262 DECL_IGNORED_P (p) = addressable;
2263 TREE_ADDRESSABLE (p) = 0;
6071dc7f 2264 layout_decl (p, 0);
3b3f318a 2265 VEC_replace (tree, *args, i, p);
6071dc7f
RH
2266
2267 /* Build a second synthetic decl. */
c2255bc4
AH
2268 decl = build_decl (EXPR_LOCATION (p),
2269 PARM_DECL, NULL_TREE, subtype);
6071dc7f 2270 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
2271 DECL_ARTIFICIAL (decl) = addressable;
2272 DECL_IGNORED_P (decl) = addressable;
6071dc7f 2273 layout_decl (decl, 0);
3b3f318a 2274 VEC_safe_insert (tree, heap, *args, ++i, decl);
6071dc7f
RH
2275 }
2276 }
6071dc7f
RH
2277}
2278
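/* A sketch of the transformation above, for a hypothetical parameter.
   A PARM_DECL "p" of type "_Complex double", on a target that asks for
   complex arguments to be split, is replaced in the vector by two
   PARM_DECLs of type "double" carrying the real and imaginary parts; the
   second one is synthetic (it has no DECL_NAME), and both are marked
   artificial and ignored for debugging when the original parameter was
   addressable.  */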
2279/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2280 the hidden struct return argument, and (abi willing) complex args.
2281 Return the new parameter list. */
2282
3b3f318a 2283static VEC(tree, heap) *
6071dc7f
RH
2284assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2285{
2286 tree fndecl = current_function_decl;
2287 tree fntype = TREE_TYPE (fndecl);
3b3f318a
RG
2288 VEC(tree, heap) *fnargs = NULL;
2289 tree arg;
2290
910ad8de 2291 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
3b3f318a
RG
2292 VEC_safe_push (tree, heap, fnargs, arg);
2293
2294 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2295
2296 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2297 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
e3b5732b 2298 && ! cfun->returns_pcc_struct
61f71b34 2299 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2300 {
f9f29478 2301 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2302 tree decl;
6f086dfc 2303
c2255bc4 2304 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
8dcfef8f 2305 PARM_DECL, get_identifier (".result_ptr"), type);
6071dc7f
RH
2306 DECL_ARG_TYPE (decl) = type;
2307 DECL_ARTIFICIAL (decl) = 1;
8dcfef8f
AO
2308 DECL_NAMELESS (decl) = 1;
2309 TREE_CONSTANT (decl) = 1;
6f086dfc 2310
910ad8de 2311 DECL_CHAIN (decl) = all->orig_fnargs;
3b3f318a
RG
2312 all->orig_fnargs = decl;
2313 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2314
6071dc7f 2315 all->function_result_decl = decl;
6f086dfc 2316 }
718fe406 2317
42ba5130
RH
2318 /* If the target wants to split complex arguments into scalars, do so. */
2319 if (targetm.calls.split_complex_arg)
3b3f318a 2320 split_complex_args (&fnargs);
ded9bf77 2321
6071dc7f
RH
2322 return fnargs;
2323}
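/* For illustration, with a hypothetical function

	struct big f (int x);

   on a target where "struct big" is returned in memory and the struct
   value address is passed as a hidden first argument, the list scanned by
   assign_parms becomes (".result_ptr", "x"), where ".result_ptr" is a
   synthetic PARM_DECL of pointer-to-"struct big" type, also recorded in
   all->function_result_decl.  */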
e7949876 2324
6071dc7f
RH
2325/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2326 data for the parameter. Incorporate ABI specifics such as pass-by-
2327 reference and type promotion. */
6f086dfc 2328
6071dc7f
RH
2329static void
2330assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2331 struct assign_parm_data_one *data)
2332{
2333 tree nominal_type, passed_type;
2334 enum machine_mode nominal_mode, passed_mode, promoted_mode;
cde0f3fd 2335 int unsignedp;
6f086dfc 2336
6071dc7f
RH
2337 memset (data, 0, sizeof (*data));
2338
fa10beec 2339 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
e3b5732b 2340 if (!cfun->stdarg)
fa10beec 2341 data->named_arg = 1; /* No variadic parms. */
910ad8de 2342 else if (DECL_CHAIN (parm))
fa10beec 2343 data->named_arg = 1; /* Not the last non-variadic parm. */
d5cc9181 2344 else if (targetm.calls.strict_argument_naming (all->args_so_far))
fa10beec 2345 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 2346 else
fa10beec 2347 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
2348
2349 nominal_type = TREE_TYPE (parm);
2350 passed_type = DECL_ARG_TYPE (parm);
2351
2352 /* Look out for errors propagating this far. Also, if the parameter's
2353 type is void then its value doesn't matter. */
2354 if (TREE_TYPE (parm) == error_mark_node
2355 /* This can happen after weird syntax errors
2356 or if an enum type is defined among the parms. */
2357 || TREE_CODE (parm) != PARM_DECL
2358 || passed_type == NULL
2359 || VOID_TYPE_P (nominal_type))
2360 {
2361 nominal_type = passed_type = void_type_node;
2362 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2363 goto egress;
2364 }
108b7d3d 2365
6071dc7f
RH
2366 /* Find mode of arg as it is passed, and mode of arg as it should be
2367 during execution of this function. */
2368 passed_mode = TYPE_MODE (passed_type);
2369 nominal_mode = TYPE_MODE (nominal_type);
2370
ebf0bf7f
JJ
2371 /* If the parm is to be passed as a transparent union or record, use the
2372 type of the first field for the tests below. We have already verified
2373 that the modes are the same. */
2374 if ((TREE_CODE (passed_type) == UNION_TYPE
2375 || TREE_CODE (passed_type) == RECORD_TYPE)
2376 && TYPE_TRANSPARENT_AGGR (passed_type))
2377 passed_type = TREE_TYPE (first_field (passed_type));
6071dc7f 2378
0976078c 2379 /* See if this arg was passed by invisible reference. */
d5cc9181 2380 if (pass_by_reference (&all->args_so_far_v, passed_mode,
0976078c 2381 passed_type, data->named_arg))
6071dc7f
RH
2382 {
2383 passed_type = nominal_type = build_pointer_type (passed_type);
2384 data->passed_pointer = true;
2385 passed_mode = nominal_mode = Pmode;
2386 }
6f086dfc 2387
6071dc7f 2388 /* Find mode as it is passed by the ABI. */
cde0f3fd
PB
2389 unsignedp = TYPE_UNSIGNED (passed_type);
2390 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2391 TREE_TYPE (current_function_decl), 0);
6f086dfc 2392
6071dc7f
RH
2393 egress:
2394 data->nominal_type = nominal_type;
2395 data->passed_type = passed_type;
2396 data->nominal_mode = nominal_mode;
2397 data->passed_mode = passed_mode;
2398 data->promoted_mode = promoted_mode;
2399}
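/* Two illustrative outcomes (hypothetical parameters; both depend on the
   ABI).  A "short" argument on a target whose promote_function_mode
   widens subword integers ends up with passed_mode == HImode and
   promoted_mode == SImode.  A class with a non-trivial copy constructor
   is passed by invisible reference, so passed_type becomes the pointer
   type, passed_mode and nominal_mode become Pmode, and
   data->passed_pointer is set.  */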
16bae307 2400
6071dc7f 2401/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2402
6071dc7f
RH
2403static void
2404assign_parms_setup_varargs (struct assign_parm_data_all *all,
2405 struct assign_parm_data_one *data, bool no_rtl)
2406{
2407 int varargs_pretend_bytes = 0;
2408
d5cc9181 2409 targetm.calls.setup_incoming_varargs (all->args_so_far,
6071dc7f
RH
2410 data->promoted_mode,
2411 data->passed_type,
2412 &varargs_pretend_bytes, no_rtl);
2413
2414 /* If the back-end has requested extra stack space, record how much is
2415 needed. Do not change pretend_args_size otherwise since it may be
2416 nonzero from an earlier partial argument. */
2417 if (varargs_pretend_bytes > 0)
2418 all->pretend_args_size = varargs_pretend_bytes;
2419}
a53e14c0 2420
6071dc7f
RH
2421/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2422 the incoming location of the current parameter. */
2423
2424static void
2425assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2426 struct assign_parm_data_one *data)
2427{
2428 HOST_WIDE_INT pretend_bytes = 0;
2429 rtx entry_parm;
2430 bool in_regs;
2431
2432 if (data->promoted_mode == VOIDmode)
2433 {
2434 data->entry_parm = data->stack_parm = const0_rtx;
2435 return;
2436 }
a53e14c0 2437
d5cc9181 2438 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2439 data->promoted_mode,
2440 data->passed_type,
2441 data->named_arg);
6f086dfc 2442
6071dc7f
RH
2443 if (entry_parm == 0)
2444 data->promoted_mode = data->passed_mode;
6f086dfc 2445
6071dc7f
RH
2446 /* Determine parm's home in the stack, in case it arrives in the stack
2447 or we should pretend it did. Compute the stack position and rtx where
2448 the argument arrives and its size.
6f086dfc 2449
6071dc7f
RH
2450 There is one complexity here: If this was a parameter that would
2451 have been passed in registers, but wasn't only because it is
2452 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2453 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2454 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2455 as it was the previous time. */
2456 in_regs = entry_parm != 0;
6f086dfc 2457#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2458 in_regs = true;
e7949876 2459#endif
6071dc7f
RH
2460 if (!in_regs && !data->named_arg)
2461 {
d5cc9181 2462 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
e7949876 2463 {
6071dc7f 2464 rtx tem;
d5cc9181 2465 tem = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2466 data->promoted_mode,
2467 data->passed_type, true);
6071dc7f 2468 in_regs = tem != NULL;
e7949876 2469 }
6071dc7f 2470 }
e7949876 2471
6071dc7f
RH
2472 /* If this parameter was passed both in registers and in the stack, use
2473 the copy on the stack. */
fe984136
RH
2474 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2475 data->passed_type))
6071dc7f 2476 entry_parm = 0;
e7949876 2477
6071dc7f
RH
2478 if (entry_parm)
2479 {
2480 int partial;
2481
d5cc9181 2482 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
78a52f11
RH
2483 data->promoted_mode,
2484 data->passed_type,
2485 data->named_arg);
6071dc7f
RH
2486 data->partial = partial;
2487
2488 /* The caller might already have allocated stack space for the
2489 register parameters. */
2490 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2491 {
6071dc7f
RH
2492 /* Part of this argument is passed in registers and part
2493 is passed on the stack. Ask the prologue code to extend
2494 the stack part so that we can recreate the full value.
2495
2496 PRETEND_BYTES is the size of the registers we need to store.
2497 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2498 stack space that the prologue should allocate.
2499
2500 Internally, gcc assumes that the argument pointer is aligned
2501 to STACK_BOUNDARY bits. This is used both for alignment
2502 optimizations (see init_emit) and to locate arguments that are
2503 aligned to more than PARM_BOUNDARY bits. We must preserve this
2504 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2505 a stack boundary. */
2506
2507 /* We assume at most one partial arg, and it must be the first
2508 argument on the stack. */
0bccc606 2509 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2510
78a52f11 2511 pretend_bytes = partial;
6071dc7f
RH
2512 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2513
2514 /* We want to align relative to the actual stack pointer, so
2515 don't include this in the stack size until later. */
2516 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2517 }
6071dc7f 2518 }
e7949876 2519
6071dc7f
RH
2520 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2521 entry_parm ? data->partial : 0, current_function_decl,
2522 &all->stack_args_size, &data->locate);
6f086dfc 2523
e94a448f
L
2524 /* Update parm_stack_boundary if this parameter is passed in the
2525 stack. */
2526 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2527 crtl->parm_stack_boundary = data->locate.boundary;
2528
6071dc7f
RH
2529 /* Adjust offsets to include the pretend args. */
2530 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2531 data->locate.slot_offset.constant += pretend_bytes;
2532 data->locate.offset.constant += pretend_bytes;
ebca59c3 2533
6071dc7f
RH
2534 data->entry_parm = entry_parm;
2535}
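/* A worked example of the partial-argument bookkeeping above; the numbers
   are hypothetical.  If the target reports that 12 bytes of the first
   stack argument were actually passed in registers and STACK_BYTES is 16,
   then pretend_bytes is 12 and all->pretend_args_size is rounded up to
   CEIL_ROUND (12, 16) == 16 so the argument pointer keeps its
   STACK_BOUNDARY alignment; the 4 bytes of padding introduced by the
   rounding are then added to this argument's slot and stack offsets just
   above (later arguments see the full 16).  */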
6f086dfc 2536
6071dc7f
RH
2537/* A subroutine of assign_parms. If there is actually space on the stack
2538 for this parm, count it in stack_args_size and return true. */
6f086dfc 2539
6071dc7f
RH
2540static bool
2541assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2542 struct assign_parm_data_one *data)
2543{
2e6ae27f 2544 /* Trivially true if we've no incoming register. */
6071dc7f
RH
2545 if (data->entry_parm == NULL)
2546 ;
2547 /* Also true if we're partially in registers and partially not,
2548 since we've arranged to drop the entire argument on the stack. */
2549 else if (data->partial != 0)
2550 ;
2551 /* Also true if the target says that it's passed in both registers
2552 and on the stack. */
2553 else if (GET_CODE (data->entry_parm) == PARALLEL
2554 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2555 ;
2556 /* Also true if the target says that there's stack allocated for
2557 all register parameters. */
2558 else if (all->reg_parm_stack_space > 0)
2559 ;
2560 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2561 else
2562 return false;
6f086dfc 2563
6071dc7f
RH
2564 all->stack_args_size.constant += data->locate.size.constant;
2565 if (data->locate.size.var)
2566 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2567
6071dc7f
RH
2568 return true;
2569}
0d1416c6 2570
6071dc7f
RH
2571/* A subroutine of assign_parms. Given that this parameter is allocated
2572 stack space by the ABI, find it. */
6f086dfc 2573
6071dc7f
RH
2574static void
2575assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2576{
2577 rtx offset_rtx, stack_parm;
2578 unsigned int align, boundary;
6f086dfc 2579
6071dc7f
RH
2580 /* If we're passing this arg using a reg, make its stack home the
2581 aligned stack slot. */
2582 if (data->entry_parm)
2583 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2584 else
2585 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2586
38173d38 2587 stack_parm = crtl->args.internal_arg_pointer;
6071dc7f
RH
2588 if (offset_rtx != const0_rtx)
2589 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2590 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2591
08ab0acf 2592 if (!data->passed_pointer)
997f78fb 2593 {
08ab0acf
JJ
2594 set_mem_attributes (stack_parm, parm, 1);
2595 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2596 while promoted mode's size is needed. */
2597 if (data->promoted_mode != BLKmode
2598 && data->promoted_mode != DECL_MODE (parm))
997f78fb 2599 {
f5541398 2600 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
527210c4 2601 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
08ab0acf
JJ
2602 {
2603 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2604 data->promoted_mode);
2605 if (offset)
527210c4 2606 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
08ab0acf 2607 }
997f78fb
JJ
2608 }
2609 }
6071dc7f 2610
bfc45551
AM
2611 boundary = data->locate.boundary;
2612 align = BITS_PER_UNIT;
6071dc7f
RH
2613
2614 /* If we're padding upward, we know that the alignment of the slot
c2ed6cf8 2615 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
6071dc7f
RH
2616 intentionally forcing upward padding. Otherwise we have to come
2617 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2618 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f 2619 align = boundary;
481683e1 2620 else if (CONST_INT_P (offset_rtx))
6071dc7f
RH
2621 {
2622 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2623 align = align & -align;
2624 }
bfc45551 2625 set_mem_align (stack_parm, align);
6071dc7f
RH
2626
2627 if (data->entry_parm)
2628 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2629
2630 data->stack_parm = stack_parm;
2631}
2632
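/* A worked example of the alignment guess above, with hypothetical
   numbers and BITS_PER_UNIT == 8.  For a downward-padded slot with
   offset_rtx == (const_int 4) and boundary == 64, align becomes
   (4 * 8) | 64 == 96, and 96 & -96 == 32, so the MEM is marked 32-bit
   aligned: the largest power of two known to divide both the boundary
   and the byte offset from it.  */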
 2633/* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
 2634   always valid and contiguous.  */
2635
2636static void
2637assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2638{
2639 rtx entry_parm = data->entry_parm;
2640 rtx stack_parm = data->stack_parm;
2641
2642 /* If this parm was passed part in regs and part in memory, pretend it
2643 arrived entirely in memory by pushing the register-part onto the stack.
2644 In the special case of a DImode or DFmode that is split, we could put
2645 it together in a pseudoreg directly, but for now that's not worth
2646 bothering with. */
2647 if (data->partial != 0)
2648 {
2649 /* Handle calls that pass values in multiple non-contiguous
2650 locations. The Irix 6 ABI has examples of this. */
2651 if (GET_CODE (entry_parm) == PARALLEL)
2652 emit_group_store (validize_mem (stack_parm), entry_parm,
b8698a0f 2653 data->passed_type,
6071dc7f 2654 int_size_in_bytes (data->passed_type));
6f086dfc 2655 else
78a52f11
RH
2656 {
2657 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2658 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2659 data->partial / UNITS_PER_WORD);
2660 }
6f086dfc 2661
6071dc7f
RH
2662 entry_parm = stack_parm;
2663 }
6f086dfc 2664
6071dc7f
RH
2665 /* If we didn't decide this parm came in a register, by default it came
2666 on the stack. */
2667 else if (entry_parm == NULL)
2668 entry_parm = stack_parm;
2669
2670 /* When an argument is passed in multiple locations, we can't make use
2671 of this information, but we can save some copying if the whole argument
2672 is passed in a single register. */
2673 else if (GET_CODE (entry_parm) == PARALLEL
2674 && data->nominal_mode != BLKmode
2675 && data->passed_mode != BLKmode)
2676 {
2677 size_t i, len = XVECLEN (entry_parm, 0);
2678
2679 for (i = 0; i < len; i++)
2680 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2681 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2682 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2683 == data->passed_mode)
2684 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2685 {
2686 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2687 break;
2688 }
2689 }
e68a6ce1 2690
6071dc7f
RH
2691 data->entry_parm = entry_parm;
2692}
6f086dfc 2693
4d2a9850
DJ
2694/* A subroutine of assign_parms. Reconstitute any values which were
2695 passed in multiple registers and would fit in a single register. */
2696
2697static void
2698assign_parm_remove_parallels (struct assign_parm_data_one *data)
2699{
2700 rtx entry_parm = data->entry_parm;
2701
2702 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2703 This can be done with register operations rather than on the
2704 stack, even if we will store the reconstituted parameter on the
2705 stack later. */
85776d60 2706 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
4d2a9850
DJ
2707 {
2708 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
bbd46fd5 2709 emit_group_store (parmreg, entry_parm, data->passed_type,
4d2a9850
DJ
2710 GET_MODE_SIZE (GET_MODE (entry_parm)));
2711 entry_parm = parmreg;
2712 }
2713
2714 data->entry_parm = entry_parm;
2715}
2716
6071dc7f
RH
 2717/* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
 2718   always valid and properly aligned.  */
6f086dfc 2719
6071dc7f
RH
2720static void
2721assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2722{
2723 rtx stack_parm = data->stack_parm;
2724
2725 /* If we can't trust the parm stack slot to be aligned enough for its
2726 ultimate type, don't use that slot after entry. We'll make another
2727 stack slot, if we need one. */
bfc45551
AM
2728 if (stack_parm
2729 && ((STRICT_ALIGNMENT
2730 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2731 || (data->nominal_type
2732 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2733 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2734 stack_parm = NULL;
2735
2736 /* If parm was passed in memory, and we need to convert it on entry,
2737 don't store it back in that same slot. */
2738 else if (data->entry_parm == stack_parm
2739 && data->nominal_mode != BLKmode
2740 && data->nominal_mode != data->passed_mode)
2741 stack_parm = NULL;
2742
7d69de61
RH
2743 /* If stack protection is in effect for this function, don't leave any
2744 pointers in their passed stack slots. */
cb91fab0 2745 else if (crtl->stack_protect_guard
7d69de61
RH
2746 && (flag_stack_protect == 2
2747 || data->passed_pointer
2748 || POINTER_TYPE_P (data->nominal_type)))
2749 stack_parm = NULL;
2750
6071dc7f
RH
2751 data->stack_parm = stack_parm;
2752}
a0506b54 2753
6071dc7f
RH
2754/* A subroutine of assign_parms. Return true if the current parameter
2755 should be stored as a BLKmode in the current frame. */
2756
2757static bool
2758assign_parm_setup_block_p (struct assign_parm_data_one *data)
2759{
2760 if (data->nominal_mode == BLKmode)
2761 return true;
85776d60
DJ
2762 if (GET_MODE (data->entry_parm) == BLKmode)
2763 return true;
531547e9 2764
6e985040 2765#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2766 /* Only assign_parm_setup_block knows how to deal with register arguments
2767 that are padded at the least significant end. */
2768 if (REG_P (data->entry_parm)
2769 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2770 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2771 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2772 return true;
6e985040 2773#endif
6071dc7f
RH
2774
2775 return false;
2776}
2777
b8698a0f 2778/* A subroutine of assign_parms. Arrange for the parameter to be
6071dc7f
RH
 2779   present and valid in DATA->STACK_PARM.  */
2780
2781static void
27e29549
RH
2782assign_parm_setup_block (struct assign_parm_data_all *all,
2783 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2784{
2785 rtx entry_parm = data->entry_parm;
2786 rtx stack_parm = data->stack_parm;
bfc45551
AM
2787 HOST_WIDE_INT size;
2788 HOST_WIDE_INT size_stored;
6071dc7f 2789
27e29549
RH
2790 if (GET_CODE (entry_parm) == PARALLEL)
2791 entry_parm = emit_group_move_into_temps (entry_parm);
2792
bfc45551
AM
2793 size = int_size_in_bytes (data->passed_type);
2794 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2795 if (stack_parm == 0)
2796 {
a561d88b 2797 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2798 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2799 DECL_ALIGN (parm));
bfc45551
AM
2800 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2801 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2802 set_mem_attributes (stack_parm, parm, 1);
2803 }
2804
6071dc7f
RH
2805 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2806 calls that pass values in multiple non-contiguous locations. */
2807 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2808 {
6071dc7f
RH
2809 rtx mem;
2810
2811 /* Note that we will be storing an integral number of words.
2812 So we have to be careful to ensure that we allocate an
bfc45551 2813 integral number of words. We do this above when we call
6071dc7f
RH
2814 assign_stack_local if space was not allocated in the argument
2815 list. If it was, this will not work if PARM_BOUNDARY is not
2816 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2817 if it becomes a problem. Exception is when BLKmode arrives
2818 with arguments not conforming to word_mode. */
2819
bfc45551
AM
2820 if (data->stack_parm == 0)
2821 ;
6071dc7f
RH
2822 else if (GET_CODE (entry_parm) == PARALLEL)
2823 ;
0bccc606
NS
2824 else
2825 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2826
6071dc7f 2827 mem = validize_mem (stack_parm);
c6b97fac 2828
6071dc7f
RH
2829 /* Handle values in multiple non-contiguous locations. */
2830 if (GET_CODE (entry_parm) == PARALLEL)
27e29549 2831 {
bb27eeda
SE
2832 push_to_sequence2 (all->first_conversion_insn,
2833 all->last_conversion_insn);
27e29549 2834 emit_group_store (mem, entry_parm, data->passed_type, size);
bb27eeda
SE
2835 all->first_conversion_insn = get_insns ();
2836 all->last_conversion_insn = get_last_insn ();
27e29549
RH
2837 end_sequence ();
2838 }
c6b97fac 2839
6071dc7f
RH
2840 else if (size == 0)
2841 ;
5c07bd7a 2842
6071dc7f
RH
2843 /* If SIZE is that of a mode no bigger than a word, just use
2844 that mode's store operation. */
2845 else if (size <= UNITS_PER_WORD)
2846 {
2847 enum machine_mode mode
2848 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2849
6071dc7f 2850 if (mode != BLKmode
6e985040 2851#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2852 && (size == UNITS_PER_WORD
2853 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2854 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2855#endif
6071dc7f
RH
2856 )
2857 {
208996c7
RS
2858 rtx reg;
2859
2860 /* We are really truncating a word_mode value containing
2861 SIZE bytes into a value of mode MODE. If such an
2862 operation requires no actual instructions, we can refer
2863 to the value directly in mode MODE, otherwise we must
2864 start with the register in word_mode and explicitly
2865 convert it. */
2866 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2867 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2868 else
2869 {
2870 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2871 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2872 }
6071dc7f
RH
2873 emit_move_insn (change_address (mem, mode, 0), reg);
2874 }
c6b97fac 2875
6071dc7f
RH
2876 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2877 machine must be aligned to the left before storing
2878 to memory. Note that the previous test doesn't
2879 handle all cases (e.g. SIZE == 3). */
2880 else if (size != UNITS_PER_WORD
6e985040 2881#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2882 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2883 == downward)
6e985040 2884#else
6071dc7f 2885 && BYTES_BIG_ENDIAN
6e985040 2886#endif
6071dc7f
RH
2887 )
2888 {
2889 rtx tem, x;
2890 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2891 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2892
eb6c3df1 2893 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
6071dc7f
RH
2894 tem = change_address (mem, word_mode, 0);
2895 emit_move_insn (tem, x);
6f086dfc 2896 }
6071dc7f 2897 else
27e29549 2898 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2899 size_stored / UNITS_PER_WORD);
6f086dfc 2900 }
6071dc7f 2901 else
27e29549 2902 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2903 size_stored / UNITS_PER_WORD);
2904 }
bfc45551
AM
2905 else if (data->stack_parm == 0)
2906 {
bb27eeda 2907 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2908 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2909 BLOCK_OP_NORMAL);
bb27eeda
SE
2910 all->first_conversion_insn = get_insns ();
2911 all->last_conversion_insn = get_last_insn ();
bfc45551
AM
2912 end_sequence ();
2913 }
6071dc7f 2914
bfc45551 2915 data->stack_parm = stack_parm;
6071dc7f
RH
2916 SET_DECL_RTL (parm, stack_parm);
2917}
2918
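/* A worked example of the subword case above (hypothetical big-endian
   target, UNITS_PER_WORD == 4).  A 3-byte BLKmode argument has no integer
   mode of its own, so the incoming register is shifted left by
   (UNITS_PER_WORD - size) * BITS_PER_UNIT == 8 bits and stored as a full
   word, which leaves the three meaningful bytes at the start of the stack
   slot.  A size that does map onto a mode no wider than a word is stored
   directly in the mode returned by mode_for_size.  */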
2919/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2920 parameter. Get it there. Perform all ABI specified conversions. */
2921
2922static void
2923assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2924 struct assign_parm_data_one *data)
2925{
71008de4
BS
2926 rtx parmreg, validated_mem;
2927 rtx equiv_stack_parm;
6071dc7f
RH
2928 enum machine_mode promoted_nominal_mode;
2929 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2930 bool did_conversion = false;
71008de4 2931 bool need_conversion, moved;
6071dc7f
RH
2932
2933 /* Store the parm in a pseudoregister during the function, but we may
666e3ceb
PB
2934 need to do it in a wider mode. Using 2 here makes the result
2935 consistent with promote_decl_mode and thus expand_expr_real_1. */
6071dc7f 2936 promoted_nominal_mode
cde0f3fd 2937 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
666e3ceb 2938 TREE_TYPE (current_function_decl), 2);
6071dc7f
RH
2939
2940 parmreg = gen_reg_rtx (promoted_nominal_mode);
2941
2942 if (!DECL_ARTIFICIAL (parm))
2943 mark_user_reg (parmreg);
2944
2945 /* If this was an item that we received a pointer to,
2946 set DECL_RTL appropriately. */
2947 if (data->passed_pointer)
2948 {
2949 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2950 set_mem_attributes (x, parm, 1);
2951 SET_DECL_RTL (parm, x);
2952 }
2953 else
389fdba0 2954 SET_DECL_RTL (parm, parmreg);
6071dc7f 2955
4d2a9850
DJ
2956 assign_parm_remove_parallels (data);
2957
666e3ceb
PB
2958 /* Copy the value into the register, thus bridging between
2959 assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 2960
71008de4
BS
2961 equiv_stack_parm = data->stack_parm;
2962 validated_mem = validize_mem (data->entry_parm);
2963
2964 need_conversion = (data->nominal_mode != data->passed_mode
2965 || promoted_nominal_mode != data->promoted_mode);
2966 moved = false;
2967
dbb94435
BS
2968 if (need_conversion
2969 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2970 && data->nominal_mode == data->passed_mode
2971 && data->nominal_mode == GET_MODE (data->entry_parm))
71008de4 2972 {
6071dc7f
RH
2973 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2974 mode, by the caller. We now have to convert it to
2975 NOMINAL_MODE, if different. However, PARMREG may be in
2976 a different mode than NOMINAL_MODE if it is being stored
2977 promoted.
2978
2979 If ENTRY_PARM is a hard register, it might be in a register
2980 not valid for operating in its mode (e.g., an odd-numbered
2981 register for a DFmode). In that case, moves are the only
2982 thing valid, so we can't do a convert from there. This
2983 occurs when the calling sequence allow such misaligned
2984 usages.
2985
2986 In addition, the conversion may involve a call, which could
2987 clobber parameters which haven't been copied to pseudo
71008de4
BS
2988 registers yet.
2989
2990 First, we try to emit an insn which performs the necessary
2991 conversion. We verify that this insn does not clobber any
2992 hard registers. */
2993
2994 enum insn_code icode;
2995 rtx op0, op1;
2996
2997 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2998 unsignedp);
2999
3000 op0 = parmreg;
3001 op1 = validated_mem;
3002 if (icode != CODE_FOR_nothing
2ef6ce06
RS
3003 && insn_operand_matches (icode, 0, op0)
3004 && insn_operand_matches (icode, 1, op1))
71008de4
BS
3005 {
3006 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3007 rtx insn, insns;
3008 HARD_REG_SET hardregs;
3009
3010 start_sequence ();
3011 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
3012 data->passed_mode, unsignedp);
3013 emit_insn (insn);
3014 insns = get_insns ();
3015
3016 moved = true;
3017 CLEAR_HARD_REG_SET (hardregs);
3018 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3019 {
3020 if (INSN_P (insn))
3021 note_stores (PATTERN (insn), record_hard_reg_sets,
3022 &hardregs);
3023 if (!hard_reg_set_empty_p (hardregs))
3024 moved = false;
3025 }
3026
3027 end_sequence ();
3028
3029 if (moved)
3030 {
3031 emit_insn (insns);
dbb94435
BS
3032 if (equiv_stack_parm != NULL_RTX)
3033 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3034 equiv_stack_parm);
71008de4
BS
3035 }
3036 }
3037 }
3038
3039 if (moved)
3040 /* Nothing to do. */
3041 ;
3042 else if (need_conversion)
3043 {
3044 /* We did not have an insn to convert directly, or the sequence
3045 generated appeared unsafe. We must first copy the parm to a
3046 pseudo reg, and save the conversion until after all
6071dc7f
RH
3047 parameters have been moved. */
3048
71008de4 3049 int save_tree_used;
6071dc7f
RH
3050 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3051
71008de4 3052 emit_move_insn (tempreg, validated_mem);
6071dc7f 3053
bb27eeda 3054 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
3055 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3056
3057 if (GET_CODE (tempreg) == SUBREG
3058 && GET_MODE (tempreg) == data->nominal_mode
3059 && REG_P (SUBREG_REG (tempreg))
3060 && data->nominal_mode == data->passed_mode
3061 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3062 && GET_MODE_SIZE (GET_MODE (tempreg))
3063 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 3064 {
6071dc7f
RH
3065 /* The argument is already sign/zero extended, so note it
3066 into the subreg. */
3067 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3068 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3069 }
00d8a4c1 3070
6071dc7f
RH
3071 /* TREE_USED gets set erroneously during expand_assignment. */
3072 save_tree_used = TREE_USED (parm);
79f5e442 3073 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 3074 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
3075 all->first_conversion_insn = get_insns ();
3076 all->last_conversion_insn = get_last_insn ();
6071dc7f 3077 end_sequence ();
00d8a4c1 3078
6071dc7f
RH
3079 did_conversion = true;
3080 }
3081 else
71008de4 3082 emit_move_insn (parmreg, validated_mem);
6071dc7f
RH
3083
3084 /* If we were passed a pointer but the actual value can safely live
3085 in a register, put it in one. */
3086 if (data->passed_pointer
3087 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3088 /* If by-reference argument was promoted, demote it. */
3089 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3090 || use_register_for_decl (parm)))
3091 {
3092 /* We can't use nominal_mode, because it will have been set to
3093 Pmode above. We must use the actual mode of the parm. */
3094 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3095 mark_user_reg (parmreg);
cd5b3469 3096
6071dc7f
RH
3097 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3098 {
3099 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3100 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3101
bb27eeda
SE
3102 push_to_sequence2 (all->first_conversion_insn,
3103 all->last_conversion_insn);
6071dc7f
RH
3104 emit_move_insn (tempreg, DECL_RTL (parm));
3105 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3106 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
3107 all->first_conversion_insn = get_insns ();
3108 all->last_conversion_insn = get_last_insn ();
6071dc7f 3109 end_sequence ();
6f086dfc 3110
6071dc7f
RH
3111 did_conversion = true;
3112 }
3113 else
3114 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 3115
6071dc7f 3116 SET_DECL_RTL (parm, parmreg);
797a6ac1 3117
6071dc7f
RH
3118 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3119 now the parm. */
3120 data->stack_parm = NULL;
3121 }
ddef6bc7 3122
6071dc7f
RH
3123 /* Mark the register as eliminable if we did no conversion and it was
3124 copied from memory at a fixed offset, and the arg pointer was not
3125 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3126 offset formed an invalid address, such memory-equivalences as we
3127 make here would screw up life analysis for it. */
3128 if (data->nominal_mode == data->passed_mode
3129 && !did_conversion
3130 && data->stack_parm != 0
3131 && MEM_P (data->stack_parm)
3132 && data->locate.offset.var == 0
3133 && reg_mentioned_p (virtual_incoming_args_rtx,
3134 XEXP (data->stack_parm, 0)))
3135 {
3136 rtx linsn = get_last_insn ();
3137 rtx sinsn, set;
a03caf76 3138
6071dc7f
RH
3139 /* Mark complex types separately. */
3140 if (GET_CODE (parmreg) == CONCAT)
3141 {
3142 enum machine_mode submode
3143 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
3144 int regnor = REGNO (XEXP (parmreg, 0));
3145 int regnoi = REGNO (XEXP (parmreg, 1));
3146 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3147 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3148 GET_MODE_SIZE (submode));
6071dc7f
RH
3149
3150 /* Scan backwards for the set of the real and
3151 imaginary parts. */
3152 for (sinsn = linsn; sinsn != 0;
3153 sinsn = prev_nonnote_insn (sinsn))
3154 {
3155 set = single_set (sinsn);
3156 if (set == 0)
3157 continue;
3158
3159 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 3160 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 3161 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 3162 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 3163 }
6071dc7f 3164 }
7543f918
JR
3165 else
3166 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
6071dc7f
RH
3167 }
3168
3169 /* For pointer data type, suggest pointer register. */
3170 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3171 mark_reg_pointer (parmreg,
3172 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3173}
3174
3175/* A subroutine of assign_parms. Allocate stack space to hold the current
3176 parameter. Get it there. Perform all ABI specified conversions. */
3177
3178static void
3179assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3180 struct assign_parm_data_one *data)
3181{
3182 /* Value must be stored in the stack slot STACK_PARM during function
3183 execution. */
bfc45551 3184 bool to_conversion = false;
6071dc7f 3185
4d2a9850
DJ
3186 assign_parm_remove_parallels (data);
3187
6071dc7f
RH
3188 if (data->promoted_mode != data->nominal_mode)
3189 {
3190 /* Conversion is required. */
3191 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 3192
6071dc7f
RH
3193 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3194
bb27eeda 3195 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
3196 to_conversion = true;
3197
6071dc7f
RH
3198 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3199 TYPE_UNSIGNED (TREE_TYPE (parm)));
3200
3201 if (data->stack_parm)
dd67163f
JJ
3202 {
3203 int offset = subreg_lowpart_offset (data->nominal_mode,
3204 GET_MODE (data->stack_parm));
3205 /* ??? This may need a big-endian conversion on sparc64. */
3206 data->stack_parm
3207 = adjust_address (data->stack_parm, data->nominal_mode, 0);
527210c4 3208 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
dd67163f 3209 set_mem_offset (data->stack_parm,
527210c4 3210 MEM_OFFSET (data->stack_parm) + offset);
dd67163f 3211 }
6071dc7f
RH
3212 }
3213
3214 if (data->entry_parm != data->stack_parm)
3215 {
bfc45551
AM
3216 rtx src, dest;
3217
6071dc7f
RH
3218 if (data->stack_parm == 0)
3219 {
3a695389
UW
3220 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3221 GET_MODE (data->entry_parm),
3222 TYPE_ALIGN (data->passed_type));
6071dc7f
RH
3223 data->stack_parm
3224 = assign_stack_local (GET_MODE (data->entry_parm),
3225 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3a695389 3226 align);
6071dc7f 3227 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 3228 }
6071dc7f 3229
bfc45551
AM
3230 dest = validize_mem (data->stack_parm);
3231 src = validize_mem (data->entry_parm);
3232
3233 if (MEM_P (src))
6f086dfc 3234 {
bfc45551
AM
3235 /* Use a block move to handle potentially misaligned entry_parm. */
3236 if (!to_conversion)
bb27eeda
SE
3237 push_to_sequence2 (all->first_conversion_insn,
3238 all->last_conversion_insn);
bfc45551
AM
3239 to_conversion = true;
3240
3241 emit_block_move (dest, src,
3242 GEN_INT (int_size_in_bytes (data->passed_type)),
3243 BLOCK_OP_NORMAL);
6071dc7f
RH
3244 }
3245 else
bfc45551
AM
3246 emit_move_insn (dest, src);
3247 }
3248
3249 if (to_conversion)
3250 {
bb27eeda
SE
3251 all->first_conversion_insn = get_insns ();
3252 all->last_conversion_insn = get_last_insn ();
bfc45551 3253 end_sequence ();
6071dc7f 3254 }
6f086dfc 3255
6071dc7f
RH
3256 SET_DECL_RTL (parm, data->stack_parm);
3257}
3412b298 3258
6071dc7f
RH
3259/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3260 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 3261
6071dc7f 3262static void
3b3f318a
RG
3263assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3264 VEC(tree, heap) *fnargs)
6071dc7f
RH
3265{
3266 tree parm;
6ccd356e 3267 tree orig_fnargs = all->orig_fnargs;
3b3f318a 3268 unsigned i = 0;
f4ef873c 3269
3b3f318a 3270 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
6071dc7f
RH
3271 {
3272 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3273 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3274 {
3275 rtx tmp, real, imag;
3276 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 3277
3b3f318a
RG
3278 real = DECL_RTL (VEC_index (tree, fnargs, i));
3279 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
6071dc7f 3280 if (inner != GET_MODE (real))
6f086dfc 3281 {
6071dc7f
RH
3282 real = gen_lowpart_SUBREG (inner, real);
3283 imag = gen_lowpart_SUBREG (inner, imag);
3284 }
6ccd356e
AM
3285
3286 if (TREE_ADDRESSABLE (parm))
3287 {
3288 rtx rmem, imem;
3289 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3a695389
UW
3290 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3291 DECL_MODE (parm),
3292 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
3293
3294 /* split_complex_arg put the real and imag parts in
3295 pseudos. Move them to memory. */
3a695389 3296 tmp = assign_stack_local (DECL_MODE (parm), size, align);
6ccd356e
AM
3297 set_mem_attributes (tmp, parm, 1);
3298 rmem = adjust_address_nv (tmp, inner, 0);
3299 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
3300 push_to_sequence2 (all->first_conversion_insn,
3301 all->last_conversion_insn);
6ccd356e
AM
3302 emit_move_insn (rmem, real);
3303 emit_move_insn (imem, imag);
bb27eeda
SE
3304 all->first_conversion_insn = get_insns ();
3305 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
3306 end_sequence ();
3307 }
3308 else
3309 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 3310 SET_DECL_RTL (parm, tmp);
7e41ffa2 3311
3b3f318a
RG
3312 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3313 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
6071dc7f
RH
3314 if (inner != GET_MODE (real))
3315 {
3316 real = gen_lowpart_SUBREG (inner, real);
3317 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 3318 }
6071dc7f 3319 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5141868d 3320 set_decl_incoming_rtl (parm, tmp, false);
3b3f318a 3321 i++;
6f086dfc 3322 }
6f086dfc 3323 }
6071dc7f
RH
3324}
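/* Illustrative summary of the transformation above, assuming the ABI splits
   a _Complex double parameter: its two halves arrive in separate pseudos,
   and DECL_RTL (parm) is rebuilt as (concat:DC real imag); if the parameter
   is TREE_ADDRESSABLE, the halves are first spilled to a freshly allocated
   stack slot instead.  */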
3325
3326/* Assign RTL expressions to the function's parameters. This may involve
3327 copying them into registers and using those registers as the DECL_RTL. */
3328
6fe79279 3329static void
6071dc7f
RH
3330assign_parms (tree fndecl)
3331{
3332 struct assign_parm_data_all all;
3b3f318a
RG
3333 tree parm;
3334 VEC(tree, heap) *fnargs;
3335 unsigned i;
6f086dfc 3336
38173d38 3337 crtl->args.internal_arg_pointer
150cdc9e 3338 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
3339
3340 assign_parms_initialize_all (&all);
3341 fnargs = assign_parms_augmented_arg_list (&all);
3342
ac47786e 3343 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
ded9bf77 3344 {
6071dc7f
RH
3345 struct assign_parm_data_one data;
3346
3347 /* Extract the type of PARM; adjust it according to ABI. */
3348 assign_parm_find_data_types (&all, parm, &data);
3349
3350 /* Early out for errors and void parameters. */
3351 if (data.passed_mode == VOIDmode)
ded9bf77 3352 {
6071dc7f
RH
3353 SET_DECL_RTL (parm, const0_rtx);
3354 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3355 continue;
3356 }
196c42cd 3357
2e3f842f
L
3358 /* Estimate stack alignment from parameter alignment. */
3359 if (SUPPORTS_STACK_ALIGNMENT)
3360 {
c2ed6cf8
NF
3361 unsigned int align
3362 = targetm.calls.function_arg_boundary (data.promoted_mode,
3363 data.passed_type);
ae58e548
JJ
3364 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3365 align);
2e3f842f 3366 if (TYPE_ALIGN (data.nominal_type) > align)
ae58e548
JJ
3367 align = MINIMUM_ALIGNMENT (data.nominal_type,
3368 TYPE_MODE (data.nominal_type),
3369 TYPE_ALIGN (data.nominal_type));
2e3f842f
L
3370 if (crtl->stack_alignment_estimated < align)
3371 {
3372 gcc_assert (!crtl->stack_realign_processed);
3373 crtl->stack_alignment_estimated = align;
3374 }
3375 }
b8698a0f 3376
910ad8de 3377 if (cfun->stdarg && !DECL_CHAIN (parm))
8117c488 3378 assign_parms_setup_varargs (&all, &data, false);
196c42cd 3379
6071dc7f
RH
3380 /* Find out where the parameter arrives in this function. */
3381 assign_parm_find_entry_rtl (&all, &data);
3382
3383 /* Find out where stack space for this parameter might be. */
3384 if (assign_parm_is_stack_parm (&all, &data))
3385 {
3386 assign_parm_find_stack_rtl (parm, &data);
3387 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3388 }
6071dc7f
RH
3389
3390 /* Record permanently how this parm was passed. */
a82ff31f
JJ
3391 if (data.passed_pointer)
3392 {
3393 rtx incoming_rtl
3394 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3395 data.entry_parm);
3396 set_decl_incoming_rtl (parm, incoming_rtl, true);
3397 }
3398 else
3399 set_decl_incoming_rtl (parm, data.entry_parm, false);
6071dc7f
RH
3400
3401 /* Update info on where next arg arrives in registers. */
d5cc9181 3402 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3403 data.passed_type, data.named_arg);
6071dc7f
RH
3404
3405 assign_parm_adjust_stack_rtl (&data);
3406
3407 if (assign_parm_setup_block_p (&data))
27e29549 3408 assign_parm_setup_block (&all, parm, &data);
6071dc7f
RH
3409 else if (data.passed_pointer || use_register_for_decl (parm))
3410 assign_parm_setup_reg (&all, parm, &data);
3411 else
3412 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
3413 }
3414
3b3f318a 3415 if (targetm.calls.split_complex_arg)
6ccd356e 3416 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3417
3b3f318a
RG
3418 VEC_free (tree, heap, fnargs);
3419
3412b298
JW
3420 /* Output all parameter conversion instructions (possibly including calls)
3421 now that all parameters have been copied out of hard registers. */
bb27eeda 3422 emit_insn (all.first_conversion_insn);
3412b298 3423
2e3f842f
L
3424 /* Estimate reload stack alignment from scalar return mode. */
3425 if (SUPPORTS_STACK_ALIGNMENT)
3426 {
3427 if (DECL_RESULT (fndecl))
3428 {
3429 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3430 enum machine_mode mode = TYPE_MODE (type);
3431
3432 if (mode != BLKmode
3433 && mode != VOIDmode
3434 && !AGGREGATE_TYPE_P (type))
3435 {
3436 unsigned int align = GET_MODE_ALIGNMENT (mode);
3437 if (crtl->stack_alignment_estimated < align)
3438 {
3439 gcc_assert (!crtl->stack_realign_processed);
3440 crtl->stack_alignment_estimated = align;
3441 }
3442 }
b8698a0f 3443 }
2e3f842f
L
3444 }
3445
b36a8cc2
OH
3446 /* If we are receiving a struct value address as the first argument, set up
3447 the RTL for the function result. As this might require code to convert
3448 the transmitted address to Pmode, we do this here to ensure that possible
3449 preliminary conversions of the address have been emitted already. */
6071dc7f 3450 if (all.function_result_decl)
b36a8cc2 3451 {
6071dc7f
RH
3452 tree result = DECL_RESULT (current_function_decl);
3453 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3454 rtx x;
fa8db1f7 3455
cc77ae10 3456 if (DECL_BY_REFERENCE (result))
8dcfef8f
AO
3457 {
3458 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3459 x = addr;
3460 }
cc77ae10
JM
3461 else
3462 {
8dcfef8f
AO
3463 SET_DECL_VALUE_EXPR (result,
3464 build1 (INDIRECT_REF, TREE_TYPE (result),
3465 all.function_result_decl));
cc77ae10
JM
3466 addr = convert_memory_address (Pmode, addr);
3467 x = gen_rtx_MEM (DECL_MODE (result), addr);
3468 set_mem_attributes (x, result, 1);
3469 }
8dcfef8f
AO
3470
3471 DECL_HAS_VALUE_EXPR_P (result) = 1;
3472
b36a8cc2
OH
3473 SET_DECL_RTL (result, x);
3474 }
3475
53c428d0 3476 /* We have aligned all the args, so add space for the pretend args. */
38173d38 3477 crtl->args.pretend_args_size = all.pretend_args_size;
6071dc7f 3478 all.stack_args_size.constant += all.extra_pretend_bytes;
38173d38 3479 crtl->args.size = all.stack_args_size.constant;
6f086dfc
RS
3480
3481 /* Adjust function incoming argument size for alignment and
3482 minimum length. */
3483
3484#ifdef REG_PARM_STACK_SPACE
38173d38 3485 crtl->args.size = MAX (crtl->args.size,
6f086dfc 3486 REG_PARM_STACK_SPACE (fndecl));
6f90e075 3487#endif
6f086dfc 3488
38173d38 3489 crtl->args.size = CEIL_ROUND (crtl->args.size,
53366450 3490 PARM_BOUNDARY / BITS_PER_UNIT);
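 /* Illustrative example, assuming PARM_BOUNDARY == 64 (an 8-byte unit):
 13 bytes of incoming stack arguments are rounded up to 16 here. */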
4433e339 3491
6f086dfc 3492#ifdef ARGS_GROW_DOWNWARD
38173d38 3493 crtl->args.arg_offset_rtx
477eff96 3494 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3495 : expand_expr (size_diffop (all.stack_args_size.var,
3496 size_int (-all.stack_args_size.constant)),
bbbbb16a 3497 NULL_RTX, VOIDmode, EXPAND_NORMAL));
6f086dfc 3498#else
38173d38 3499 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3500#endif
3501
3502 /* See how many bytes, if any, of its args a function should try to pop
3503 on return. */
3504
079e7538
NF
3505 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3506 TREE_TYPE (fndecl),
3507 crtl->args.size);
6f086dfc 3508
3b69d50e
RK
3509 /* For a stdarg.h function, save info about
3510 regs and stack space used by the named args. */
6f086dfc 3511
d5cc9181 3512 crtl->args.info = all.args_so_far_v;
6f086dfc
RS
3513
3514 /* Set the rtx used for the function return value. Put this in its
3515 own variable so any optimizers that need this information don't have
3516 to include tree.h. Do this here so it gets done when an inlined
3517 function gets output. */
3518
38173d38 3519 crtl->return_rtx
19e7881c
MM
3520 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3521 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3522
3523 /* If scalar return value was computed in a pseudo-reg, or was a named
3524 return value that got dumped to the stack, copy that to the hard
3525 return register. */
3526 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3527 {
3528 tree decl_result = DECL_RESULT (fndecl);
3529 rtx decl_rtl = DECL_RTL (decl_result);
3530
3531 if (REG_P (decl_rtl)
3532 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3533 : DECL_REGISTER (decl_result))
3534 {
3535 rtx real_decl_rtl;
3536
1d636cc6
RG
3537 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3538 fndecl, true);
ce5e43d0 3539 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
38173d38 3540 /* The delay slot scheduler assumes that crtl->return_rtx
ce5e43d0
JJ
3541 holds the hard register containing the return value, not a
3542 temporary pseudo. */
38173d38 3543 crtl->return_rtx = real_decl_rtl;
ce5e43d0
JJ
3544 }
3545 }
6f086dfc 3546}
4744afba
RH
3547
3548/* A subroutine of gimplify_parameters, invoked via walk_tree.
3549 For all seen types, gimplify their sizes. */
3550
3551static tree
3552gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3553{
3554 tree t = *tp;
3555
3556 *walk_subtrees = 0;
3557 if (TYPE_P (t))
3558 {
3559 if (POINTER_TYPE_P (t))
3560 *walk_subtrees = 1;
ad50bc8d
RH
3561 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3562 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba 3563 {
726a989a 3564 gimplify_type_sizes (t, (gimple_seq *) data);
4744afba
RH
3565 *walk_subtrees = 1;
3566 }
3567 }
3568
3569 return NULL;
3570}
3571
3572/* Gimplify the parameter list for current_function_decl. This involves
3573 evaluating SAVE_EXPRs of variable sized parameters and generating code
726a989a
RB
3574 to implement callee-copied reference parameters. Returns a sequence of
3575 statements to add to the beginning of the function. */
4744afba 3576
726a989a 3577gimple_seq
4744afba
RH
3578gimplify_parameters (void)
3579{
3580 struct assign_parm_data_all all;
3b3f318a 3581 tree parm;
726a989a 3582 gimple_seq stmts = NULL;
3b3f318a
RG
3583 VEC(tree, heap) *fnargs;
3584 unsigned i;
4744afba
RH
3585
3586 assign_parms_initialize_all (&all);
3587 fnargs = assign_parms_augmented_arg_list (&all);
3588
ac47786e 3589 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
4744afba
RH
3590 {
3591 struct assign_parm_data_one data;
3592
3593 /* Extract the type of PARM; adjust it according to ABI. */
3594 assign_parm_find_data_types (&all, parm, &data);
3595
3596 /* Early out for errors and void parameters. */
3597 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3598 continue;
3599
3600 /* Update info on where next arg arrives in registers. */
d5cc9181 3601 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3602 data.passed_type, data.named_arg);
4744afba
RH
3603
3604 /* ??? Once upon a time variable_size stuffed parameter list
3605 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3606 turned out to be less than manageable in the gimple world.
3607 Now we have to hunt them down ourselves. */
3608 walk_tree_without_duplicates (&data.passed_type,
3609 gimplify_parm_type, &stmts);
3610
b38f3813 3611 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4744afba
RH
3612 {
3613 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3614 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3615 }
3616
3617 if (data.passed_pointer)
3618 {
3619 tree type = TREE_TYPE (data.passed_type);
d5cc9181 3620 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4744afba
RH
3621 type, data.named_arg))
3622 {
3623 tree local, t;
3624
b38f3813 3625 /* For constant-sized objects, this is trivial; for
4744afba 3626 variable-sized objects, we have to play games. */
b38f3813
EB
3627 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3628 && !(flag_stack_check == GENERIC_STACK_CHECK
3629 && compare_tree_int (DECL_SIZE_UNIT (parm),
3630 STACK_CHECK_MAX_VAR_SIZE) > 0))
4744afba 3631 {
5dac1dae 3632 local = create_tmp_var (type, get_name (parm));
4744afba 3633 DECL_IGNORED_P (local) = 0;
04487a2f
JJ
3634 /* If PARM was addressable, move that flag over
3635 to the local copy, as its address will be taken,
37609bf0
RG
3636 not the PARM's. Keep the PARM's address taken
3637 as we'll query that flag during gimplification. */
04487a2f 3638 if (TREE_ADDRESSABLE (parm))
37609bf0 3639 TREE_ADDRESSABLE (local) = 1;
5dac1dae
JJ
3640 else if (TREE_CODE (type) == COMPLEX_TYPE
3641 || TREE_CODE (type) == VECTOR_TYPE)
3642 DECL_GIMPLE_REG_P (local) = 1;
4744afba
RH
3643 }
3644 else
3645 {
5039610b 3646 tree ptr_type, addr;
4744afba
RH
3647
3648 ptr_type = build_pointer_type (type);
c98b08ff 3649 addr = create_tmp_reg (ptr_type, get_name (parm));
4744afba
RH
3650 DECL_IGNORED_P (addr) = 0;
3651 local = build_fold_indirect_ref (addr);
3652
e79983f4 3653 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
c28f4b5c 3654 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
13e49da9
TV
3655 size_int (DECL_ALIGN (parm)));
3656
d3c12306 3657 /* The call has been built for a variable-sized object. */
63d2a353 3658 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4744afba 3659 t = fold_convert (ptr_type, t);
726a989a 3660 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4744afba
RH
3661 gimplify_and_add (t, &stmts);
3662 }
3663
726a989a 3664 gimplify_assign (local, parm, &stmts);
4744afba 3665
833b3afe
DB
3666 SET_DECL_VALUE_EXPR (parm, local);
3667 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3668 }
3669 }
3670 }
3671
3b3f318a
RG
3672 VEC_free (tree, heap, fnargs);
3673
4744afba
RH
3674 return stmts;
3675}
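/* Rough sketch of what the statements appended above amount to for a
   callee-copied, variable-sized reference parameter P (names are
   illustrative only):

     P.addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (P),
                                           DECL_ALIGN (P));
     *P.addr = P;

   P's DECL_VALUE_EXPR then becomes the dereference *P.addr, so later
   uses of P refer to the local copy.  */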
75dc3319 3676\f
6f086dfc
RS
3677/* Compute the size and offset from the start of the stacked arguments for a
3678 parm passed in mode PASSED_MODE and with type TYPE.
3679
3680 INITIAL_OFFSET_PTR points to the current offset into the stacked
3681 arguments.
3682
e7949876
AM
3683 The starting offset and size for this parm are returned in
3684 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3685 nonzero, the offset is that of stack slot, which is returned in
3686 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3687 padding required from the initial offset ptr to the stack slot.
6f086dfc 3688
cc2902df 3689 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3690 never be set if REG_PARM_STACK_SPACE is not defined.
3691
3692 FNDECL is the function in which the argument was defined.
3693
3694 There are two types of rounding that are done. The first, controlled by
c2ed6cf8
NF
3695 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3696 argument list to be aligned to the specific boundary (in bits). This
3697 rounding affects the initial and starting offsets, but not the argument
3698 size.
6f086dfc
RS
3699
3700 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3701 optionally rounds the size of the parm to PARM_BOUNDARY. The
3702 initial offset is not affected by this rounding, while the size always
3703 is and the starting offset may be. */
3704
e7949876
AM
3705/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3706 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3707 callers pass in the total size of args so far as
e7949876 3708 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3709
6f086dfc 3710void
fa8db1f7
AJ
3711locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3712 int partial, tree fndecl ATTRIBUTE_UNUSED,
3713 struct args_size *initial_offset_ptr,
3714 struct locate_and_pad_arg_data *locate)
6f086dfc 3715{
e7949876
AM
3716 tree sizetree;
3717 enum direction where_pad;
123148b5 3718 unsigned int boundary, round_boundary;
e7949876
AM
3719 int reg_parm_stack_space = 0;
3720 int part_size_in_regs;
6f086dfc
RS
3721
3722#ifdef REG_PARM_STACK_SPACE
e7949876 3723 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
e7949876 3724
6f086dfc
RS
3725 /* If we have found a stack parm before we reach the end of the
3726 area reserved for registers, skip that area. */
3727 if (! in_regs)
3728 {
6f086dfc
RS
3729 if (reg_parm_stack_space > 0)
3730 {
3731 if (initial_offset_ptr->var)
3732 {
3733 initial_offset_ptr->var
3734 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3735 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3736 initial_offset_ptr->constant = 0;
3737 }
3738 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3739 initial_offset_ptr->constant = reg_parm_stack_space;
3740 }
3741 }
3742#endif /* REG_PARM_STACK_SPACE */
3743
78a52f11 3744 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
3745
3746 sizetree
3747 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3748 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
c2ed6cf8 3749 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
123148b5
BS
3750 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3751 type);
6e985040 3752 locate->where_pad = where_pad;
2e3f842f
L
3753
3754 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3755 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3756 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3757
bfc45551 3758 locate->boundary = boundary;
6f086dfc 3759
2e3f842f
L
3760 if (SUPPORTS_STACK_ALIGNMENT)
3761 {
3762 /* stack_alignment_estimated can't change after stack has been
3763 realigned. */
3764 if (crtl->stack_alignment_estimated < boundary)
3765 {
3766 if (!crtl->stack_realign_processed)
3767 crtl->stack_alignment_estimated = boundary;
3768 else
3769 {
3770 /* If stack is realigned and stack alignment value
3771 hasn't been finalized, it is OK not to increase
3772 stack_alignment_estimated. The bigger alignment
3773 requirement is recorded in stack_alignment_needed
3774 below. */
3775 gcc_assert (!crtl->stack_realign_finalized
3776 && crtl->stack_realign_needed);
3777 }
3778 }
3779 }
3780
c7e777b5
RH
3781 /* Remember if the outgoing parameter requires extra alignment on the
3782 calling function side. */
cb91fab0
JH
3783 if (crtl->stack_alignment_needed < boundary)
3784 crtl->stack_alignment_needed = boundary;
2e3f842f
L
3785 if (crtl->preferred_stack_boundary < boundary)
3786 crtl->preferred_stack_boundary = boundary;
c7e777b5 3787
6f086dfc 3788#ifdef ARGS_GROW_DOWNWARD
e7949876 3789 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 3790 if (initial_offset_ptr->var)
e7949876
AM
3791 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3792 initial_offset_ptr->var);
9dff28ab 3793
e7949876
AM
3794 {
3795 tree s2 = sizetree;
3796 if (where_pad != none
3797 && (!host_integerp (sizetree, 1)
123148b5
BS
3798 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3799 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
e7949876
AM
3800 SUB_PARM_SIZE (locate->slot_offset, s2);
3801 }
3802
3803 locate->slot_offset.constant += part_size_in_regs;
9dff28ab
JDA
3804
3805 if (!in_regs
3806#ifdef REG_PARM_STACK_SPACE
3807 || REG_PARM_STACK_SPACE (fndecl) > 0
3808#endif
3809 )
e7949876
AM
3810 pad_to_arg_alignment (&locate->slot_offset, boundary,
3811 &locate->alignment_pad);
9dff28ab 3812
e7949876
AM
3813 locate->size.constant = (-initial_offset_ptr->constant
3814 - locate->slot_offset.constant);
6f086dfc 3815 if (initial_offset_ptr->var)
e7949876
AM
3816 locate->size.var = size_binop (MINUS_EXPR,
3817 size_binop (MINUS_EXPR,
3818 ssize_int (0),
3819 initial_offset_ptr->var),
3820 locate->slot_offset.var);
3821
3822 /* Pad_below needs the pre-rounded size to know how much to pad
3823 below. */
3824 locate->offset = locate->slot_offset;
3825 if (where_pad == downward)
3826 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 3827
6f086dfc 3828#else /* !ARGS_GROW_DOWNWARD */
832ea3b3
FS
3829 if (!in_regs
3830#ifdef REG_PARM_STACK_SPACE
3831 || REG_PARM_STACK_SPACE (fndecl) > 0
3832#endif
3833 )
e7949876
AM
3834 pad_to_arg_alignment (initial_offset_ptr, boundary,
3835 &locate->alignment_pad);
3836 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
3837
3838#ifdef PUSH_ROUNDING
3839 if (passed_mode != BLKmode)
3840 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3841#endif
3842
d4b0a7a0
DE
3843 /* Pad_below needs the pre-rounded size to know how much to pad below
3844 so this must be done before rounding up. */
e7949876
AM
3845 locate->offset = locate->slot_offset;
3846 if (where_pad == downward)
3847 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 3848
6f086dfc 3849 if (where_pad != none
1468899d 3850 && (!host_integerp (sizetree, 1)
123148b5
BS
3851 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3852 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
6f086dfc 3853
e7949876
AM
3854 ADD_PARM_SIZE (locate->size, sizetree);
3855
3856 locate->size.constant -= part_size_in_regs;
6f086dfc 3857#endif /* ARGS_GROW_DOWNWARD */
099590dc
MM
3858
3859#ifdef FUNCTION_ARG_OFFSET
3860 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3861#endif
6f086dfc
RS
3862}
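/* Worked example, purely illustrative and ignoring PUSH_ROUNDING and
   REG_PARM_STACK_SPACE: on a target where the args grow upward, with a
   4-byte scalar argument, a 64-bit argument boundary (and the same round
   boundary), upward padding, and an initial offset of 12 bytes, the slot
   offset is padded to 16, locate->offset equals the slot offset, and
   locate->size is rounded up to 8.  */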
3863
e16c591a
RS
3864/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3865 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
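/* For instance (illustrative only, with STACK_POINTER_OFFSET taken as 0):
   with BOUNDARY == 64, a constant offset of 20 bytes becomes 24 when the
   args grow upward, or 16 when ARGS_GROW_DOWNWARD is defined.  */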
3866
6f086dfc 3867static void
fa8db1f7
AJ
3868pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3869 struct args_size *alignment_pad)
6f086dfc 3870{
a544cfd2
KG
3871 tree save_var = NULL_TREE;
3872 HOST_WIDE_INT save_constant = 0;
a751cd5b 3873 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
3874 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3875
3876#ifdef SPARC_STACK_BOUNDARY_HACK
2358ff91
EB
3877 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3878 the real alignment of %sp. However, when it does this, the
3879 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
a594a19c
GK
3880 if (SPARC_STACK_BOUNDARY_HACK)
3881 sp_offset = 0;
3882#endif
4fc026cd 3883
6f6b8f81 3884 if (boundary > PARM_BOUNDARY)
4fc026cd
CM
3885 {
3886 save_var = offset_ptr->var;
3887 save_constant = offset_ptr->constant;
3888 }
3889
3890 alignment_pad->var = NULL_TREE;
3891 alignment_pad->constant = 0;
4fc026cd 3892
6f086dfc
RS
3893 if (boundary > BITS_PER_UNIT)
3894 {
3895 if (offset_ptr->var)
3896 {
a594a19c
GK
3897 tree sp_offset_tree = ssize_int (sp_offset);
3898 tree offset = size_binop (PLUS_EXPR,
3899 ARGS_SIZE_TREE (*offset_ptr),
3900 sp_offset_tree);
6f086dfc 3901#ifdef ARGS_GROW_DOWNWARD
a594a19c 3902 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 3903#else
a594a19c 3904 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 3905#endif
a594a19c
GK
3906
3907 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
3908 /* ARGS_SIZE_TREE includes constant term. */
3909 offset_ptr->constant = 0;
6f6b8f81 3910 if (boundary > PARM_BOUNDARY)
dd3f0101 3911 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 3912 save_var);
6f086dfc
RS
3913 }
3914 else
718fe406 3915 {
a594a19c 3916 offset_ptr->constant = -sp_offset +
6f086dfc 3917#ifdef ARGS_GROW_DOWNWARD
a594a19c 3918 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3919#else
a594a19c 3920 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3921#endif
6f6b8f81 3922 if (boundary > PARM_BOUNDARY)
718fe406
KH
3923 alignment_pad->constant = offset_ptr->constant - save_constant;
3924 }
6f086dfc
RS
3925 }
3926}
3927
3928static void
fa8db1f7 3929pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
6f086dfc
RS
3930{
3931 if (passed_mode != BLKmode)
3932 {
3933 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3934 offset_ptr->constant
3935 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3936 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3937 - GET_MODE_SIZE (passed_mode));
3938 }
3939 else
3940 {
3941 if (TREE_CODE (sizetree) != INTEGER_CST
3942 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3943 {
3944 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3945 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3946 /* Add it in. */
3947 ADD_PARM_SIZE (*offset_ptr, s2);
3948 SUB_PARM_SIZE (*offset_ptr, sizetree);
3949 }
3950 }
3951}
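/* Illustrative only: for a 32-bit passed_mode with PARM_BOUNDARY == 64,
   the first branch above adds (64/8) - 4 = 4 bytes of padding below the
   value.  */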
6f086dfc 3952\f
6f086dfc 3953
6fb5fa3c
DB
3954/* True if register REGNO was alive at a place where `setjmp' was
3955 called and was set more than once or is an argument. Such regs may
3956 be clobbered by `longjmp'. */
3957
3958static bool
3959regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3960{
3961 /* There appear to be cases where some local vars never reach the
3962 backend but have bogus regnos. */
3963 if (regno >= max_reg_num ())
3964 return false;
3965
3966 return ((REG_N_SETS (regno) > 1
3967 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3968 && REGNO_REG_SET_P (setjmp_crosses, regno));
3969}
3970
3971/* Walk the tree of blocks describing the binding levels within a
3972 function and warn about variables that might be killed by setjmp or
3973 vfork. This is done after flow analysis and before register
3974 allocation, since register allocation will clobber the pseudo-regs to
3975 hard regs. */
3976
3977static void
3978setjmp_vars_warning (bitmap setjmp_crosses, tree block)
6f086dfc 3979{
b3694847 3980 tree decl, sub;
6de9cd9a 3981
910ad8de 3982 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
6f086dfc 3983 {
6de9cd9a 3984 if (TREE_CODE (decl) == VAR_DECL
bc41842b 3985 && DECL_RTL_SET_P (decl)
f8cfc6aa 3986 && REG_P (DECL_RTL (decl))
6fb5fa3c 3987 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 3988 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
2b001724 3989 " %<longjmp%> or %<vfork%>", decl);
6f086dfc 3990 }
6de9cd9a 3991
87caf699 3992 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
6fb5fa3c 3993 setjmp_vars_warning (setjmp_crosses, sub);
6f086dfc
RS
3994}
3995
6de9cd9a 3996/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
3997 but for arguments instead of local variables. */
3998
6fb5fa3c
DB
3999static void
4000setjmp_args_warning (bitmap setjmp_crosses)
6f086dfc 4001{
b3694847 4002 tree decl;
6f086dfc 4003 for (decl = DECL_ARGUMENTS (current_function_decl);
910ad8de 4004 decl; decl = DECL_CHAIN (decl))
6f086dfc 4005 if (DECL_RTL (decl) != 0
f8cfc6aa 4006 && REG_P (DECL_RTL (decl))
6fb5fa3c 4007 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4008 warning (OPT_Wclobbered,
2b001724 4009 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
dee15844 4010 decl);
6f086dfc
RS
4011}
4012
6fb5fa3c
DB
4013/* Generate warning messages for variables live across setjmp. */
4014
b8698a0f 4015void
6fb5fa3c
DB
4016generate_setjmp_warnings (void)
4017{
4018 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4019
4020 if (n_basic_blocks == NUM_FIXED_BLOCKS
4021 || bitmap_empty_p (setjmp_crosses))
4022 return;
4023
4024 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4025 setjmp_args_warning (setjmp_crosses);
4026}
4027
6f086dfc 4028\f
3373692b 4029/* Reverse the order of elements in the fragment chain T of blocks,
1e3c1d95
JJ
4030 and return the new head of the chain (old last element).
4031 In addition, clear BLOCK_SAME_RANGE flags when needed
4032 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4033 its super fragment origin. */
3373692b
JJ
4034
4035static tree
4036block_fragments_nreverse (tree t)
4037{
1e3c1d95
JJ
4038 tree prev = 0, block, next, prev_super = 0;
4039 tree super = BLOCK_SUPERCONTEXT (t);
4040 if (BLOCK_FRAGMENT_ORIGIN (super))
4041 super = BLOCK_FRAGMENT_ORIGIN (super);
3373692b
JJ
4042 for (block = t; block; block = next)
4043 {
4044 next = BLOCK_FRAGMENT_CHAIN (block);
4045 BLOCK_FRAGMENT_CHAIN (block) = prev;
1e3c1d95
JJ
4046 if ((prev && !BLOCK_SAME_RANGE (prev))
4047 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4048 != prev_super))
4049 BLOCK_SAME_RANGE (block) = 0;
4050 prev_super = BLOCK_SUPERCONTEXT (block);
4051 BLOCK_SUPERCONTEXT (block) = super;
3373692b
JJ
4052 prev = block;
4053 }
1e3c1d95
JJ
4054 t = BLOCK_FRAGMENT_ORIGIN (t);
4055 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4056 != prev_super)
4057 BLOCK_SAME_RANGE (t) = 0;
4058 BLOCK_SUPERCONTEXT (t) = super;
3373692b
JJ
4059 return prev;
4060}
4061
4062/* Reverse the order of elements in the chain T of blocks,
4063 and return the new head of the chain (old last element).
4064 Also do the same on subblocks and reverse the order of elements
4065 in BLOCK_FRAGMENT_CHAIN as well. */
4066
4067static tree
4068blocks_nreverse_all (tree t)
4069{
4070 tree prev = 0, block, next;
4071 for (block = t; block; block = next)
4072 {
4073 next = BLOCK_CHAIN (block);
4074 BLOCK_CHAIN (block) = prev;
3373692b
JJ
4075 if (BLOCK_FRAGMENT_CHAIN (block)
4076 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
1e3c1d95
JJ
4077 {
4078 BLOCK_FRAGMENT_CHAIN (block)
4079 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4080 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4081 BLOCK_SAME_RANGE (block) = 0;
4082 }
4083 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
3373692b
JJ
4084 prev = block;
4085 }
4086 return prev;
4087}
4088
4089
a20612aa
RH
4090/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4091 and create duplicate blocks. */
4092/* ??? Need an option to either create block fragments or to create
4093 abstract origin duplicates of a source block. It really depends
4094 on what optimization has been performed. */
467456d0 4095
116eebd6 4096void
fa8db1f7 4097reorder_blocks (void)
467456d0 4098{
116eebd6 4099 tree block = DECL_INITIAL (current_function_decl);
2c217442 4100 VEC(tree,heap) *block_stack;
467456d0 4101
1a4450c7 4102 if (block == NULL_TREE)
116eebd6 4103 return;
fc289cd1 4104
2c217442 4105 block_stack = VEC_alloc (tree, heap, 10);
18c038b9 4106
a20612aa 4107 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 4108 clear_block_marks (block);
a20612aa 4109
116eebd6
MM
4110 /* Prune the old trees away, so that they don't get in the way. */
4111 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4112 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 4113
a20612aa 4114 /* Recreate the block tree from the note nesting. */
116eebd6 4115 reorder_blocks_1 (get_insns (), block, &block_stack);
3373692b 4116 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
18c038b9 4117
2c217442 4118 VEC_free (tree, heap, block_stack);
467456d0
RS
4119}
4120
a20612aa 4121/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 4122
6de9cd9a
DN
4123void
4124clear_block_marks (tree block)
cc1fe44f 4125{
a20612aa 4126 while (block)
cc1fe44f 4127 {
a20612aa 4128 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 4129 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 4130 block = BLOCK_CHAIN (block);
cc1fe44f
DD
4131 }
4132}
4133
0a1c58a2 4134static void
2c217442 4135reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
0a1c58a2
JL
4136{
4137 rtx insn;
1e3c1d95 4138 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
0a1c58a2
JL
4139
4140 for (insn = insns; insn; insn = NEXT_INSN (insn))
4141 {
4b4bf941 4142 if (NOTE_P (insn))
0a1c58a2 4143 {
a38e7aa5 4144 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
0a1c58a2
JL
4145 {
4146 tree block = NOTE_BLOCK (insn);
51b7d006
DJ
4147 tree origin;
4148
3373692b
JJ
4149 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4150 origin = block;
a20612aa 4151
1e3c1d95
JJ
4152 if (prev_end)
4153 BLOCK_SAME_RANGE (prev_end) = 0;
4154 prev_end = NULL_TREE;
4155
a20612aa
RH
4156 /* If we have seen this block before, that means it now
4157 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
4158 if (TREE_ASM_WRITTEN (block))
4159 {
a20612aa 4160 tree new_block = copy_node (block);
a20612aa 4161
1e3c1d95 4162 BLOCK_SAME_RANGE (new_block) = 0;
a20612aa
RH
4163 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4164 BLOCK_FRAGMENT_CHAIN (new_block)
4165 = BLOCK_FRAGMENT_CHAIN (origin);
4166 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4167
4168 NOTE_BLOCK (insn) = new_block;
4169 block = new_block;
0a1c58a2 4170 }
a20612aa 4171
1e3c1d95
JJ
4172 if (prev_beg == current_block && prev_beg)
4173 BLOCK_SAME_RANGE (block) = 1;
4174
4175 prev_beg = origin;
4176
0a1c58a2
JL
4177 BLOCK_SUBBLOCKS (block) = 0;
4178 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
4179 /* When there's only one block for the entire function,
4180 current_block == block and we mustn't do this; it
4181 would cause infinite recursion. */
4182 if (block != current_block)
4183 {
1e3c1d95 4184 tree super;
51b7d006 4185 if (block != origin)
1e3c1d95
JJ
4186 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4187 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4188 (origin))
4189 == current_block);
4190 if (VEC_empty (tree, *p_block_stack))
4191 super = current_block;
4192 else
4193 {
4194 super = VEC_last (tree, *p_block_stack);
4195 gcc_assert (super == current_block
4196 || BLOCK_FRAGMENT_ORIGIN (super)
4197 == current_block);
4198 }
4199 BLOCK_SUPERCONTEXT (block) = super;
339a28b9
ZW
4200 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4201 BLOCK_SUBBLOCKS (current_block) = block;
51b7d006 4202 current_block = origin;
339a28b9 4203 }
2c217442 4204 VEC_safe_push (tree, heap, *p_block_stack, block);
0a1c58a2 4205 }
a38e7aa5 4206 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
0a1c58a2 4207 {
2c217442 4208 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
0a1c58a2 4209 current_block = BLOCK_SUPERCONTEXT (current_block);
1e3c1d95
JJ
4210 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4211 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4212 prev_beg = NULL_TREE;
4213 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4214 ? NOTE_BLOCK (insn) : NULL_TREE;
0a1c58a2
JL
4215 }
4216 }
1e3c1d95
JJ
4217 else
4218 {
4219 prev_beg = NULL_TREE;
4220 if (prev_end)
4221 BLOCK_SAME_RANGE (prev_end) = 0;
4222 prev_end = NULL_TREE;
4223 }
0a1c58a2
JL
4224 }
4225}
4226
467456d0
RS
4227/* Reverse the order of elements in the chain T of blocks,
4228 and return the new head of the chain (old last element). */
4229
6de9cd9a 4230tree
fa8db1f7 4231blocks_nreverse (tree t)
467456d0 4232{
3373692b
JJ
4233 tree prev = 0, block, next;
4234 for (block = t; block; block = next)
467456d0 4235 {
3373692b
JJ
4236 next = BLOCK_CHAIN (block);
4237 BLOCK_CHAIN (block) = prev;
4238 prev = block;
467456d0
RS
4239 }
4240 return prev;
4241}
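/* For example, a chain B1 -> B2 -> B3 (linked through BLOCK_CHAIN) becomes
   B3 -> B2 -> B1, and B3 is returned.  */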
4242
61e46a7d
NF
4243/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4244 by modifying the last node in chain 1 to point to chain 2. */
4245
4246tree
4247block_chainon (tree op1, tree op2)
4248{
4249 tree t1;
4250
4251 if (!op1)
4252 return op2;
4253 if (!op2)
4254 return op1;
4255
4256 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4257 continue;
4258 BLOCK_CHAIN (t1) = op2;
4259
4260#ifdef ENABLE_TREE_CHECKING
4261 {
4262 tree t2;
4263 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4264 gcc_assert (t2 != t1);
4265 }
4266#endif
4267
4268 return op1;
4269}
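/* For example, block_chainon (B1 -> B2, B3 -> B4) yields
   B1 -> B2 -> B3 -> B4 and returns B1.  */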
4270
18c038b9
MM
4271/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4272 non-NULL, list them all into VECTOR, in a depth-first preorder
4273 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 4274 blocks. */
467456d0
RS
4275
4276static int
fa8db1f7 4277all_blocks (tree block, tree *vector)
467456d0 4278{
b2a59b15
MS
4279 int n_blocks = 0;
4280
a84efb51
JO
4281 while (block)
4282 {
4283 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 4284
a84efb51
JO
4285 /* Record this block. */
4286 if (vector)
4287 vector[n_blocks] = block;
b2a59b15 4288
a84efb51 4289 ++n_blocks;
718fe406 4290
a84efb51
JO
4291 /* Record the subblocks, and their subblocks... */
4292 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4293 vector ? vector + n_blocks : 0);
4294 block = BLOCK_CHAIN (block);
4295 }
467456d0
RS
4296
4297 return n_blocks;
4298}
18c038b9
MM
4299
4300/* Return a vector containing all the blocks rooted at BLOCK. The
4301 number of elements in the vector is stored in N_BLOCKS_P. The
4302 vector is dynamically allocated; it is the caller's responsibility
4303 to call `free' on the pointer returned. */
718fe406 4304
18c038b9 4305static tree *
fa8db1f7 4306get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
4307{
4308 tree *block_vector;
4309
4310 *n_blocks_p = all_blocks (block, NULL);
5ed6ace5 4311 block_vector = XNEWVEC (tree, *n_blocks_p);
18c038b9
MM
4312 all_blocks (block, block_vector);
4313
4314 return block_vector;
4315}
4316
f83b236e 4317static GTY(()) int next_block_index = 2;
18c038b9
MM
4318
4319/* Set BLOCK_NUMBER for all the blocks in FN. */
4320
4321void
fa8db1f7 4322number_blocks (tree fn)
18c038b9
MM
4323{
4324 int i;
4325 int n_blocks;
4326 tree *block_vector;
4327
4328 /* For SDB and XCOFF debugging output, we start numbering the blocks
4329 from 1 within each function, rather than keeping a running
4330 count. */
4331#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
4332 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4333 next_block_index = 1;
18c038b9
MM
4334#endif
4335
4336 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4337
4338 /* The top-level BLOCK isn't numbered at all. */
4339 for (i = 1; i < n_blocks; ++i)
4340 /* We number the blocks from two. */
4341 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4342
4343 free (block_vector);
4344
4345 return;
4346}
df8992f8
RH
4347
4348/* If VAR is present in a subblock of BLOCK, return the subblock. */
4349
24e47c76 4350DEBUG_FUNCTION tree
fa8db1f7 4351debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
4352{
4353 tree t;
4354
4355 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4356 if (t == var)
4357 return block;
4358
4359 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4360 {
4361 tree ret = debug_find_var_in_block_tree (var, t);
4362 if (ret)
4363 return ret;
4364 }
4365
4366 return NULL_TREE;
4367}
467456d0 4368\f
db2960f4
SL
4369/* Keep track of whether we're in a dummy function context. If we are,
4370 we don't want to invoke the set_current_function hook, because we'll
4371 get into trouble if the hook calls target_reinit () recursively or
4372 when the initial initialization is not yet complete. */
4373
4374static bool in_dummy_function;
4375
ab442df7
MM
4376/* Invoke the target hook when setting cfun. Update the optimization options
4377 if the function uses different options than the default. */
db2960f4
SL
4378
4379static void
4380invoke_set_current_function_hook (tree fndecl)
4381{
4382 if (!in_dummy_function)
ab442df7
MM
4383 {
4384 tree opts = ((fndecl)
4385 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4386 : optimization_default_node);
4387
4388 if (!opts)
4389 opts = optimization_default_node;
4390
4391 /* Change optimization options if needed. */
4392 if (optimization_current_node != opts)
4393 {
4394 optimization_current_node = opts;
46625112 4395 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
ab442df7
MM
4396 }
4397
4398 targetm.set_current_function (fndecl);
4399 }
db2960f4
SL
4400}
4401
4402/* cfun should never be set directly; use this function. */
4403
4404void
4405set_cfun (struct function *new_cfun)
4406{
4407 if (cfun != new_cfun)
4408 {
4409 cfun = new_cfun;
4410 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4411 }
4412}
4413
db2960f4
SL
4414/* Initialized with NOGC, making this poisonous to the garbage collector. */
4415
4416static VEC(function_p,heap) *cfun_stack;
4417
4418/* Push the current cfun onto the stack, and set cfun to new_cfun. */
4419
4420void
4421push_cfun (struct function *new_cfun)
4422{
4423 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4424 set_cfun (new_cfun);
4425}
4426
4427/* Pop cfun from the stack. */
4428
4429void
4430pop_cfun (void)
4431{
38d34676 4432 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
38d34676 4433 set_cfun (new_cfun);
db2960f4 4434}
3e87758a
RL
4435
4436/* Return the current value of funcdef_no and increment it. */
4437int
b8698a0f 4438get_next_funcdef_no (void)
3e87758a
RL
4439{
4440 return funcdef_no++;
4441}
4442
903d1e67
XDL
4443/* Return the current value of funcdef_no. */
4444int
4445get_last_funcdef_no (void)
4446{
4447 return funcdef_no;
4448}
4449
3a70d621 4450/* Allocate a function structure for FNDECL and set its contents
db2960f4
SL
4451 to the defaults. Set cfun to the newly-allocated object.
4452 Some of the helper functions invoked during initialization assume
4453 that cfun has already been set. Therefore, assign the new object
4454 directly into cfun and invoke the back end hook explicitly at the
4455 very end, rather than initializing a temporary and calling set_cfun
4456 on it.
182e0d71
AK
4457
4458 ABSTRACT_P is true if this is a function that will never be seen by
4459 the middle-end. Such functions are front-end concepts (like C++
4460 function templates) that do not correspond directly to functions
4461 placed in object files. */
7a80cf9a 4462
3a70d621 4463void
182e0d71 4464allocate_struct_function (tree fndecl, bool abstract_p)
6f086dfc 4465{
3a70d621 4466 tree result;
6de9cd9a 4467 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 4468
a9429e29 4469 cfun = ggc_alloc_cleared_function ();
b384405b 4470
3a70d621 4471 init_eh_for_function ();
6f086dfc 4472
3a70d621
RH
4473 if (init_machine_status)
4474 cfun->machine = (*init_machine_status) ();
e2ecd91c 4475
7c800926
KT
4476#ifdef OVERRIDE_ABI_FORMAT
4477 OVERRIDE_ABI_FORMAT (fndecl);
4478#endif
4479
179d2f74
RH
4480 invoke_set_current_function_hook (fndecl);
4481
81464b2c 4482 if (fndecl != NULL_TREE)
3a70d621 4483 {
db2960f4
SL
4484 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4485 cfun->decl = fndecl;
70cf5bc1 4486 current_function_funcdef_no = get_next_funcdef_no ();
db2960f4
SL
4487
4488 result = DECL_RESULT (fndecl);
182e0d71 4489 if (!abstract_p && aggregate_value_p (result, fndecl))
db2960f4 4490 {
3a70d621 4491#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4492 cfun->returns_pcc_struct = 1;
3a70d621 4493#endif
e3b5732b 4494 cfun->returns_struct = 1;
db2960f4
SL
4495 }
4496
f38958e8 4497 cfun->stdarg = stdarg_p (fntype);
b8698a0f 4498
db2960f4
SL
4499 /* Assume all registers in stdarg functions need to be saved. */
4500 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4501 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
8f4f502f
EB
4502
4503 /* ??? This could be set on a per-function basis by the front-end
4504 but is this worth the hassle? */
4505 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
3a70d621 4506 }
db2960f4
SL
4507}
4508
4509/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4510 instead of just setting it. */
9d30f3c1 4511
db2960f4
SL
4512void
4513push_struct_function (tree fndecl)
4514{
4515 VEC_safe_push (function_p, heap, cfun_stack, cfun);
182e0d71 4516 allocate_struct_function (fndecl, false);
3a70d621 4517}
6f086dfc 4518
8f4f502f 4519/* Reset crtl and other non-struct-function variables to defaults as
2067c116 4520 appropriate for emitting rtl at the start of a function. */
6f086dfc 4521
3a70d621 4522static void
db2960f4 4523prepare_function_start (void)
3a70d621 4524{
3e029763 4525 gcc_assert (!crtl->emit.x_last_insn);
fb0703f7 4526 init_temp_slots ();
0de456a5 4527 init_emit ();
bd60bab2 4528 init_varasm_status ();
0de456a5 4529 init_expr ();
bf08ebeb 4530 default_rtl_profile ();
6f086dfc 4531
a11e0df4 4532 if (flag_stack_usage_info)
d3c12306
EB
4533 {
4534 cfun->su = ggc_alloc_cleared_stack_usage ();
4535 cfun->su->static_stack_size = -1;
4536 }
4537
3a70d621 4538 cse_not_expected = ! optimize;
6f086dfc 4539
3a70d621
RH
4540 /* Caller save not needed yet. */
4541 caller_save_needed = 0;
6f086dfc 4542
3a70d621
RH
4543 /* We haven't done register allocation yet. */
4544 reg_renumber = 0;
6f086dfc 4545
b384405b
BS
4546 /* Indicate that we have not instantiated virtual registers yet. */
4547 virtuals_instantiated = 0;
4548
1b3d8f8a
GK
4549 /* Indicate that we want CONCATs now. */
4550 generating_concat_p = 1;
4551
b384405b
BS
4552 /* Indicate we have no need of a frame pointer yet. */
4553 frame_pointer_needed = 0;
b384405b
BS
4554}
4555
4556/* Initialize the rtl expansion mechanism so that we can do simple things
4557 like generate sequences. This is used to provide a context during global
db2960f4
SL
4558 initialization of some passes. You must call expand_dummy_function_end
4559 to exit this context. */
4560
b384405b 4561void
fa8db1f7 4562init_dummy_function_start (void)
b384405b 4563{
db2960f4
SL
4564 gcc_assert (!in_dummy_function);
4565 in_dummy_function = true;
4566 push_struct_function (NULL_TREE);
4567 prepare_function_start ();
b384405b
BS
4568}
4569
4570/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4571 and initialize static variables for generating RTL for the statements
4572 of the function. */
4573
4574void
fa8db1f7 4575init_function_start (tree subr)
b384405b 4576{
db2960f4
SL
4577 if (subr && DECL_STRUCT_FUNCTION (subr))
4578 set_cfun (DECL_STRUCT_FUNCTION (subr));
4579 else
182e0d71 4580 allocate_struct_function (subr, false);
db2960f4 4581 prepare_function_start ();
2c7eebae 4582 decide_function_section (subr);
b384405b 4583
6f086dfc
RS
4584 /* Warn if this value is an aggregate type,
4585 regardless of which calling convention we are using for it. */
ccf08a6e
DD
4586 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4587 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 4588}
5c7675e9 4589
ef330312 4590
6f086dfc 4591void
fa8db1f7 4592expand_main_function (void)
6f086dfc 4593{
3a57c6cb
MM
4594#if (defined(INVOKE__main) \
4595 || (!defined(HAS_INIT_SECTION) \
4596 && !defined(INIT_SECTION_ASM_OP) \
4597 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
68d28100 4598 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
1d482056 4599#endif
6f086dfc
RS
4600}
4601\f
7d69de61
RH
4602/* Expand code to initialize the stack_protect_guard. This is invoked at
4603 the beginning of a function to be protected. */
4604
4605#ifndef HAVE_stack_protect_set
4606# define HAVE_stack_protect_set 0
4607# define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4608#endif
4609
4610void
4611stack_protect_prologue (void)
4612{
4613 tree guard_decl = targetm.stack_protect_guard ();
4614 rtx x, y;
4615
08d4cc33
RH
4616 x = expand_normal (crtl->stack_protect_guard);
4617 y = expand_normal (guard_decl);
7d69de61
RH
4618
4619 /* Allow the target to copy from Y to X without leaking Y into a
4620 register. */
4621 if (HAVE_stack_protect_set)
4622 {
4623 rtx insn = gen_stack_protect_set (x, y);
4624 if (insn)
4625 {
4626 emit_insn (insn);
4627 return;
4628 }
4629 }
4630
4631 /* Otherwise do a straight move. */
4632 emit_move_insn (x, y);
4633}
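/* Taken together with stack_protect_epilogue below: the prologue copies the
   canary from GUARD_DECL into the slot behind crtl->stack_protect_guard,
   and the epilogue re-reads both values and calls
   targetm.stack_protect_fail () if they no longer match.  */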
4634
4635/* Expand code to verify the stack_protect_guard. This is invoked at
4636 the end of a function to be protected. */
4637
4638#ifndef HAVE_stack_protect_test
b76be05e
JJ
4639# define HAVE_stack_protect_test 0
4640# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
7d69de61
RH
4641#endif
4642
b755446c 4643void
7d69de61
RH
4644stack_protect_epilogue (void)
4645{
4646 tree guard_decl = targetm.stack_protect_guard ();
4647 rtx label = gen_label_rtx ();
4648 rtx x, y, tmp;
4649
08d4cc33
RH
4650 x = expand_normal (crtl->stack_protect_guard);
4651 y = expand_normal (guard_decl);
7d69de61
RH
4652
4653 /* Allow the target to compare Y with X without leaking either into
4654 a register. */
4655 switch (HAVE_stack_protect_test != 0)
4656 {
4657 case 1:
3aebbe5f 4658 tmp = gen_stack_protect_test (x, y, label);
7d69de61
RH
4659 if (tmp)
4660 {
4661 emit_insn (tmp);
7d69de61
RH
4662 break;
4663 }
4664 /* FALLTHRU */
4665
4666 default:
4667 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4668 break;
4669 }
4670
4671 /* The noreturn predictor has been moved to the tree level. The rtl-level
4672 predictors estimate this branch about 20%, which isn't enough to get
4673 things moved out of line. Since this is the only extant case of adding
4674 a noreturn function at the rtl level, it doesn't seem worth doing aught
4675 except adding the prediction by hand. */
4676 tmp = get_last_insn ();
4677 if (JUMP_P (tmp))
4678 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4679
4680 expand_expr_stmt (targetm.stack_protect_fail ());
4681 emit_label (label);
4682}
4683\f
6f086dfc
RS
4684/* Start the RTL for a new function, and set variables used for
4685 emitting RTL.
4686 SUBR is the FUNCTION_DECL node.
4687 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4688 the function's parameters, which must be run at any return statement. */
4689
4690void
b79c5284 4691expand_function_start (tree subr)
6f086dfc 4692{
6f086dfc
RS
4693 /* Make sure volatile mem refs aren't considered
4694 valid operands of arithmetic insns. */
4695 init_recog_no_volatile ();
4696
e3b5732b 4697 crtl->profile
70f4f91c
WC
4698 = (profile_flag
4699 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4700
e3b5732b 4701 crtl->limit_stack
a157febd
GK
4702 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4703
52a11cbf
RH
4704 /* Make the label for return statements to jump to. Do not special
4705 case machines with special return instructions -- they will be
4706 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 4707 return_label = gen_label_rtx ();
6f086dfc
RS
4708
4709 /* Initialize rtx used to return the value. */
4710 /* Do this before assign_parms so that we copy the struct value address
4711 before any library calls that assign parms might generate. */
4712
4713 /* Decide whether to return the value in memory or in a register. */
61f71b34 4714 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
4715 {
4716 /* Returning something that won't go in a register. */
b3694847 4717 rtx value_address = 0;
6f086dfc
RS
4718
4719#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4720 if (cfun->returns_pcc_struct)
6f086dfc
RS
4721 {
4722 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4723 value_address = assemble_static_space (size);
4724 }
4725 else
4726#endif
4727 {
2225b57c 4728 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
6f086dfc
RS
4729 /* Expect to be passed the address of a place to store the value.
4730 If it is passed as an argument, assign_parms will take care of
4731 it. */
61f71b34 4732 if (sv)
6f086dfc
RS
4733 {
4734 value_address = gen_reg_rtx (Pmode);
61f71b34 4735 emit_move_insn (value_address, sv);
6f086dfc
RS
4736 }
4737 }
4738 if (value_address)
ccdecf58 4739 {
01c98570
JM
4740 rtx x = value_address;
4741 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4742 {
4743 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4744 set_mem_attributes (x, DECL_RESULT (subr), 1);
4745 }
abde42f7 4746 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 4747 }
6f086dfc
RS
4748 }
4749 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4750 /* If return mode is void, this decl rtl should not be used. */
19e7881c 4751 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 4752 else
a53e14c0 4753 {
d5bf1143
RH
4754 /* Compute the return values into a pseudo reg, which we will copy
4755 into the true return register after the cleanups are done. */
bef5d8b6
RS
4756 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4757 if (TYPE_MODE (return_type) != BLKmode
4758 && targetm.calls.return_in_msb (return_type))
4759 /* expand_function_end will insert the appropriate padding in
4760 this case. Use the return value's natural (unpadded) mode
4761 within the function proper. */
4762 SET_DECL_RTL (DECL_RESULT (subr),
4763 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 4764 else
0bccc606 4765 {
bef5d8b6
RS
4766 /* In order to figure out what mode to use for the pseudo, we
4767 figure out what the mode of the eventual return register will
4768 actually be, and use that. */
1d636cc6 4769 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
bef5d8b6
RS
4770
4771 /* Structures that are returned in registers are not
4772 aggregate_value_p, so we may see a PARALLEL or a REG. */
4773 if (REG_P (hard_reg))
4774 SET_DECL_RTL (DECL_RESULT (subr),
4775 gen_reg_rtx (GET_MODE (hard_reg)));
4776 else
4777 {
4778 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4779 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4780 }
0bccc606 4781 }
a53e14c0 4782
084a1106
JDA
4783 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4784 result to the real return register(s). */
4785 DECL_REGISTER (DECL_RESULT (subr)) = 1;
a53e14c0 4786 }
6f086dfc
RS
4787
4788 /* Initialize rtx for parameters and local variables.
4789 In some cases this requires emitting insns. */
0d1416c6 4790 assign_parms (subr);
6f086dfc 4791
6de9cd9a
DN
4792 /* If function gets a static chain arg, store it. */
4793 if (cfun->static_chain_decl)
4794 {
7e140280 4795 tree parm = cfun->static_chain_decl;
531ca746 4796 rtx local, chain, insn;
7e140280 4797
531ca746
RH
4798 local = gen_reg_rtx (Pmode);
4799 chain = targetm.calls.static_chain (current_function_decl, true);
4800
4801 set_decl_incoming_rtl (parm, chain, false);
7e140280 4802 SET_DECL_RTL (parm, local);
7e140280 4803 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 4804
531ca746
RH
4805 insn = emit_move_insn (local, chain);
4806
4807 /* Mark the register as eliminable, similar to parameters. */
4808 if (MEM_P (chain)
4809 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
7543f918 4810 set_dst_reg_note (insn, REG_EQUIV, chain, local);
6de9cd9a
DN
4811 }
4812
4813 /* If the function receives a non-local goto, then store the
4814 bits we need to restore the frame pointer. */
4815 if (cfun->nonlocal_goto_save_area)
4816 {
4817 tree t_save;
4818 rtx r_save;
4819
4846b435 4820 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
ca5f4331 4821 gcc_assert (DECL_RTL_SET_P (var));
6de9cd9a 4822
6bbec3e1
L
4823 t_save = build4 (ARRAY_REF,
4824 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
3244e67d
RS
4825 cfun->nonlocal_goto_save_area,
4826 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 4827 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
6bbec3e1 4828 gcc_assert (GET_MODE (r_save) == Pmode);
f0c51a1e 4829
88280cf9 4830 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
6de9cd9a
DN
4831 update_nonlocal_goto_save_area ();
4832 }
f0c51a1e 4833
6f086dfc
RS
4834 /* The following was moved from init_function_start.
4835 The move is supposed to make sdb output more accurate. */
4836 /* Indicate the beginning of the function body,
4837 as opposed to parm setup. */
2e040219 4838 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 4839
ede497cf
SB
4840 gcc_assert (NOTE_P (get_last_insn ()));
4841
6f086dfc
RS
4842 parm_birth_insn = get_last_insn ();
4843
e3b5732b 4844 if (crtl->profile)
f6f315fe 4845 {
f6f315fe 4846#ifdef PROFILE_HOOK
df696a75 4847 PROFILE_HOOK (current_function_funcdef_no);
411707f4 4848#endif
f6f315fe 4849 }
411707f4 4850
6d3cc8f0
EB
4851 /* If we are doing generic stack checking, the probe should go here. */
4852 if (flag_stack_check == GENERIC_STACK_CHECK)
ede497cf 4853 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
4854}
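
/* For illustration, given a function returning a large aggregate,

       struct big { int a[16]; };
       struct big f (void);

   aggregate_value_p is true for DECL_RESULT (f), so expand_function_start
   sets DECL_RTL of the result to a MEM at the incoming struct-value
   address; a scalar-returning function instead gets a pseudo in the mode
   of its eventual hard return register.  (Illustrative summary of the
   code above, not an exhaustive description.)  */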
4855\f
49ad7cfa
BS
4856/* Undo the effects of init_dummy_function_start. */
4857void
fa8db1f7 4858expand_dummy_function_end (void)
49ad7cfa 4859{
db2960f4
SL
4860 gcc_assert (in_dummy_function);
4861
49ad7cfa
BS
4862 /* End any sequences that failed to be closed due to syntax errors. */
4863 while (in_sequence_p ())
4864 end_sequence ();
4865
4866 /* Outside function body, can't compute type's actual size
4867 until next function's body starts. */
fa51b01b 4868
01d939e8
BS
4869 free_after_parsing (cfun);
4870 free_after_compilation (cfun);
db2960f4
SL
4871 pop_cfun ();
4872 in_dummy_function = false;
49ad7cfa
BS
4873}
4874
c13fde05
RH
4875/* Call DOIT for each hard register used as a return value from
4876 the current function. */
bd695e1e
RH
4877
4878void
fa8db1f7 4879diddle_return_value (void (*doit) (rtx, void *), void *arg)
bd695e1e 4880{
38173d38 4881 rtx outgoing = crtl->return_rtx;
c13fde05
RH
4882
4883 if (! outgoing)
4884 return;
bd695e1e 4885
f8cfc6aa 4886 if (REG_P (outgoing))
c13fde05
RH
4887 (*doit) (outgoing, arg);
4888 else if (GET_CODE (outgoing) == PARALLEL)
4889 {
4890 int i;
bd695e1e 4891
c13fde05
RH
4892 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4893 {
4894 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4895
f8cfc6aa 4896 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 4897 (*doit) (x, arg);
bd695e1e
RH
4898 }
4899 }
4900}
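
/* For example, on a target that returns a two-register aggregate,
   crtl->return_rtx might be a PARALLEL along the lines of

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   (register numbers are illustrative); DOIT is then applied to each hard
   register element in turn.  */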
4901
c13fde05 4902static void
fa8db1f7 4903do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4904{
c41c1387 4905 emit_clobber (reg);
c13fde05
RH
4906}
4907
4908void
fa8db1f7 4909clobber_return_register (void)
c13fde05
RH
4910{
4911 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
4912
4913 /* In case we do use pseudo to return value, clobber it too. */
4914 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4915 {
4916 tree decl_result = DECL_RESULT (current_function_decl);
4917 rtx decl_rtl = DECL_RTL (decl_result);
4918 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4919 {
4920 do_clobber_return_reg (decl_rtl, NULL);
4921 }
4922 }
c13fde05
RH
4923}
4924
4925static void
fa8db1f7 4926do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4927{
c41c1387 4928 emit_use (reg);
c13fde05
RH
4929}
4930
0bf8477d 4931static void
fa8db1f7 4932use_return_register (void)
c13fde05
RH
4933{
4934 diddle_return_value (do_use_return_reg, NULL);
4935}
4936
902edd36
JH
4937/* Possibly warn about unused parameters. */
4938void
4939do_warn_unused_parameter (tree fn)
4940{
4941 tree decl;
4942
4943 for (decl = DECL_ARGUMENTS (fn);
910ad8de 4944 decl; decl = DECL_CHAIN (decl))
902edd36 4945 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
534fd534
DF
4946 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4947 && !TREE_NO_WARNING (decl))
b9b8dde3 4948 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
902edd36
JH
4949}
4950
e2500fed
GK
4951static GTY(()) rtx initial_trampoline;
4952
71c0e7fc 4953/* Generate RTL for the end of the current function. */
6f086dfc
RS
4954
4955void
fa8db1f7 4956expand_function_end (void)
6f086dfc 4957{
932f0847 4958 rtx clobber_after;
6f086dfc 4959
964be02f
RH
4960 /* If arg_pointer_save_area was referenced only from a nested
4961 function, we will not have initialized it yet. Do that now. */
e3b5732b 4962 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
bd60bab2 4963 get_arg_pointer_save_area ();
964be02f 4964
b38f3813 4965 /* If we are doing generic stack checking and this function makes calls,
11044f66
RK
4966 do a stack probe at the start of the function to ensure we have enough
4967 space for another stack frame. */
b38f3813 4968 if (flag_stack_check == GENERIC_STACK_CHECK)
11044f66
RK
4969 {
4970 rtx insn, seq;
4971
4972 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 4973 if (CALL_P (insn))
11044f66 4974 {
c35af30f 4975 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
11044f66 4976 start_sequence ();
c35af30f
EB
4977 if (STACK_CHECK_MOVING_SP)
4978 anti_adjust_stack_and_probe (max_frame_size, true);
4979 else
4980 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
11044f66
RK
4981 seq = get_insns ();
4982 end_sequence ();
d16e455d 4983 set_insn_locators (seq, prologue_locator);
ede497cf 4984 emit_insn_before (seq, stack_check_probe_note);
11044f66
RK
4985 break;
4986 }
4987 }
4988
6f086dfc
RS
4989 /* End any sequences that failed to be closed due to syntax errors. */
4990 while (in_sequence_p ())
5f4f0e22 4991 end_sequence ();
6f086dfc 4992
6f086dfc
RS
4993 clear_pending_stack_adjust ();
4994 do_pending_stack_adjust ();
4995
6f086dfc
RS
 4996 /* Output a line number for the end of the function.
4997 SDB depends on this. */
55e092c4 4998 set_curr_insn_source_location (input_location);
6f086dfc 4999
fbffc70a 5000 /* Before the return label (if any), clobber the return
a1f300c0 5001 registers so that they are not propagated live to the rest of
fbffc70a
GK
5002 the function. This can only happen with functions that drop
5003 through; if there had been a return statement, there would
932f0847
JH
5004 have either been a return rtx, or a jump to the return label.
5005
 5006 We delay actual code generation until after the current_function_value_rtx
 5007 is computed. */
5008 clobber_after = get_last_insn ();
fbffc70a 5009
526c334b
KH
5010 /* Output the label for the actual return from the function. */
5011 emit_label (return_label);
6f086dfc 5012
677f3fa8 5013 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
815eb8f0
AM
5014 {
5015 /* Let except.c know where it should emit the call to unregister
5016 the function context for sjlj exceptions. */
5017 if (flag_exceptions)
5018 sjlj_emit_function_exit_after (get_last_insn ());
5019 }
6fb5fa3c
DB
5020 else
5021 {
5022 /* We want to ensure that instructions that may trap are not
5023 moved into the epilogue by scheduling, because we don't
5024 always emit unwind information for the epilogue. */
8f4f502f 5025 if (cfun->can_throw_non_call_exceptions)
6fb5fa3c
DB
5026 emit_insn (gen_blockage ());
5027 }
0b59e81e 5028
652b0932
RH
5029 /* If this is an implementation of throw, do what's necessary to
5030 communicate between __builtin_eh_return and the epilogue. */
5031 expand_eh_return ();
5032
3e4eac3f
RH
5033 /* If scalar return value was computed in a pseudo-reg, or was a named
5034 return value that got dumped to the stack, copy that to the hard
5035 return register. */
19e7881c 5036 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 5037 {
3e4eac3f
RH
5038 tree decl_result = DECL_RESULT (current_function_decl);
5039 rtx decl_rtl = DECL_RTL (decl_result);
5040
5041 if (REG_P (decl_rtl)
5042 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5043 : DECL_REGISTER (decl_result))
5044 {
38173d38 5045 rtx real_decl_rtl = crtl->return_rtx;
6f086dfc 5046
ce5e43d0 5047 /* This should be set in assign_parms. */
0bccc606 5048 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
5049
5050 /* If this is a BLKmode structure being returned in registers,
5051 then use the mode computed in expand_return. Note that if
797a6ac1 5052 decl_rtl is memory, then its mode may have been changed,
38173d38 5053 but that crtl->return_rtx has not. */
3e4eac3f 5054 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 5055 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 5056
bef5d8b6
RS
5057 /* If a non-BLKmode return value should be padded at the least
5058 significant end of the register, shift it left by the appropriate
5059 amount. BLKmode results are handled using the group load/store
5060 machinery. */
5061 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5062 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5063 {
5064 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5065 REGNO (real_decl_rtl)),
5066 decl_rtl);
5067 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5068 }
3e4eac3f 5069 /* If a named return value dumped decl_return to memory, then
797a6ac1 5070 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 5071 extension. */
bef5d8b6 5072 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 5073 {
8df83eae 5074 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
cde0f3fd
PB
5075 promote_function_mode (TREE_TYPE (decl_result),
5076 GET_MODE (decl_rtl), &unsignedp,
5077 TREE_TYPE (current_function_decl), 1);
3e4eac3f
RH
5078
5079 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5080 }
aa570f54 5081 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
5082 {
5083 /* If expand_function_start has created a PARALLEL for decl_rtl,
5084 move the result to the real return registers. Otherwise, do
5085 a group load from decl_rtl for a named return. */
5086 if (GET_CODE (decl_rtl) == PARALLEL)
5087 emit_group_move (real_decl_rtl, decl_rtl);
5088 else
5089 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 5090 TREE_TYPE (decl_result),
084a1106
JDA
5091 int_size_in_bytes (TREE_TYPE (decl_result)));
5092 }
652b0932
RH
5093 /* In the case of complex integer modes smaller than a word, we'll
5094 need to generate some non-trivial bitfield insertions. Do that
5095 on a pseudo and not the hard register. */
5096 else if (GET_CODE (decl_rtl) == CONCAT
5097 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5098 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5099 {
5100 int old_generating_concat_p;
5101 rtx tmp;
5102
5103 old_generating_concat_p = generating_concat_p;
5104 generating_concat_p = 0;
5105 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5106 generating_concat_p = old_generating_concat_p;
5107
5108 emit_move_insn (tmp, decl_rtl);
5109 emit_move_insn (real_decl_rtl, tmp);
5110 }
3e4eac3f
RH
5111 else
5112 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 5113 }
6f086dfc
RS
5114 }
5115
5116 /* If returning a structure, arrange to return the address of the value
5117 in a place where debuggers expect to find it.
5118
5119 If returning a structure PCC style,
5120 the caller also depends on this value.
e3b5732b
JH
5121 And cfun->returns_pcc_struct is not necessarily set. */
5122 if (cfun->returns_struct
5123 || cfun->returns_pcc_struct)
6f086dfc 5124 {
cc77ae10 5125 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 5126 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
5127 rtx outgoing;
5128
5129 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5130 type = TREE_TYPE (type);
5131 else
5132 value_address = XEXP (value_address, 0);
5133
1d636cc6
RG
5134 outgoing = targetm.calls.function_value (build_pointer_type (type),
5135 current_function_decl, true);
6f086dfc
RS
5136
5137 /* Mark this as a function return value so integrate will delete the
5138 assignment and USE below when inlining this function. */
5139 REG_FUNCTION_VALUE_P (outgoing) = 1;
5140
d1608933 5141 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
5142 value_address = convert_memory_address (GET_MODE (outgoing),
5143 value_address);
d1608933 5144
6f086dfc 5145 emit_move_insn (outgoing, value_address);
d1608933
RK
5146
 5147 /* Show the return register used to hold the result (in this case the
 5148 address of the result). */
38173d38 5149 crtl->return_rtx = outgoing;
6f086dfc
RS
5150 }
5151
932f0847
JH
5152 /* Emit the actual code to clobber return register. */
5153 {
609c3937 5154 rtx seq;
797a6ac1 5155
932f0847
JH
5156 start_sequence ();
5157 clobber_return_register ();
2f937369 5158 seq = get_insns ();
932f0847
JH
5159 end_sequence ();
5160
609c3937 5161 emit_insn_after (seq, clobber_after);
932f0847
JH
5162 }
5163
609c3937 5164 /* Output the label for the naked return from the function. */
4c33221c
UW
5165 if (naked_return_label)
5166 emit_label (naked_return_label);
6e3077c6 5167
25108646
AH
5168 /* @@@ This is a kludge. We want to ensure that instructions that
5169 may trap are not moved into the epilogue by scheduling, because
56d17681 5170 we don't always emit unwind information for the epilogue. */
f0a0390e 5171 if (cfun->can_throw_non_call_exceptions
677f3fa8 5172 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
56d17681 5173 emit_insn (gen_blockage ());
25108646 5174
7d69de61 5175 /* If stack protection is enabled for this function, check the guard. */
cb91fab0 5176 if (crtl->stack_protect_guard)
7d69de61
RH
5177 stack_protect_epilogue ();
5178
40184445
BS
5179 /* If we had calls to alloca, and this machine needs
5180 an accurate stack pointer to exit the function,
5181 insert some code to save and restore the stack pointer. */
5182 if (! EXIT_IGNORE_STACK
e3b5732b 5183 && cfun->calls_alloca)
40184445 5184 {
9eac0f2a 5185 rtx tem = 0, seq;
40184445 5186
9eac0f2a
RH
5187 start_sequence ();
5188 emit_stack_save (SAVE_FUNCTION, &tem);
5189 seq = get_insns ();
5190 end_sequence ();
5191 emit_insn_before (seq, parm_birth_insn);
5192
5193 emit_stack_restore (SAVE_FUNCTION, tem);
40184445
BS
5194 }
5195
c13fde05
RH
 5196 /* ??? This should no longer be necessary since stupid is no longer with
 5197 us, but there are some parts of the compiler (e.g. reload_combine, and
 5198 sh mach_dep_reorg) that still try to compute their own lifetime info
 5199 instead of using the general framework. */
5200 use_return_register ();
6f086dfc 5201}
278ed218
RH
5202
5203rtx
bd60bab2 5204get_arg_pointer_save_area (void)
278ed218 5205{
bd60bab2 5206 rtx ret = arg_pointer_save_area;
278ed218
RH
5207
5208 if (! ret)
5209 {
bd60bab2
JH
5210 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5211 arg_pointer_save_area = ret;
964be02f
RH
5212 }
5213
e3b5732b 5214 if (! crtl->arg_pointer_save_area_init)
964be02f
RH
5215 {
5216 rtx seq;
278ed218 5217
797a6ac1 5218 /* Save the arg pointer at the beginning of the function. The
964be02f 5219 generated stack slot may not be a valid memory address, so we
278ed218
RH
5220 have to check it and fix it if necessary. */
5221 start_sequence ();
2e3f842f
L
5222 emit_move_insn (validize_mem (ret),
5223 crtl->args.internal_arg_pointer);
2f937369 5224 seq = get_insns ();
278ed218
RH
5225 end_sequence ();
5226
964be02f 5227 push_topmost_sequence ();
1cb2fc7b 5228 emit_insn_after (seq, entry_of_function ());
964be02f 5229 pop_topmost_sequence ();
c1d9a70a
ILT
5230
5231 crtl->arg_pointer_save_area_init = true;
278ed218
RH
5232 }
5233
5234 return ret;
5235}
bdac5f58 5236\f
cd9c1ca8
RH
5237/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5238 for the first time. */
bdac5f58 5239
0a1c58a2 5240static void
cd9c1ca8 5241record_insns (rtx insns, rtx end, htab_t *hashp)
bdac5f58 5242{
2f937369 5243 rtx tmp;
cd9c1ca8 5244 htab_t hash = *hashp;
0a1c58a2 5245
cd9c1ca8
RH
5246 if (hash == NULL)
5247 *hashp = hash
5248 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5249
5250 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5251 {
5252 void **slot = htab_find_slot (hash, tmp, INSERT);
5253 gcc_assert (*slot == NULL);
5254 *slot = tmp;
5255 }
5256}
5257
cd400280
RH
5258/* INSN has been duplicated, or replaced by COPY, perhaps by duplicating a
 5259 basic block, splitting, or running peepholes. If INSN is a prologue or
 5260 epilogue insn, then record COPY as well. */
cd9c1ca8
RH
5261
5262void
cd400280 5263maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
cd9c1ca8 5264{
cd400280 5265 htab_t hash;
cd9c1ca8
RH
5266 void **slot;
5267
cd400280
RH
5268 hash = epilogue_insn_hash;
5269 if (!hash || !htab_find (hash, insn))
5270 {
5271 hash = prologue_insn_hash;
5272 if (!hash || !htab_find (hash, insn))
5273 return;
5274 }
cd9c1ca8 5275
cd400280 5276 slot = htab_find_slot (hash, copy, INSERT);
cd9c1ca8
RH
5277 gcc_assert (*slot == NULL);
5278 *slot = copy;
bdac5f58
TW
5279}
5280
589fe865 5281/* Set the locator of the insn chain starting at INSN to LOC. */
0435312e 5282static void
fa8db1f7 5283set_insn_locators (rtx insn, int loc)
0435312e
JH
5284{
5285 while (insn != NULL_RTX)
5286 {
5287 if (INSN_P (insn))
5288 INSN_LOCATOR (insn) = loc;
5289 insn = NEXT_INSN (insn);
5290 }
5291}
5292
cd9c1ca8
RH
5293/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5294 we can be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 5295
cd9c1ca8
RH
5296static bool
5297contains (const_rtx insn, htab_t hash)
bdac5f58 5298{
cd9c1ca8
RH
5299 if (hash == NULL)
5300 return false;
bdac5f58 5301
cd9c1ca8 5302 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
bdac5f58 5303 {
cd9c1ca8 5304 int i;
bdac5f58 5305 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
cd9c1ca8
RH
5306 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5307 return true;
5308 return false;
bdac5f58 5309 }
cd9c1ca8
RH
5310
5311 return htab_find (hash, insn) != NULL;
bdac5f58 5312}
5c7675e9
RH
5313
5314int
4f588890 5315prologue_epilogue_contains (const_rtx insn)
5c7675e9 5316{
cd9c1ca8 5317 if (contains (insn, prologue_insn_hash))
5c7675e9 5318 return 1;
cd9c1ca8 5319 if (contains (insn, epilogue_insn_hash))
5c7675e9
RH
5320 return 1;
5321 return 0;
5322}
bdac5f58 5323
484db665
BS
5324#ifdef HAVE_simple_return
5325
484db665
BS
5326/* Return true if INSN requires the stack frame to be set up.
5327 PROLOGUE_USED contains the hard registers used in the function
764a2546
BS
5328 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5329 prologue to set up for the function. */
0ff0609d 5330bool
764a2546
BS
5331requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5332 HARD_REG_SET set_up_by_prologue)
484db665 5333{
764a2546 5334 df_ref *df_rec;
484db665
BS
5335 HARD_REG_SET hardregs;
5336 unsigned regno;
5337
484db665
BS
5338 if (CALL_P (insn))
5339 return !SIBLING_CALL_P (insn);
484db665 5340
4265801b
EB
5341 /* We need a frame to get the unique CFA expected by the unwinder. */
5342 if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5343 return true;
5344
484db665 5345 CLEAR_HARD_REG_SET (hardregs);
764a2546 5346 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
484db665 5347 {
764a2546 5348 rtx dreg = DF_REF_REG (*df_rec);
484db665
BS
5349
5350 if (!REG_P (dreg))
5351 continue;
5352
5353 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5354 REGNO (dreg));
5355 }
5356 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5357 return true;
5358 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5359 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5360 if (TEST_HARD_REG_BIT (hardregs, regno)
5361 && df_regs_ever_live_p (regno))
5362 return true;
764a2546
BS
5363
5364 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5365 {
5366 rtx reg = DF_REF_REG (*df_rec);
5367
5368 if (!REG_P (reg))
5369 continue;
5370
5371 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5372 REGNO (reg));
5373 }
5374 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5375 return true;
5376
484db665
BS
5377 return false;
5378}
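
/* For example, a non-sibling call always requires the frame, as does an
   insn that sets a call-saved register which is live somewhere in the
   function, or one that reads a register the prologue sets up (such as the
   frame pointer when frame_pointer_needed).  A sibling call, or an insn
   touching only call-clobbered registers the prologue does not care about,
   does not.  (Summary of the checks above.)  */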
39d52ae5 5379
ef2be249
RS
5380/* See whether BB has a single successor that uses [REGNO, END_REGNO),
5381 and if BB is its only predecessor. Return that block if so,
5382 otherwise return null. */
39d52ae5 5383
ef2be249
RS
5384static basic_block
5385next_block_for_reg (basic_block bb, int regno, int end_regno)
39d52ae5 5386{
ef2be249
RS
5387 edge e, live_edge;
5388 edge_iterator ei;
5389 bitmap live;
5390 int i;
5391
5392 live_edge = NULL;
5393 FOR_EACH_EDGE (e, ei, bb->succs)
39d52ae5 5394 {
ef2be249
RS
5395 live = df_get_live_in (e->dest);
5396 for (i = regno; i < end_regno; i++)
5397 if (REGNO_REG_SET_P (live, i))
5398 {
5399 if (live_edge && live_edge != e)
5400 return NULL;
5401 live_edge = e;
5402 }
5403 }
39d52ae5 5404
ef2be249
RS
5405 /* We can sometimes encounter dead code. Don't try to move it
5406 into the exit block. */
5407 if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
5408 return NULL;
39d52ae5 5409
ef2be249
RS
5410 /* Reject targets of abnormal edges. This is needed for correctness
5411 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
5412 exception edges even though it is generally treated as call-saved
5413 for the majority of the compilation. Moving across abnormal edges
5414 isn't going to be interesting for shrink-wrap usage anyway. */
5415 if (live_edge->flags & EDGE_ABNORMAL)
5416 return NULL;
5417
5418 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5419 return NULL;
39d52ae5 5420
ef2be249
RS
5421 return live_edge->dest;
5422}
5423
5424/* Try to move INSN from BB to a successor. Return true on success.
5425 USES and DEFS are the set of registers that are used and defined
5426 after INSN in BB. */
39d52ae5 5427
ef2be249
RS
5428static bool
5429move_insn_for_shrink_wrap (basic_block bb, rtx insn,
5430 const HARD_REG_SET uses,
5431 const HARD_REG_SET defs)
5432{
5433 rtx set, src, dest;
5434 bitmap live_out, live_in, bb_uses, bb_defs;
5435 unsigned int i, dregno, end_dregno, sregno, end_sregno;
5436 basic_block next_block;
5437
5438 /* Look for a simple register copy. */
5439 set = single_set (insn);
5440 if (!set)
5441 return false;
5442 src = SET_SRC (set);
5443 dest = SET_DEST (set);
5444 if (!REG_P (dest) || !REG_P (src))
5445 return false;
5446
5447 /* Make sure that the source register isn't defined later in BB. */
5448 sregno = REGNO (src);
5449 end_sregno = END_REGNO (src);
5450 if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
5451 return false;
5452
5453 /* Make sure that the destination register isn't referenced later in BB. */
5454 dregno = REGNO (dest);
5455 end_dregno = END_REGNO (dest);
5456 if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
5457 || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
5458 return false;
5459
5460 /* See whether there is a successor block to which we could move INSN. */
5461 next_block = next_block_for_reg (bb, dregno, end_dregno);
5462 if (!next_block)
5463 return false;
5464
5465 /* At this point we are committed to moving INSN, but let's try to
5466 move it as far as we can. */
5467 do
5468 {
5469 live_out = df_get_live_out (bb);
5470 live_in = df_get_live_in (next_block);
5471 bb = next_block;
5472
5473 /* Check whether BB uses DEST or clobbers DEST. We need to add
5474 INSN to BB if so. Either way, DEST is no longer live on entry,
5475 except for any part that overlaps SRC (next loop). */
5476 bb_uses = &DF_LR_BB_INFO (bb)->use;
5477 bb_defs = &DF_LR_BB_INFO (bb)->def;
5478 for (i = dregno; i < end_dregno; i++)
39d52ae5 5479 {
ef2be249
RS
5480 if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i))
5481 next_block = NULL;
5482 CLEAR_REGNO_REG_SET (live_out, i);
5483 CLEAR_REGNO_REG_SET (live_in, i);
39d52ae5
BS
5484 }
5485
ef2be249
RS
5486 /* Check whether BB clobbers SRC. We need to add INSN to BB if so.
5487 Either way, SRC is now live on entry. */
5488 for (i = sregno; i < end_sregno; i++)
39d52ae5 5489 {
ef2be249
RS
5490 if (REGNO_REG_SET_P (bb_defs, i))
5491 next_block = NULL;
5492 SET_REGNO_REG_SET (live_out, i);
5493 SET_REGNO_REG_SET (live_in, i);
39d52ae5 5494 }
ef2be249
RS
5495
5496 /* If we don't need to add the move to BB, look for a single
5497 successor block. */
5498 if (next_block)
5499 next_block = next_block_for_reg (next_block, dregno, end_dregno);
39d52ae5 5500 }
ef2be249
RS
5501 while (next_block);
5502
5503 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5504 (next loop). */
5505 for (i = dregno; i < end_dregno; i++)
5506 {
5507 CLEAR_REGNO_REG_SET (bb_uses, i);
5508 SET_REGNO_REG_SET (bb_defs, i);
5509 }
5510
5511 /* BB now uses SRC. */
5512 for (i = sregno; i < end_sregno; i++)
5513 SET_REGNO_REG_SET (bb_uses, i);
5514
5515 emit_insn_after (PATTERN (insn), bb_note (bb));
5516 delete_insn (insn);
5517 return true;
5518}
5519
5520/* Look for register copies in the first block of the function, and move
5521 them down into successor blocks if the register is used only on one
5522 path. This exposes more opportunities for shrink-wrapping. These
5523 kinds of sets often occur when incoming argument registers are moved
5524 to call-saved registers because their values are live across one or
5525 more calls during the function. */
5526
5527static void
5528prepare_shrink_wrap (basic_block entry_block)
5529{
5530 rtx insn, curr, x;
5531 HARD_REG_SET uses, defs;
5532 df_ref *ref;
5533
5534 CLEAR_HARD_REG_SET (uses);
5535 CLEAR_HARD_REG_SET (defs);
5536 FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
5537 if (NONDEBUG_INSN_P (insn)
5538 && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
5539 {
 5540 /* Add all defined registers to DEFS. */
5541 for (ref = DF_INSN_DEFS (insn); *ref; ref++)
5542 {
5543 x = DF_REF_REG (*ref);
5544 if (REG_P (x) && HARD_REGISTER_P (x))
5545 SET_HARD_REG_BIT (defs, REGNO (x));
5546 }
5547
 5548 /* Add all used registers to USES. */
5549 for (ref = DF_INSN_USES (insn); *ref; ref++)
5550 {
5551 x = DF_REF_REG (*ref);
5552 if (REG_P (x) && HARD_REGISTER_P (x))
5553 SET_HARD_REG_BIT (uses, REGNO (x));
5554 }
5555 }
39d52ae5
BS
5556}
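
/* Illustrative example of the copies this sinks: if the entry block
   contains

       (set (reg:DI 42) (reg:DI 5))   ; save an incoming argument in a
                                      ; call-saved register (hypothetical
                                      ; hard register numbers)

   and register 42 is used only on the path that also needs the prologue,
   the copy is moved out of the entry block so that the early-exit path no
   longer touches a call-saved register.  */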
5557
484db665
BS
5558#endif
5559
170d8157 5560#ifdef HAVE_return
4c029f40
TV
5561/* Insert use of return register before the end of BB. */
5562
5563static void
5564emit_use_return_register_into_block (basic_block bb)
5565{
5566 rtx seq;
5567 start_sequence ();
5568 use_return_register ();
5569 seq = get_insns ();
5570 end_sequence ();
5571 emit_insn_before (seq, BB_END (bb));
5572}
5573
484db665
BS
5574
5575/* Create a return pattern, either simple_return or return, depending on
5576 simple_p. */
5577
5578static rtx
5579gen_return_pattern (bool simple_p)
5580{
5581#ifdef HAVE_simple_return
5582 return simple_p ? gen_simple_return () : gen_return ();
5583#else
5584 gcc_assert (!simple_p);
5585 return gen_return ();
5586#endif
5587}
5588
5589/* Insert an appropriate return pattern at the end of block BB. This
5590 also means updating block_for_insn appropriately. SIMPLE_P is
5591 the same as in gen_return_pattern and passed to it. */
69732dcb
RH
5592
5593static void
484db665 5594emit_return_into_block (bool simple_p, basic_block bb)
69732dcb 5595{
484db665
BS
5596 rtx jump, pat;
5597 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5598 pat = PATTERN (jump);
26898771
BS
5599 if (GET_CODE (pat) == PARALLEL)
5600 pat = XVECEXP (pat, 0, 0);
5601 gcc_assert (ANY_RETURN_P (pat));
5602 JUMP_LABEL (jump) = pat;
69732dcb 5603}
484db665 5604#endif
69732dcb 5605
387748de
AM
5606/* Set JUMP_LABEL for a return insn. */
5607
5608void
5609set_return_jump_label (rtx returnjump)
5610{
5611 rtx pat = PATTERN (returnjump);
5612 if (GET_CODE (pat) == PARALLEL)
5613 pat = XVECEXP (pat, 0, 0);
5614 if (ANY_RETURN_P (pat))
5615 JUMP_LABEL (returnjump) = pat;
5616 else
5617 JUMP_LABEL (returnjump) = ret_rtx;
5618}
5619
ffe14686
AM
5620#ifdef HAVE_simple_return
5621/* Create a copy of BB instructions and insert at BEFORE. Redirect
5622 preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE. */
5623static void
5624dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
5625 bitmap_head *need_prologue)
5626{
5627 edge_iterator ei;
5628 edge e;
5629 rtx insn = BB_END (bb);
5630
5631 /* We know BB has a single successor, so there is no need to copy a
5632 simple jump at the end of BB. */
5633 if (simplejump_p (insn))
5634 insn = PREV_INSN (insn);
5635
5636 start_sequence ();
5637 duplicate_insn_chain (BB_HEAD (bb), insn);
5638 if (dump_file)
5639 {
5640 unsigned count = 0;
5641 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5642 if (active_insn_p (insn))
5643 ++count;
5644 fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
5645 bb->index, copy_bb->index, count);
5646 }
5647 insn = get_insns ();
5648 end_sequence ();
5649 emit_insn_before (insn, before);
5650
5651 /* Redirect all the paths that need no prologue into copy_bb. */
5652 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
5653 if (!bitmap_bit_p (need_prologue, e->src->index))
5654 {
5655 redirect_edge_and_branch_force (e, copy_bb);
5656 continue;
5657 }
5658 else
5659 ei_next (&ei);
5660}
5661#endif
5662
5663#if defined (HAVE_return) || defined (HAVE_simple_return)
5664/* Return true if there are any active insns between HEAD and TAIL. */
39d52ae5 5665static bool
ffe14686 5666active_insn_between (rtx head, rtx tail)
39d52ae5 5667{
ffe14686
AM
5668 while (tail)
5669 {
5670 if (active_insn_p (tail))
5671 return true;
5672 if (tail == head)
5673 return false;
5674 tail = PREV_INSN (tail);
5675 }
5676 return false;
5677}
5678
5679/* LAST_BB is a block that exits and is empty of active instructions.
5680 Examine its predecessors for jumps that can be converted to
5681 (conditional) returns. */
5682static VEC (edge, heap) *
5683convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5684 VEC (edge, heap) *unconverted ATTRIBUTE_UNUSED)
5685{
5686 int i;
5687 basic_block bb;
39d52ae5 5688 rtx label;
ffe14686
AM
5689 edge_iterator ei;
5690 edge e;
5691 VEC(basic_block,heap) *src_bbs;
39d52ae5 5692
ffe14686
AM
5693 src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
5694 FOR_EACH_EDGE (e, ei, last_bb->preds)
5695 if (e->src != ENTRY_BLOCK_PTR)
5696 VEC_quick_push (basic_block, src_bbs, e->src);
5697
5698 label = BB_HEAD (last_bb);
5699
5700 FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
39d52ae5 5701 {
ffe14686
AM
5702 rtx jump = BB_END (bb);
5703
5704 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5705 continue;
5706
5707 e = find_edge (bb, last_bb);
5708
5709 /* If we have an unconditional jump, we can replace that
5710 with a simple return instruction. */
5711 if (simplejump_p (jump))
5712 {
5713 /* The use of the return register might be present in the exit
5714 fallthru block. Either:
5715 - removing the use is safe, and we should remove the use in
5716 the exit fallthru block, or
5717 - removing the use is not safe, and we should add it here.
5718 For now, we conservatively choose the latter. Either of the
5719 2 helps in crossjumping. */
5720 emit_use_return_register_into_block (bb);
5721
5722 emit_return_into_block (simple_p, bb);
5723 delete_insn (jump);
5724 }
5725
5726 /* If we have a conditional jump branching to the last
5727 block, we can try to replace that with a conditional
5728 return instruction. */
5729 else if (condjump_p (jump))
5730 {
5731 rtx dest;
5732
5733 if (simple_p)
5734 dest = simple_return_rtx;
5735 else
5736 dest = ret_rtx;
5737 if (!redirect_jump (jump, dest, 0))
5738 {
5739#ifdef HAVE_simple_return
5740 if (simple_p)
5741 {
5742 if (dump_file)
5743 fprintf (dump_file,
5744 "Failed to redirect bb %d branch.\n", bb->index);
5745 VEC_safe_push (edge, heap, unconverted, e);
5746 }
5747#endif
5748 continue;
5749 }
5750
5751 /* See comment in simplejump_p case above. */
5752 emit_use_return_register_into_block (bb);
5753
5754 /* If this block has only one successor, it both jumps
5755 and falls through to the fallthru block, so we can't
5756 delete the edge. */
5757 if (single_succ_p (bb))
5758 continue;
5759 }
5760 else
5761 {
5762#ifdef HAVE_simple_return
5763 if (simple_p)
5764 {
5765 if (dump_file)
5766 fprintf (dump_file,
5767 "Failed to redirect bb %d branch.\n", bb->index);
5768 VEC_safe_push (edge, heap, unconverted, e);
5769 }
5770#endif
5771 continue;
5772 }
5773
5774 /* Fix up the CFG for the successful change we just made. */
5775 redirect_edge_succ (e, EXIT_BLOCK_PTR);
d3b623c7 5776 e->flags &= ~EDGE_CROSSING;
39d52ae5 5777 }
ffe14686
AM
5778 VEC_free (basic_block, heap, src_bbs);
5779 return unconverted;
39d52ae5
BS
5780}
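
/* Illustrative effect of the conversion above: a predecessor that ends in

       (jump_insn (set (pc) (label_ref <last_bb label>)))

   is rewritten to end in (simple_return) -- or (return) when !SIMPLE_P --
   and its outgoing edge is redirected straight to the exit block.  */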
5781
ffe14686
AM
5782/* Emit a return insn for the exit fallthru block. */
5783static basic_block
5784emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5785{
5786 basic_block last_bb = exit_fallthru_edge->src;
5787
5788 if (JUMP_P (BB_END (last_bb)))
5789 {
5790 last_bb = split_edge (exit_fallthru_edge);
5791 exit_fallthru_edge = single_succ_edge (last_bb);
5792 }
5793 emit_barrier_after (BB_END (last_bb));
5794 emit_return_into_block (simple_p, last_bb);
5795 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5796 return last_bb;
5797}
5798#endif
5799
5800
9faa82d8 5801/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58 5802 this into place with notes indicating where the prologue ends and where
484db665
BS
5803 the epilogue begins. Update the basic block information when possible.
5804
5805 Notes on epilogue placement:
5806 There are several kinds of edges to the exit block:
5807 * a single fallthru edge from LAST_BB
5808 * possibly, edges from blocks containing sibcalls
5809 * possibly, fake edges from infinite loops
5810
5811 The epilogue is always emitted on the fallthru edge from the last basic
5812 block in the function, LAST_BB, into the exit block.
5813
5814 If LAST_BB is empty except for a label, it is the target of every
5815 other basic block in the function that ends in a return. If a
5816 target has a return or simple_return pattern (possibly with
5817 conditional variants), these basic blocks can be changed so that a
5818 return insn is emitted into them, and their target is adjusted to
5819 the real exit block.
5820
5821 Notes on shrink wrapping: We implement a fairly conservative
5822 version of shrink-wrapping rather than the textbook one. We only
5823 generate a single prologue and a single epilogue. This is
5824 sufficient to catch a number of interesting cases involving early
5825 exits.
5826
5827 First, we identify the blocks that require the prologue to occur before
5828 them. These are the ones that modify a call-saved register, or reference
5829 any of the stack or frame pointer registers. To simplify things, we then
5830 mark everything reachable from these blocks as also requiring a prologue.
5831 This takes care of loops automatically, and avoids the need to examine
5832 whether MEMs reference the frame, since it is sufficient to check for
5833 occurrences of the stack or frame pointer.
5834
5835 We then compute the set of blocks for which the need for a prologue
5836 is anticipatable (borrowing terminology from the shrink-wrapping
5837 description in Muchnick's book). These are the blocks which either
5838 require a prologue themselves, or those that have only successors
5839 where the prologue is anticipatable. The prologue needs to be
5840 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5841 is not. For the moment, we ensure that only one such edge exists.
5842
5843 The epilogue is placed as described above, but we make a
5844 distinction between inserting return and simple_return patterns
5845 when modifying other blocks that end in a return. Blocks that end
5846 in a sibcall omit the sibcall_epilogue if the block is not in
5847 ANTIC. */
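
/* A small worked example of the shrink-wrapping scheme described above
   (illustrative):

       int f (struct s *p)
       {
         if (p == 0)
           return -1;          // block A: early exit, needs no frame
         return g (p) + 1;     // block B: calls g, needs the prologue
       }

   Block A neither writes a call-saved register nor refers to the stack or
   frame pointer, so it is not marked in bb_flags; block B is.  B is then
   the only block in ANTIC reached from a block outside ANTIC, so the
   prologue is inserted on the edge A->B and the early-return path executes
   without setting up a frame.  */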
bdac5f58 5848
6fb5fa3c
DB
5849static void
5850thread_prologue_and_epilogue_insns (void)
bdac5f58 5851{
7458026b 5852 bool inserted;
484db665 5853#ifdef HAVE_simple_return
ffe14686 5854 VEC (edge, heap) *unconverted_simple_returns = NULL;
484db665 5855 bool nonempty_prologue;
ffe14686
AM
5856 bitmap_head bb_flags;
5857 unsigned max_grow_size;
484db665 5858#endif
ffe14686 5859 rtx returnjump;
2e239f9d 5860 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
484db665
BS
5861 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
5862 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
628f6a4e 5863 edge_iterator ei;
484db665
BS
5864
5865 df_analyze ();
e881bb1b 5866
a8ba47cb 5867 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
7458026b
ILT
5868
5869 inserted = false;
5870 seq = NULL_RTX;
5871 epilogue_end = NULL_RTX;
484db665 5872 returnjump = NULL_RTX;
7458026b
ILT
5873
5874 /* Can't deal with multiple successors of the entry block at the
5875 moment. Function should always have at least one entry
5876 point. */
5877 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5878 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
484db665
BS
5879 orig_entry_edge = entry_edge;
5880
484db665 5881 split_prologue_seq = NULL_RTX;
7458026b
ILT
5882 if (flag_split_stack
5883 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5884 == NULL))
5885 {
5886#ifndef HAVE_split_stack_prologue
5887 gcc_unreachable ();
5888#else
5889 gcc_assert (HAVE_split_stack_prologue);
5890
5891 start_sequence ();
5892 emit_insn (gen_split_stack_prologue ());
484db665 5893 split_prologue_seq = get_insns ();
7458026b
ILT
5894 end_sequence ();
5895
484db665
BS
5896 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5897 set_insn_locators (split_prologue_seq, prologue_locator);
7458026b
ILT
5898#endif
5899 }
5900
484db665 5901 prologue_seq = NULL_RTX;
bdac5f58
TW
5902#ifdef HAVE_prologue
5903 if (HAVE_prologue)
5904 {
e881bb1b 5905 start_sequence ();
718fe406 5906 seq = gen_prologue ();
e881bb1b 5907 emit_insn (seq);
bdac5f58 5908
b8698a0f 5909 /* Insert an explicit USE for the frame pointer
6fb5fa3c 5910 if the profiling is on and the frame pointer is required. */
e3b5732b 5911 if (crtl->profile && frame_pointer_needed)
c41c1387 5912 emit_use (hard_frame_pointer_rtx);
6fb5fa3c 5913
bdac5f58 5914 /* Retain a map of the prologue insns. */
cd9c1ca8 5915 record_insns (seq, NULL, &prologue_insn_hash);
56d17681 5916 emit_note (NOTE_INSN_PROLOGUE_END);
b8698a0f 5917
56d17681
UB
5918 /* Ensure that instructions are not moved into the prologue when
5919 profiling is on. The call to the profiling routine can be
5920 emitted within the live range of a call-clobbered register. */
3c5273a9 5921 if (!targetm.profile_before_prologue () && crtl->profile)
56d17681 5922 emit_insn (gen_blockage ());
9185a8d5 5923
484db665 5924 prologue_seq = get_insns ();
e881bb1b 5925 end_sequence ();
484db665
BS
5926 set_insn_locators (prologue_seq, prologue_locator);
5927 }
5928#endif
e881bb1b 5929
ffe14686 5930#ifdef HAVE_simple_return
484db665
BS
5931 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5932
484db665
BS
5933 /* Try to perform a kind of shrink-wrapping, making sure the
5934 prologue/epilogue is emitted only around those parts of the
5935 function that require it. */
5936
5937 nonempty_prologue = false;
5938 for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
5939 if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
5940 {
5941 nonempty_prologue = true;
5942 break;
5943 }
5944
5945 if (flag_shrink_wrap && HAVE_simple_return
ee049cb7 5946 && (targetm.profile_before_prologue () || !crtl->profile)
484db665
BS
5947 && nonempty_prologue && !crtl->calls_eh_return)
5948 {
5949 HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
ee3d2ecd 5950 struct hard_reg_set_container set_up_by_prologue;
484db665 5951 rtx p_insn;
484db665
BS
5952 VEC(basic_block, heap) *vec;
5953 basic_block bb;
5954 bitmap_head bb_antic_flags;
5955 bitmap_head bb_on_list;
ffe14686 5956 bitmap_head bb_tail;
484db665
BS
5957
5958 if (dump_file)
5959 fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");
5960
5961 /* Compute the registers set and used in the prologue. */
5962 CLEAR_HARD_REG_SET (prologue_clobbered);
5963 CLEAR_HARD_REG_SET (prologue_used);
5964 for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
5965 {
5966 HARD_REG_SET this_used;
5967 if (!NONDEBUG_INSN_P (p_insn))
5968 continue;
5969
5970 CLEAR_HARD_REG_SET (this_used);
5971 note_uses (&PATTERN (p_insn), record_hard_reg_uses,
5972 &this_used);
5973 AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
5974 IOR_HARD_REG_SET (prologue_used, this_used);
5975 note_stores (PATTERN (p_insn), record_hard_reg_sets,
5976 &prologue_clobbered);
5977 }
484db665 5978
39d52ae5
BS
5979 prepare_shrink_wrap (entry_edge->dest);
5980
484db665
BS
5981 bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
5982 bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
ffe14686 5983 bitmap_initialize (&bb_tail, &bitmap_default_obstack);
484db665 5984
ffe14686
AM
5985 /* Find the set of basic blocks that require a stack frame,
5986 and blocks that are too big to be duplicated. */
484db665
BS
5987
5988 vec = VEC_alloc (basic_block, heap, n_basic_blocks);
5989
ee3d2ecd
JJ
5990 CLEAR_HARD_REG_SET (set_up_by_prologue.set);
5991 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
5992 STACK_POINTER_REGNUM);
5993 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
764a2546 5994 if (frame_pointer_needed)
ee3d2ecd 5995 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
764a2546
BS
5996 HARD_FRAME_POINTER_REGNUM);
5997 if (pic_offset_table_rtx)
ee3d2ecd 5998 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
764a2546 5999 PIC_OFFSET_TABLE_REGNUM);
5c43016f 6000 if (stack_realign_drap && crtl->drap_reg)
ee3d2ecd
JJ
6001 add_to_hard_reg_set (&set_up_by_prologue.set,
6002 GET_MODE (crtl->drap_reg),
5c43016f 6003 REGNO (crtl->drap_reg));
ee3d2ecd
JJ
6004 if (targetm.set_up_by_prologue)
6005 targetm.set_up_by_prologue (&set_up_by_prologue);
764a2546 6006
ffe14686
AM
6007 /* We don't use a different max size depending on
6008 optimize_bb_for_speed_p because increasing shrink-wrapping
6009 opportunities by duplicating tail blocks can actually result
6010 in an overall decrease in code size. */
6011 max_grow_size = get_uncond_jump_length ();
6012 max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
6013
484db665
BS
6014 FOR_EACH_BB (bb)
6015 {
6016 rtx insn;
ffe14686
AM
6017 unsigned size = 0;
6018
6019 FOR_BB_INSNS (bb, insn)
6020 if (NONDEBUG_INSN_P (insn))
6021 {
6022 if (requires_stack_frame_p (insn, prologue_used,
ee3d2ecd 6023 set_up_by_prologue.set))
ffe14686
AM
6024 {
6025 if (bb == entry_edge->dest)
6026 goto fail_shrinkwrap;
6027 bitmap_set_bit (&bb_flags, bb->index);
6028 VEC_quick_push (basic_block, vec, bb);
6029 break;
6030 }
6031 else if (size <= max_grow_size)
6032 {
6033 size += get_attr_min_length (insn);
6034 if (size > max_grow_size)
6035 bitmap_set_bit (&bb_on_list, bb->index);
6036 }
6037 }
484db665
BS
6038 }
6039
ffe14686
AM
6040 /* Blocks that really need a prologue, or are too big for tails. */
6041 bitmap_ior_into (&bb_on_list, &bb_flags);
6042
484db665
BS
6043 /* For every basic block that needs a prologue, mark all blocks
6044 reachable from it, so as to ensure they are also seen as
6045 requiring a prologue. */
6046 while (!VEC_empty (basic_block, vec))
6047 {
6048 basic_block tmp_bb = VEC_pop (basic_block, vec);
ffe14686 6049
484db665
BS
6050 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6051 if (e->dest != EXIT_BLOCK_PTR
6052 && bitmap_set_bit (&bb_flags, e->dest->index))
6053 VEC_quick_push (basic_block, vec, e->dest);
6054 }
ffe14686
AM
6055
6056 /* Find the set of basic blocks that need no prologue, have a
6057 single successor, can be duplicated, meet a max size
6058 requirement, and go to the exit via like blocks. */
6059 VEC_quick_push (basic_block, vec, EXIT_BLOCK_PTR);
6060 while (!VEC_empty (basic_block, vec))
484db665 6061 {
ffe14686
AM
6062 basic_block tmp_bb = VEC_pop (basic_block, vec);
6063
6064 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6065 if (single_succ_p (e->src)
6066 && !bitmap_bit_p (&bb_on_list, e->src->index)
6e456f4c
JJ
6067 && can_duplicate_block_p (e->src))
6068 {
6069 edge pe;
6070 edge_iterator pei;
6071
6072 /* If there is predecessor of e->src which doesn't
6073 need prologue and the edge is complex,
6074 we might not be able to redirect the branch
6075 to a copy of e->src. */
6076 FOR_EACH_EDGE (pe, pei, e->src->preds)
6077 if ((pe->flags & EDGE_COMPLEX) != 0
6078 && !bitmap_bit_p (&bb_flags, pe->src->index))
6079 break;
6080 if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
6081 VEC_quick_push (basic_block, vec, e->src);
6082 }
484db665
BS
6083 }
6084
6085 /* Now walk backwards from every block that is marked as needing
ffe14686
AM
6086 a prologue to compute the bb_antic_flags bitmap. Exclude
 6087 tail blocks; they can be duplicated to be used on paths not
6088 needing a prologue. */
6089 bitmap_clear (&bb_on_list);
6090 bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
484db665
BS
6091 FOR_EACH_BB (bb)
6092 {
ffe14686 6093 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
484db665
BS
6094 continue;
6095 FOR_EACH_EDGE (e, ei, bb->preds)
6096 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6097 && bitmap_set_bit (&bb_on_list, e->src->index))
6098 VEC_quick_push (basic_block, vec, e->src);
6099 }
6100 while (!VEC_empty (basic_block, vec))
6101 {
6102 basic_block tmp_bb = VEC_pop (basic_block, vec);
484db665
BS
6103 bool all_set = true;
6104
6105 bitmap_clear_bit (&bb_on_list, tmp_bb->index);
6106 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6107 if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
6108 {
6109 all_set = false;
6110 break;
6111 }
6112
6113 if (all_set)
6114 {
6115 bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
6116 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6117 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6118 && bitmap_set_bit (&bb_on_list, e->src->index))
6119 VEC_quick_push (basic_block, vec, e->src);
6120 }
6121 }
6122 /* Find exactly one edge that leads to a block in ANTIC from
6123 a block that isn't. */
6124 if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
6125 FOR_EACH_BB (bb)
6126 {
6127 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6128 continue;
6129 FOR_EACH_EDGE (e, ei, bb->preds)
6130 if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
6131 {
6132 if (entry_edge != orig_entry_edge)
6133 {
6134 entry_edge = orig_entry_edge;
6135 if (dump_file)
6136 fprintf (dump_file, "More than one candidate edge.\n");
6137 goto fail_shrinkwrap;
6138 }
6139 if (dump_file)
6140 fprintf (dump_file, "Found candidate edge for "
6141 "shrink-wrapping, %d->%d.\n", e->src->index,
6142 e->dest->index);
6143 entry_edge = e;
6144 }
6145 }
6146
ffe14686 6147 if (entry_edge != orig_entry_edge)
484db665 6148 {
ffe14686
AM
6149 /* Test whether the prologue is known to clobber any register
6150 (other than FP or SP) which are live on the edge. */
6151 CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
6152 if (frame_pointer_needed)
6153 CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
6154 CLEAR_HARD_REG_SET (live_on_edge);
6155 reg_set_to_hard_reg_set (&live_on_edge,
6156 df_get_live_in (entry_edge->dest));
6157 if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
6158 {
6159 entry_edge = orig_entry_edge;
6160 if (dump_file)
6161 fprintf (dump_file,
6162 "Shrink-wrapping aborted due to clobber.\n");
6163 }
484db665 6164 }
ffe14686 6165 if (entry_edge != orig_entry_edge)
484db665
BS
6166 {
6167 crtl->shrink_wrapped = true;
17a3dae3
BS
6168 if (dump_file)
6169 fprintf (dump_file, "Performing shrink-wrapping.\n");
ffe14686
AM
6170
6171 /* Find tail blocks reachable from both blocks needing a
6172 prologue and blocks not needing a prologue. */
6173 if (!bitmap_empty_p (&bb_tail))
6174 FOR_EACH_BB (bb)
6175 {
6176 bool some_pro, some_no_pro;
6177 if (!bitmap_bit_p (&bb_tail, bb->index))
6178 continue;
6179 some_pro = some_no_pro = false;
6180 FOR_EACH_EDGE (e, ei, bb->preds)
6181 {
6182 if (bitmap_bit_p (&bb_flags, e->src->index))
6183 some_pro = true;
6184 else
6185 some_no_pro = true;
6186 }
6187 if (some_pro && some_no_pro)
6188 VEC_quick_push (basic_block, vec, bb);
6189 else
6190 bitmap_clear_bit (&bb_tail, bb->index);
6191 }
6192 /* Find the head of each tail. */
6193 while (!VEC_empty (basic_block, vec))
6194 {
6195 basic_block tbb = VEC_pop (basic_block, vec);
6196
6197 if (!bitmap_bit_p (&bb_tail, tbb->index))
6198 continue;
6199
6200 while (single_succ_p (tbb))
6201 {
6202 tbb = single_succ (tbb);
6203 bitmap_clear_bit (&bb_tail, tbb->index);
6204 }
6205 }
6206 /* Now duplicate the tails. */
6207 if (!bitmap_empty_p (&bb_tail))
6208 FOR_EACH_BB_REVERSE (bb)
6209 {
6210 basic_block copy_bb, tbb;
6211 rtx insert_point;
6212 int eflags;
6213
6214 if (!bitmap_clear_bit (&bb_tail, bb->index))
6215 continue;
6216
6217 /* Create a copy of BB, instructions and all, for
6218 use on paths that don't need a prologue.
6219 Ideal placement of the copy is on a fall-thru edge
6220 or after a block that would jump to the copy. */
6221 FOR_EACH_EDGE (e, ei, bb->preds)
6222 if (!bitmap_bit_p (&bb_flags, e->src->index)
6223 && single_succ_p (e->src))
6224 break;
6225 if (e)
6226 {
6227 copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
6228 NULL_RTX, e->src);
6229 BB_COPY_PARTITION (copy_bb, e->src);
6230 }
6231 else
6232 {
6233 /* Otherwise put the copy at the end of the function. */
6234 copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
6235 EXIT_BLOCK_PTR->prev_bb);
6236 BB_COPY_PARTITION (copy_bb, bb);
6237 }
6238
6239 insert_point = emit_note_after (NOTE_INSN_DELETED,
6240 BB_END (copy_bb));
6241 emit_barrier_after (BB_END (copy_bb));
6242
6243 tbb = bb;
6244 while (1)
6245 {
6246 dup_block_and_redirect (tbb, copy_bb, insert_point,
6247 &bb_flags);
6248 tbb = single_succ (tbb);
6249 if (tbb == EXIT_BLOCK_PTR)
6250 break;
6251 e = split_block (copy_bb, PREV_INSN (insert_point));
6252 copy_bb = e->dest;
6253 }
6254
6255 /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
6256 We have yet to add a simple_return to the tails,
 6257 as we'd like to run convert_jumps_to_returns first in
 6258 case the block is no longer used after that. */
6259 eflags = EDGE_FAKE;
6260 if (CALL_P (PREV_INSN (insert_point))
6261 && SIBLING_CALL_P (PREV_INSN (insert_point)))
6262 eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
6263 make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);
6264
6265 /* verify_flow_info doesn't like a note after a
6266 sibling call. */
6267 delete_insn (insert_point);
6268 if (bitmap_empty_p (&bb_tail))
6269 break;
6270 }
484db665
BS
6271 }
6272
6273 fail_shrinkwrap:
ffe14686 6274 bitmap_clear (&bb_tail);
484db665
BS
6275 bitmap_clear (&bb_antic_flags);
6276 bitmap_clear (&bb_on_list);
6277 VEC_free (basic_block, heap, vec);
bdac5f58 6278 }
bdac5f58 6279#endif
bdac5f58 6280
484db665
BS
6281 if (split_prologue_seq != NULL_RTX)
6282 {
f4b31a33 6283 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
484db665
BS
6284 inserted = true;
6285 }
6286 if (prologue_seq != NULL_RTX)
6287 {
6288 insert_insn_on_edge (prologue_seq, entry_edge);
6289 inserted = true;
6290 }
6291
19d3c25c
RH
6292 /* If the exit block has no non-fake predecessors, we don't need
6293 an epilogue. */
628f6a4e 6294 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
19d3c25c
RH
6295 if ((e->flags & EDGE_FAKE) == 0)
6296 break;
6297 if (e == NULL)
6298 goto epilogue_done;
6299
a8ba47cb 6300 rtl_profile_for_bb (EXIT_BLOCK_PTR);
484db665 6301
ffe14686
AM
6302 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
6303
484db665
BS
6304 /* If we're allowed to generate a simple return instruction, then by
6305 definition we don't need a full epilogue. If the last basic
6306 block before the exit block does not contain active instructions,
6307 examine its predecessors and try to emit (conditional) return
6308 instructions. */
ffe14686
AM
6309#ifdef HAVE_simple_return
6310 if (entry_edge != orig_entry_edge)
69732dcb 6311 {
ffe14686 6312 if (optimize)
69732dcb 6313 {
ffe14686 6314 unsigned i, last;
69732dcb 6315
ffe14686
AM
6316 /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
6317 (but won't remove). Stop at end of current preds. */
6318 last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
6319 for (i = 0; i < last; i++)
484db665 6320 {
ffe14686
AM
6321 e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
6322 if (LABEL_P (BB_HEAD (e->src))
6323 && !bitmap_bit_p (&bb_flags, e->src->index)
6324 && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
6325 unconverted_simple_returns
6326 = convert_jumps_to_returns (e->src, true,
6327 unconverted_simple_returns);
484db665 6328 }
ffe14686 6329 }
484db665 6330
ffe14686
AM
6331 if (exit_fallthru_edge != NULL
6332 && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
6333 && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
6334 {
6335 basic_block last_bb;
4c029f40 6336
ffe14686
AM
6337 last_bb = emit_return_for_exit (exit_fallthru_edge, true);
6338 returnjump = BB_END (last_bb);
6339 exit_fallthru_edge = NULL;
6340 }
6341 }
484db665 6342#endif
ffe14686
AM
6343#ifdef HAVE_return
6344 if (HAVE_return)
6345 {
6346 if (exit_fallthru_edge == NULL)
6347 goto epilogue_done;
69732dcb 6348
ffe14686
AM
6349 if (optimize)
6350 {
6351 basic_block last_bb = exit_fallthru_edge->src;
484db665 6352
ffe14686
AM
6353 if (LABEL_P (BB_HEAD (last_bb))
6354 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6355 convert_jumps_to_returns (last_bb, false, NULL);
6356
1ff2fd21
AM
6357 if (EDGE_COUNT (last_bb->preds) != 0
6358 && single_succ_p (last_bb))
484db665 6359 {
ffe14686
AM
6360 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6361 epilogue_end = returnjump = BB_END (last_bb);
484db665 6362#ifdef HAVE_simple_return
ffe14686
AM
6363 /* Emitting the return may add a basic block.
6364 Fix bb_flags for the added block. */
6365 if (last_bb != exit_fallthru_edge->src)
6366 bitmap_set_bit (&bb_flags, last_bb->index);
484db665 6367#endif
ffe14686 6368 goto epilogue_done;
69732dcb 6369 }
2dd8bc01 6370 }
69732dcb
RH
6371 }
6372#endif
cd9c1ca8
RH
6373
6374 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6375 this marker for the splits of EH_RETURN patterns, and nothing else
6376 uses the flag in the meantime. */
6377 epilogue_completed = 1;
6378
6379#ifdef HAVE_eh_return
6380 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6381 some targets, these get split to a special version of the epilogue
6382 code. In order to be able to properly annotate these with unwind
6383 info, try to split them now. If we get a valid split, drop an
6384 EPILOGUE_BEG note and mark the insns as epilogue insns. */
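  /* (Illustrative note.)  Such edges come from the unwinder's
     __builtin_eh_return (stack_adjust, handler) calls in libgcc; that
     builtin expands to the target's eh_return pattern, which is what
     eh_returnjump_p recognizes below.  */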
6385 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6386 {
6387 rtx prev, last, trial;
6388
6389 if (e->flags & EDGE_FALLTHRU)
6390 continue;
6391 last = BB_END (e->src);
6392 if (!eh_returnjump_p (last))
6393 continue;
6394
6395 prev = PREV_INSN (last);
6396 trial = try_split (PATTERN (last), last, 1);
6397 if (trial == last)
6398 continue;
6399
6400 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6401 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6402 }
6403#endif
6404
484db665
BS
6405 /* If nothing falls through into the exit block, we don't need an
6406 epilogue. */
623a66fa 6407
484db665 6408 if (exit_fallthru_edge == NULL)
623a66fa
R
6409 goto epilogue_done;
6410
bdac5f58
TW
6411#ifdef HAVE_epilogue
6412 if (HAVE_epilogue)
6413 {
19d3c25c 6414 start_sequence ();
2e040219 6415 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
19d3c25c 6416 seq = gen_epilogue ();
55c623b5
UW
6417 if (seq)
6418 emit_jump_insn (seq);
bdac5f58 6419
19d3c25c 6420 /* Retain a map of the epilogue insns. */
cd9c1ca8 6421 record_insns (seq, NULL, &epilogue_insn_hash);
0435312e 6422 set_insn_locators (seq, epilogue_locator);
bdac5f58 6423
2f937369 6424 seq = get_insns ();
484db665 6425 returnjump = get_last_insn ();
718fe406 6426 end_sequence ();
e881bb1b 6427
484db665 6428 insert_insn_on_edge (seq, exit_fallthru_edge);
7458026b 6429 inserted = true;
dc0ff1c8
BS
6430
6431 if (JUMP_P (returnjump))
387748de 6432 set_return_jump_label (returnjump);
bdac5f58 6433 }
623a66fa 6434 else
bdac5f58 6435#endif
623a66fa
R
6436 {
6437 basic_block cur_bb;
6438
484db665 6439 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
623a66fa
R
6440 goto epilogue_done;
6441 /* We have a fall-through edge to the exit block, the source is not
6442 at the end of the function, and there will be an assembler epilogue
6443 at the end of the function.
6444 We can't use force_nonfallthru here, because that would try to
484db665 6445 use return. Inserting a jump 'by hand' is extremely messy, so
623a66fa 6446 we take advantage of cfg_layout_finalize using
484db665 6447 fixup_fallthru_exit_predecessor. */
35b6b437 6448 cfg_layout_initialize (0);
623a66fa 6449 FOR_EACH_BB (cur_bb)
24bd1a0b
DB
6450 if (cur_bb->index >= NUM_FIXED_BLOCKS
6451 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 6452 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
6453 cfg_layout_finalize ();
6454 }
cf103ca4 6455
19d3c25c 6456epilogue_done:
484db665 6457
a8ba47cb 6458 default_rtl_profile ();
e881bb1b 6459
ca1117cc 6460 if (inserted)
30a873c3 6461 {
cf103ca4
EB
6462 sbitmap blocks;
6463
30a873c3
ZD
6464 commit_edge_insertions ();
6465
cf103ca4
EB
6466 /* Look for basic blocks within the prologue insns. */
6467 blocks = sbitmap_alloc (last_basic_block);
6468 sbitmap_zero (blocks);
6469 SET_BIT (blocks, entry_edge->dest->index);
764a2546 6470 SET_BIT (blocks, orig_entry_edge->dest->index);
cf103ca4
EB
6471 find_many_sub_basic_blocks (blocks);
6472 sbitmap_free (blocks);
6473
30a873c3
ZD
6474 /* The epilogue insns we inserted may cause the exit edge to no longer
6475 be fallthru. */
6476 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6477 {
6478 if (((e->flags & EDGE_FALLTHRU) != 0)
6479 && returnjump_p (BB_END (e->src)))
6480 e->flags &= ~EDGE_FALLTHRU;
6481 }
6482 }
0a1c58a2 6483
484db665
BS
6484#ifdef HAVE_simple_return
6485 /* If there were branches to an empty LAST_BB which we tried to
6486 convert to conditional simple_returns, but couldn't for some
6487 reason, create a block to hold a simple_return insn and redirect
6488 those remaining edges. */
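  /* (Illustrative note.)  Two landing blocks are tracked because, with
     hot/cold basic-block partitioning, an edge from the hot section should
     not be redirected into the cold section (and vice versa), so each
     partition gets its own block holding a simple_return.  */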
ffe14686 6489 if (!VEC_empty (edge, unconverted_simple_returns))
484db665 6490 {
ffe14686
AM
6491 basic_block simple_return_block_hot = NULL;
6492 basic_block simple_return_block_cold = NULL;
6493 edge pending_edge_hot = NULL;
6494 edge pending_edge_cold = NULL;
484db665 6495 basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
b966d3a9 6496 int i;
484db665
BS
6497
6498 gcc_assert (entry_edge != orig_entry_edge);
6499
6500 /* See if we can reuse the last insn that was emitted for the
6501 epilogue. */
6502 if (returnjump != NULL_RTX
6503 && JUMP_LABEL (returnjump) == simple_return_rtx)
6504 {
ffe14686 6505 e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
484db665
BS
6506 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6507 simple_return_block_hot = e->dest;
6508 else
6509 simple_return_block_cold = e->dest;
6510 }
6511
ffe14686
AM
6512 /* Also check returns we might need to add to tail blocks. */
6513 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6514 if (EDGE_COUNT (e->src->preds) != 0
6515 && (e->flags & EDGE_FAKE) != 0
6516 && !bitmap_bit_p (&bb_flags, e->src->index))
6517 {
6518 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6519 pending_edge_hot = e;
6520 else
6521 pending_edge_cold = e;
6522 }
6523
6524 FOR_EACH_VEC_ELT (edge, unconverted_simple_returns, i, e)
484db665 6525 {
484db665 6526 basic_block *pdest_bb;
ffe14686 6527 edge pending;
484db665 6528
ffe14686
AM
6529 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6530 {
6531 pdest_bb = &simple_return_block_hot;
6532 pending = pending_edge_hot;
6533 }
484db665 6534 else
ffe14686
AM
6535 {
6536 pdest_bb = &simple_return_block_cold;
6537 pending = pending_edge_cold;
6538 }
6539
6540 if (*pdest_bb == NULL && pending != NULL)
6541 {
6542 emit_return_into_block (true, pending->src);
6543 pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6544 *pdest_bb = pending->src;
6545 }
6546 else if (*pdest_bb == NULL)
484db665
BS
6547 {
6548 basic_block bb;
6549 rtx start;
6550
6551 bb = create_basic_block (NULL, NULL, exit_pred);
6552 BB_COPY_PARTITION (bb, e->src);
6553 start = emit_jump_insn_after (gen_simple_return (),
6554 BB_END (bb));
6555 JUMP_LABEL (start) = simple_return_rtx;
6556 emit_barrier_after (start);
6557
6558 *pdest_bb = bb;
6559 make_edge (bb, EXIT_BLOCK_PTR, 0);
6560 }
6561 redirect_edge_and_branch_force (e, *pdest_bb);
484db665 6562 }
ffe14686
AM
6563 VEC_free (edge, heap, unconverted_simple_returns);
6564 }
6565
6566 if (entry_edge != orig_entry_edge)
6567 {
6568 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6569 if (EDGE_COUNT (e->src->preds) != 0
6570 && (e->flags & EDGE_FAKE) != 0
6571 && !bitmap_bit_p (&bb_flags, e->src->index))
6572 {
6573 emit_return_into_block (true, e->src);
6574 e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6575 }
484db665
BS
6576 }
6577#endif
6578
0a1c58a2
JL
6579#ifdef HAVE_sibcall_epilogue
6580 /* Emit sibling epilogues before any sibling call sites. */
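  /* (Illustrative note.)  A sibling call is a tail call emitted as a jump
     rather than a call; for example a function like

        int f (int x) { return g (x + 1); }

     can end in a direct jump to g, so the target's sibcall_epilogue
     sequence has to be placed before that jump.  That is what the loop
     below does for every sibling call feeding the exit block.  */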
628f6a4e 6581 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
0a1c58a2
JL
6582 {
6583 basic_block bb = e->src;
a813c111 6584 rtx insn = BB_END (bb);
484db665 6585 rtx ep_seq;
0a1c58a2 6586
4b4bf941 6587 if (!CALL_P (insn)
484db665 6588 || ! SIBLING_CALL_P (insn)
ffe14686 6589#ifdef HAVE_simple_return
484db665 6590 || (entry_edge != orig_entry_edge
ffe14686
AM
6591 && !bitmap_bit_p (&bb_flags, bb->index))
6592#endif
6593 )
628f6a4e
BE
6594 {
6595 ei_next (&ei);
6596 continue;
6597 }
0a1c58a2 6598
484db665
BS
6599 ep_seq = gen_sibcall_epilogue ();
6600 if (ep_seq)
6601 {
6602 start_sequence ();
6603 emit_note (NOTE_INSN_EPILOGUE_BEG);
6604 emit_insn (ep_seq);
6605 seq = get_insns ();
6606 end_sequence ();
0a1c58a2 6607
484db665
BS
6608 /* Retain a map of the epilogue insns. Used in life analysis to
6609 avoid getting rid of sibcall epilogue insns. Do this before we
6610 actually emit the sequence. */
6611 record_insns (seq, NULL, &epilogue_insn_hash);
6612 set_insn_locators (seq, epilogue_locator);
2f937369 6613
484db665
BS
6614 emit_insn_before (seq, insn);
6615 }
628f6a4e 6616 ei_next (&ei);
0a1c58a2
JL
6617 }
6618#endif
ca1117cc 6619
86c82654
RH
6620#ifdef HAVE_epilogue
6621 if (epilogue_end)
6622 {
6623 rtx insn, next;
6624
6625 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 6626 There is no need, however, to be quite so strict about the existence
071a42f9 6627 of such a note. Also possibly move
84c1fa24
UW
6628 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6629 info generation. */
718fe406 6630 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
6631 {
6632 next = NEXT_INSN (insn);
b8698a0f 6633 if (NOTE_P (insn)
a38e7aa5 6634 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
86c82654
RH
6635 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6636 }
6637 }
6638#endif
6fb5fa3c 6639
ffe14686 6640#ifdef HAVE_simple_return
484db665 6641 bitmap_clear (&bb_flags);
ffe14686 6642#endif
484db665 6643
6fb5fa3c
DB
6644 /* Threading the prologue and epilogue changes the artificial refs
6645 in the entry and exit blocks. */
6646 epilogue_completed = 1;
6647 df_update_entry_exit_and_calls ();
bdac5f58
TW
6648}
6649
cd9c1ca8
RH
6650/* Reposition the prologue-end and epilogue-begin notes after
6651 instruction scheduling. */
bdac5f58
TW
6652
6653void
6fb5fa3c 6654reposition_prologue_and_epilogue_notes (void)
bdac5f58 6655{
cd9c1ca8
RH
6656#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6657 || defined (HAVE_sibcall_epilogue)
cd9c1ca8
RH
6658 /* Since the hash table is created on demand, the fact that it is
6659 non-null is a signal that it is non-empty. */
6660 if (prologue_insn_hash != NULL)
bdac5f58 6661 {
cd9c1ca8 6662 size_t len = htab_elements (prologue_insn_hash);
997704f1 6663 rtx insn, last = NULL, note = NULL;
bdac5f58 6664
cd9c1ca8
RH
6665 /* Scan from the beginning until we reach the last prologue insn. */
6666 /* ??? While we do have the CFG intact, there are two problems:
6667 (1) The prologue can contain loops (typically probing the stack),
6668 which means that the end of the prologue isn't in the first bb.
6669 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6fb5fa3c 6670 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
bdac5f58 6671 {
4b4bf941 6672 if (NOTE_P (insn))
9392c110 6673 {
a38e7aa5 6674 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
0a1c58a2
JL
6675 note = insn;
6676 }
cd9c1ca8 6677 else if (contains (insn, prologue_insn_hash))
0a1c58a2 6678 {
9f53e965
RH
6679 last = insn;
6680 if (--len == 0)
6681 break;
6682 }
6683 }
797a6ac1 6684
9f53e965
RH
6685 if (last)
6686 {
cd9c1ca8 6687 if (note == NULL)
9f53e965 6688 {
cd9c1ca8
RH
6689 /* Scan forward looking for the PROLOGUE_END note. It should
6690 be right at the beginning of the block, possibly with other
6691 insn notes that got moved there. */
6692 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6693 {
6694 if (NOTE_P (note)
6695 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6696 break;
6697 }
9f53e965 6698 }
c93b03c2 6699
9f53e965 6700 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 6701 if (LABEL_P (last))
9f53e965
RH
6702 last = NEXT_INSN (last);
6703 reorder_insns (note, note, last);
bdac5f58 6704 }
0a1c58a2
JL
6705 }
6706
cd9c1ca8 6707 if (epilogue_insn_hash != NULL)
0a1c58a2 6708 {
cd9c1ca8
RH
6709 edge_iterator ei;
6710 edge e;
bdac5f58 6711
cd9c1ca8 6712 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
bdac5f58 6713 {
997704f1
RH
6714 rtx insn, first = NULL, note = NULL;
6715 basic_block bb = e->src;
c93b03c2 6716
997704f1 6717 /* Scan from the beginning until we reach the first epilogue insn. */
cd9c1ca8 6718 FOR_BB_INSNS (bb, insn)
9f53e965 6719 {
cd9c1ca8
RH
6720 if (NOTE_P (insn))
6721 {
6722 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6723 {
6724 note = insn;
997704f1 6725 if (first != NULL)
cd9c1ca8
RH
6726 break;
6727 }
6728 }
997704f1 6729 else if (first == NULL && contains (insn, epilogue_insn_hash))
cd9c1ca8 6730 {
997704f1 6731 first = insn;
cd9c1ca8
RH
6732 if (note != NULL)
6733 break;
6734 }
9392c110 6735 }
997704f1
RH
6736
6737 if (note)
6738 {
6739 /* If the function has a single basic block, and no real
b8698a0f 6740 epilogue insns (e.g. sibcall with no cleanup), the
997704f1
RH
6741 epilogue note can get scheduled before the prologue
 6742 note. If we have frame-related prologue insns, having
6743 them scanned during the epilogue will result in a crash.
6744 In this case re-order the epilogue note to just before
6745 the last insn in the block. */
6746 if (first == NULL)
6747 first = BB_END (bb);
6748
6749 if (PREV_INSN (first) != note)
6750 reorder_insns (note, note, PREV_INSN (first));
6751 }
bdac5f58
TW
6752 }
6753 }
6754#endif /* HAVE_prologue or HAVE_epilogue */
6755}
87ff9c8e 6756
faed5cc3
SB
6757/* Returns the name of the current function. */
6758const char *
6759current_function_name (void)
6760{
c7ac4fb5
NC
6761 if (cfun == NULL)
6762 return "<none>";
ae2bcd98 6763 return lang_hooks.decl_printable_name (cfun->decl, 2);
faed5cc3 6764}
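/* (Illustrative note.)  current_function_name is typically used for
   diagnostics and dump output, e.g.

      fprintf (dump_file, "processing %s\n", current_function_name ());  */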
ef330312
PB
6765\f
6766
c2924966 6767static unsigned int
ef330312
PB
6768rest_of_handle_check_leaf_regs (void)
6769{
6770#ifdef LEAF_REGISTERS
6771 current_function_uses_only_leaf_regs
6772 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6773#endif
c2924966 6774 return 0;
ef330312
PB
6775}
6776
8d8d1a28 6777/* Insert a TYPE into the used types hash table of CFUN. */
b646ba3f 6778
8d8d1a28
AH
6779static void
6780used_types_insert_helper (tree type, struct function *func)
33c9159e 6781{
8d8d1a28 6782 if (type != NULL && func != NULL)
33c9159e
AH
6783 {
6784 void **slot;
6785
6786 if (func->used_types_hash == NULL)
6787 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
8d8d1a28
AH
6788 htab_eq_pointer, NULL);
6789 slot = htab_find_slot (func->used_types_hash, type, INSERT);
33c9159e 6790 if (*slot == NULL)
8d8d1a28 6791 *slot = type;
33c9159e
AH
6792 }
6793}
6794
8d8d1a28
AH
6795/* Given a type, insert it into the used hash table in cfun. */
6796void
6797used_types_insert (tree t)
6798{
6799 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
095c7b3c
JJ
6800 if (TYPE_NAME (t))
6801 break;
6802 else
6803 t = TREE_TYPE (t);
29ce73cb
PB
6804 if (TREE_CODE (t) == ERROR_MARK)
6805 return;
095c7b3c
JJ
6806 if (TYPE_NAME (t) == NULL_TREE
6807 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6808 t = TYPE_MAIN_VARIANT (t);
8d8d1a28 6809 if (debug_info_level > DINFO_LEVEL_NONE)
b646ba3f
DS
6810 {
6811 if (cfun)
6812 used_types_insert_helper (t, cfun);
6813 else
6814 /* So this might be a type referenced by a global variable.
6815 Record that type so that we can later decide to emit its debug
6816 information. */
bc87224e 6817 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
b646ba3f
DS
6818 }
6819}
6820
 6821/* Helper to hash a struct types_used_by_vars_entry. */
6822
6823static hashval_t
6824hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6825{
6826 gcc_assert (entry && entry->var_decl && entry->type);
6827
6828 return iterative_hash_object (entry->type,
6829 iterative_hash_object (entry->var_decl, 0));
6830}
6831
6832/* Hash function of the types_used_by_vars_entry hash table. */
6833
6834hashval_t
6835types_used_by_vars_do_hash (const void *x)
6836{
6837 const struct types_used_by_vars_entry *entry =
6838 (const struct types_used_by_vars_entry *) x;
6839
6840 return hash_types_used_by_vars_entry (entry);
6841}
6842
 6843/* Equality function of the types_used_by_vars_entry hash table. */
6844
6845int
6846types_used_by_vars_eq (const void *x1, const void *x2)
6847{
6848 const struct types_used_by_vars_entry *e1 =
6849 (const struct types_used_by_vars_entry *) x1;
6850 const struct types_used_by_vars_entry *e2 =
6851 (const struct types_used_by_vars_entry *)x2;
6852
6853 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6854}
6855
6856/* Inserts an entry into the types_used_by_vars_hash hash table. */
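/* (Illustrative note.)  This records, for example, that a file-scope
   variable such as

      static struct config cfg;

   uses type struct config, so that debug information for the type can be
   emitted even if nothing else references it.  The name is made up.  */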
6857
6858void
6859types_used_by_var_decl_insert (tree type, tree var_decl)
6860{
6861 if (type != NULL && var_decl != NULL)
6862 {
6863 void **slot;
6864 struct types_used_by_vars_entry e;
6865 e.var_decl = var_decl;
6866 e.type = type;
6867 if (types_used_by_vars_hash == NULL)
6868 types_used_by_vars_hash =
6869 htab_create_ggc (37, types_used_by_vars_do_hash,
6870 types_used_by_vars_eq, NULL);
6871 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6872 hash_types_used_by_vars_entry (&e), INSERT);
6873 if (*slot == NULL)
6874 {
6875 struct types_used_by_vars_entry *entry;
a9429e29 6876 entry = ggc_alloc_types_used_by_vars_entry ();
b646ba3f
DS
6877 entry->type = type;
6878 entry->var_decl = var_decl;
6879 *slot = entry;
6880 }
6881 }
8d8d1a28
AH
6882}
6883
8ddbbcae 6884struct rtl_opt_pass pass_leaf_regs =
ef330312 6885{
8ddbbcae
JH
6886 {
6887 RTL_PASS,
e0a42b0f 6888 "*leaf_regs", /* name */
ef330312
PB
6889 NULL, /* gate */
6890 rest_of_handle_check_leaf_regs, /* execute */
6891 NULL, /* sub */
6892 NULL, /* next */
6893 0, /* static_pass_number */
7072a650 6894 TV_NONE, /* tv_id */
ef330312
PB
6895 0, /* properties_required */
6896 0, /* properties_provided */
6897 0, /* properties_destroyed */
6898 0, /* todo_flags_start */
8ddbbcae
JH
6899 0 /* todo_flags_finish */
6900 }
ef330312
PB
6901};
6902
6fb5fa3c
DB
6903static unsigned int
6904rest_of_handle_thread_prologue_and_epilogue (void)
6905{
6906 if (optimize)
6907 cleanup_cfg (CLEANUP_EXPENSIVE);
d3c12306 6908
6fb5fa3c
DB
6909 /* On some machines, the prologue and epilogue code, or parts thereof,
6910 can be represented as RTL. Doing so lets us schedule insns between
6911 it and the rest of the code and also allows delayed branch
6912 scheduling to operate in the epilogue. */
6fb5fa3c 6913 thread_prologue_and_epilogue_insns ();
d3c12306
EB
6914
6915 /* The stack usage info is finalized during prologue expansion. */
a11e0df4 6916 if (flag_stack_usage_info)
d3c12306
EB
6917 output_stack_usage ();
6918
6fb5fa3c
DB
6919 return 0;
6920}
6921
8ddbbcae 6922struct rtl_opt_pass pass_thread_prologue_and_epilogue =
6fb5fa3c 6923{
8ddbbcae
JH
6924 {
6925 RTL_PASS,
6fb5fa3c
DB
6926 "pro_and_epilogue", /* name */
6927 NULL, /* gate */
6928 rest_of_handle_thread_prologue_and_epilogue, /* execute */
6929 NULL, /* sub */
6930 NULL, /* next */
6931 0, /* static_pass_number */
6932 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6933 0, /* properties_required */
6934 0, /* properties_provided */
6935 0, /* properties_destroyed */
6936 TODO_verify_flow, /* todo_flags_start */
0d475361 6937 TODO_df_verify |
a36b8a1e 6938 TODO_df_finish | TODO_verify_rtl_sharing |
8ddbbcae
JH
6939 TODO_ggc_collect /* todo_flags_finish */
6940 }
6fb5fa3c 6941};
d8d72314
PB
6942\f
6943
6944/* This mini-pass fixes fall-out from SSA in asm statements that have
b8698a0f 6945 in-out constraints. Say you start with
d8d72314
PB
6946
6947 orig = inout;
6948 asm ("": "+mr" (inout));
6949 use (orig);
6950
6951 which is transformed very early to use explicit output and match operands:
6952
6953 orig = inout;
6954 asm ("": "=mr" (inout) : "0" (inout));
6955 use (orig);
6956
6957 Or, after SSA and copyprop,
6958
6959 asm ("": "=mr" (inout_2) : "0" (inout_1));
6960 use (inout_1);
6961
6962 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6963 they represent two separate values, so they will get different pseudo
6964 registers during expansion. Then, since the two operands need to match
6965 per the constraints, but use different pseudo registers, reload can
6966 only register a reload for these operands. But reloads can only be
6967 satisfied by hardregs, not by memory, so we need a register for this
6968 reload, just because we are presented with non-matching operands.
6969 So, even though we allow memory for this operand, no memory can be
6970 used for it, just because the two operands don't match. This can
6971 cause reload failures on register-starved targets.
6972
6973 So it's a symptom of reload not being able to use memory for reloads
 6974 or, alternatively, a symptom of both operands not coming into
6975 reload as matching (in which case the pseudo could go to memory just
6976 fine, as the alternative allows it, and no reload would be necessary).
6977 We fix the latter problem here, by transforming
6978
6979 asm ("": "=mr" (inout_2) : "0" (inout_1));
6980
6981 back to
6982
6983 inout_2 = inout_1;
6984 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6985
6986static void
6987match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
6988{
6989 int i;
6990 bool changed = false;
6991 rtx op = SET_SRC (p_sets[0]);
6992 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6993 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
1b4572a8 6994 bool *output_matched = XALLOCAVEC (bool, noutputs);
d8d72314 6995
d7b8033f 6996 memset (output_matched, 0, noutputs * sizeof (bool));
d8d72314
PB
6997 for (i = 0; i < ninputs; i++)
6998 {
6999 rtx input, output, insns;
7000 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
7001 char *end;
53220215 7002 int match, j;
d8d72314 7003
70f16287
JJ
7004 if (*constraint == '%')
7005 constraint++;
7006
d8d72314
PB
7007 match = strtoul (constraint, &end, 10);
7008 if (end == constraint)
7009 continue;
7010
7011 gcc_assert (match < noutputs);
7012 output = SET_DEST (p_sets[match]);
7013 input = RTVEC_ELT (inputs, i);
53220215
MM
7014 /* Only do the transformation for pseudos. */
7015 if (! REG_P (output)
7016 || rtx_equal_p (output, input)
d8d72314
PB
7017 || (GET_MODE (input) != VOIDmode
7018 && GET_MODE (input) != GET_MODE (output)))
7019 continue;
7020
53220215
MM
7021 /* We can't do anything if the output is also used as input,
7022 as we're going to overwrite it. */
7023 for (j = 0; j < ninputs; j++)
7024 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
7025 break;
7026 if (j != ninputs)
7027 continue;
7028
d7b8033f
JJ
7029 /* Avoid changing the same input several times. For
7030 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
7031 only change in once (to out1), rather than changing it
7032 first to out1 and afterwards to out2. */
7033 if (i > 0)
7034 {
7035 for (j = 0; j < noutputs; j++)
7036 if (output_matched[j] && input == SET_DEST (p_sets[j]))
7037 break;
7038 if (j != noutputs)
7039 continue;
7040 }
7041 output_matched[match] = true;
7042
d8d72314 7043 start_sequence ();
53220215 7044 emit_move_insn (output, input);
d8d72314
PB
7045 insns = get_insns ();
7046 end_sequence ();
d8d72314 7047 emit_insn_before (insns, insn);
53220215
MM
7048
7049 /* Now replace all mentions of the input with output. We can't
fa10beec 7050 just replace the occurrence in inputs[i], as the register might
53220215
MM
7051 also be used in some other input (or even in an address of an
7052 output), which would mean possibly increasing the number of
7053 inputs by one (namely 'output' in addition), which might pose
 7054 too complicated a problem for reload to solve. E.g. this situation:
7055
7056 asm ("" : "=r" (output), "=m" (input) : "0" (input))
7057
84fbffb2 7058 Here 'input' is used in two occurrences as input (once for the
53220215 7059 input operand, once for the address in the second output operand).
fa10beec 7060 If we replaced only the occurrence of the input operand (to
53220215
MM
 7061 make the operands match) we would be left with this:
7062
7063 output = input
7064 asm ("" : "=r" (output), "=m" (input) : "0" (output))
7065
7066 Now we suddenly have two different input values (containing the same
7067 value, but different pseudos) where we formerly had only one.
7068 With more complicated asms this might lead to reload failures
 7069 which wouldn't have happened without this pass. So, iterate over
84fbffb2 7070 all operands and replace all occurrences of the register used. */
53220215 7071 for (j = 0; j < noutputs; j++)
1596d61e 7072 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
53220215
MM
7073 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
7074 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
7075 input, output);
7076 for (j = 0; j < ninputs; j++)
7077 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
7078 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
7079 input, output);
7080
d8d72314
PB
7081 changed = true;
7082 }
7083
7084 if (changed)
7085 df_insn_rescan (insn);
7086}
7087
7088static unsigned
7089rest_of_match_asm_constraints (void)
7090{
7091 basic_block bb;
7092 rtx insn, pat, *p_sets;
7093 int noutputs;
7094
e3b5732b 7095 if (!crtl->has_asm_statement)
d8d72314
PB
7096 return 0;
7097
7098 df_set_flags (DF_DEFER_INSN_RESCAN);
7099 FOR_EACH_BB (bb)
7100 {
7101 FOR_BB_INSNS (bb, insn)
7102 {
7103 if (!INSN_P (insn))
7104 continue;
7105
7106 pat = PATTERN (insn);
7107 if (GET_CODE (pat) == PARALLEL)
7108 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
7109 else if (GET_CODE (pat) == SET)
7110 p_sets = &PATTERN (insn), noutputs = 1;
7111 else
7112 continue;
7113
7114 if (GET_CODE (*p_sets) == SET
7115 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
7116 match_asm_constraints_1 (insn, p_sets, noutputs);
7117 }
7118 }
7119
7120 return TODO_df_finish;
7121}
7122
8ddbbcae 7123struct rtl_opt_pass pass_match_asm_constraints =
d8d72314 7124{
8ddbbcae
JH
7125 {
7126 RTL_PASS,
d8d72314
PB
7127 "asmcons", /* name */
7128 NULL, /* gate */
7129 rest_of_match_asm_constraints, /* execute */
7130 NULL, /* sub */
7131 NULL, /* next */
7132 0, /* static_pass_number */
7072a650 7133 TV_NONE, /* tv_id */
d8d72314
PB
7134 0, /* properties_required */
7135 0, /* properties_provided */
7136 0, /* properties_destroyed */
7137 0, /* todo_flags_start */
22c5fa5f 7138 0 /* todo_flags_finish */
8ddbbcae 7139 }
d8d72314 7140};
6fb5fa3c 7141
faed5cc3 7142
e2500fed 7143#include "gt-function.h"