]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/function.c
decl.c, [...]: Remove redundant enum from machine_mode.
[thirdparty/gcc.git] / gcc / function.c
CommitLineData
5e6908ea 1/* Expands front end tree to back end RTL for GCC.
23a5b65a 2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
6f086dfc 3
1322177d 4This file is part of GCC.
6f086dfc 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
6f086dfc 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
6f086dfc
RS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
6f086dfc 19
6f086dfc
RS
20/* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
8fff4fc1 32 not get a hard register. */
6f086dfc
RS
33
34#include "config.h"
670ee920 35#include "system.h"
4977bab6
ZW
36#include "coretypes.h"
37#include "tm.h"
0cbd9993 38#include "rtl-error.h"
6f086dfc 39#include "tree.h"
d8a2d370
DN
40#include "stor-layout.h"
41#include "varasm.h"
42#include "stringpool.h"
6f086dfc 43#include "flags.h"
1ef08c63 44#include "except.h"
83685514
AM
45#include "hashtab.h"
46#include "hash-set.h"
47#include "vec.h"
48#include "machmode.h"
49#include "hard-reg-set.h"
50#include "input.h"
6f086dfc 51#include "function.h"
6f086dfc 52#include "expr.h"
c6b97fac 53#include "optabs.h"
e78d8e51 54#include "libfuncs.h"
6f086dfc 55#include "regs.h"
6f086dfc
RS
56#include "insn-config.h"
57#include "recog.h"
58#include "output.h"
b1474bb7 59#include "tm_p.h"
7afff7cf 60#include "langhooks.h"
61f71b34 61#include "target.h"
677f3fa8 62#include "common/common-target.h"
2fb9a547 63#include "gimple-expr.h"
45b0be94 64#include "gimplify.h"
ef330312 65#include "tree-pass.h"
7d69de61 66#include "predict.h"
60393bbc
AM
67#include "dominance.h"
68#include "cfg.h"
69#include "cfgrtl.h"
70#include "cfganal.h"
71#include "cfgbuild.h"
72#include "cfgcleanup.h"
73#include "basic-block.h"
6fb5fa3c 74#include "df.h"
ffe14686
AM
75#include "params.h"
76#include "bb-reorder.h"
f30e25a3 77#include "shrink-wrap.h"
b9b5f433 78#include "toplev.h"
b8704801 79#include "rtl-iter.h"
7d69de61 80
5576d6f2
TT
81/* So we can assign to cfun in this file. */
82#undef cfun
83
95f3f59e
JDA
84#ifndef STACK_ALIGNMENT_NEEDED
85#define STACK_ALIGNMENT_NEEDED 1
86#endif
87
975f3818
RS
88#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
89
6f086dfc
RS
90/* Round a value to the lowest integer less than it that is a multiple of
91 the required alignment. Avoid using division in case the value is
92 negative. Assume the alignment is a power of two. */
93#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
94
95/* Similar, but round to the next highest integer that meets the
96 alignment. */
97#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
98
6f086dfc 99/* Nonzero once virtual register instantiation has been done.
c39ada04
DD
100 assign_stack_local uses frame_pointer_rtx when this is nonzero.
101 calls.c:emit_library_call_value_1 uses it to set up
102 post-instantiation libcalls. */
103int virtuals_instantiated;
6f086dfc 104
df696a75 105/* Assign unique numbers to labels generated for profiling, debugging, etc. */
17211ab5 106static GTY(()) int funcdef_no;
f6f315fe 107
414c4dc4
NC
108/* These variables hold pointers to functions to create and destroy
109 target specific, per-function data structures. */
fa8db1f7 110struct machine_function * (*init_machine_status) (void);
46766466 111
b384405b 112/* The currently compiled function. */
01d939e8 113struct function *cfun = 0;
b384405b 114
cd9c1ca8
RH
115/* These hashes record the prologue and epilogue insns. */
116static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
117 htab_t prologue_insn_hash;
118static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
119 htab_t epilogue_insn_hash;
6f086dfc 120\f
b646ba3f 121
2a22f99c 122hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
9771b263 123vec<tree, va_gc> *types_used_by_cur_var_decl;
b646ba3f 124
e15679f8
RK
125/* Forward declarations. */
126
fa8db1f7 127static struct temp_slot *find_temp_slot_from_address (rtx);
fa8db1f7 128static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
ef4bddc2 129static void pad_below (struct args_size *, machine_mode, tree);
691fe203 130static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
fa8db1f7
AJ
131static int all_blocks (tree, tree *);
132static tree *get_block_vector (tree, int *);
133extern tree debug_find_var_in_block_tree (tree, tree);
1f52178b 134/* We always define `record_insns' even if it's not used so that we
ec97b83a 135 can always export `prologue_epilogue_contains'. */
dc01c3d1 136static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
cd9c1ca8 137static bool contains (const_rtx, htab_t);
db2960f4 138static void prepare_function_start (void);
fa8db1f7
AJ
139static void do_clobber_return_reg (rtx, void *);
140static void do_use_return_reg (rtx, void *);
c20bf1f3 141\f
936fc9ba
JH
142/* Stack of nested functions. */
143/* Keep track of the cfun stack. */
e5e809f4 144
936fc9ba 145typedef struct function *function_p;
e5e809f4 146
9771b263 147static vec<function_p> function_context_stack;
6f086dfc
RS
148
149/* Save the current context for compilation of a nested function.
d2784db4 150 This is called from language-specific code. */
6f086dfc
RS
151
152void
d2784db4 153push_function_context (void)
6f086dfc 154{
01d939e8 155 if (cfun == 0)
182e0d71 156 allocate_struct_function (NULL, false);
b384405b 157
9771b263 158 function_context_stack.safe_push (cfun);
db2960f4 159 set_cfun (NULL);
6f086dfc
RS
160}
161
162/* Restore the last saved context, at the end of a nested function.
163 This function is called from language-specific code. */
164
165void
d2784db4 166pop_function_context (void)
6f086dfc 167{
9771b263 168 struct function *p = function_context_stack.pop ();
db2960f4 169 set_cfun (p);
6f086dfc 170 current_function_decl = p->decl;
6f086dfc 171
6f086dfc 172 /* Reset variables that have known state during rtx generation. */
6f086dfc 173 virtuals_instantiated = 0;
1b3d8f8a 174 generating_concat_p = 1;
6f086dfc 175}
e4a4639e 176
fa51b01b
RH
177/* Clear out all parts of the state in F that can safely be discarded
178 after the function has been parsed, but not compiled, to let
179 garbage collection reclaim the memory. */
180
181void
fa8db1f7 182free_after_parsing (struct function *f)
fa51b01b 183{
e8924938 184 f->language = 0;
fa51b01b
RH
185}
186
e2ecd91c
BS
187/* Clear out all parts of the state in F that can safely be discarded
188 after the function has been compiled, to let garbage collection
0a8a198c 189 reclaim the memory. */
21cd906e 190
e2ecd91c 191void
fa8db1f7 192free_after_compilation (struct function *f)
e2ecd91c 193{
cd9c1ca8
RH
194 prologue_insn_hash = NULL;
195 epilogue_insn_hash = NULL;
196
04695783 197 free (crtl->emit.regno_pointer_align);
f995dcfe 198
3e029763 199 memset (crtl, 0, sizeof (struct rtl_data));
e2500fed 200 f->eh = NULL;
e2500fed 201 f->machine = NULL;
997de8ed 202 f->cfg = NULL;
fa51b01b 203
57b9e367 204 regno_reg_rtx = NULL;
e2ecd91c 205}
6f086dfc 206\f
49ad7cfa
BS
207/* Return size needed for stack frame based on slots so far allocated.
208 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
209 the caller may have to do that. */
9fb798d7 210
49ad7cfa 211HOST_WIDE_INT
fa8db1f7 212get_frame_size (void)
49ad7cfa 213{
bd60bab2
JH
214 if (FRAME_GROWS_DOWNWARD)
215 return -frame_offset;
216 else
217 return frame_offset;
49ad7cfa
BS
218}
219
9fb798d7
EB
220/* Issue an error message and return TRUE if frame OFFSET overflows in
221 the signed target pointer arithmetics for function FUNC. Otherwise
222 return FALSE. */
223
224bool
225frame_offset_overflow (HOST_WIDE_INT offset, tree func)
b8698a0f 226{
9fb798d7
EB
227 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
228
229 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
230 /* Leave room for the fixed part of the frame. */
231 - 64 * UNITS_PER_WORD)
232 {
c5d75364
MLI
233 error_at (DECL_SOURCE_LOCATION (func),
234 "total size of local objects too large");
9fb798d7
EB
235 return TRUE;
236 }
237
238 return FALSE;
239}
240
76fe54f0
L
241/* Return stack slot alignment in bits for TYPE and MODE. */
242
243static unsigned int
ef4bddc2 244get_stack_local_alignment (tree type, machine_mode mode)
76fe54f0
L
245{
246 unsigned int alignment;
247
248 if (mode == BLKmode)
249 alignment = BIGGEST_ALIGNMENT;
250 else
251 alignment = GET_MODE_ALIGNMENT (mode);
252
253 /* Allow the frond-end to (possibly) increase the alignment of this
254 stack slot. */
255 if (! type)
256 type = lang_hooks.types.type_for_mode (mode, 0);
257
258 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
259}
260
56731d64
BS
261/* Determine whether it is possible to fit a stack slot of size SIZE and
262 alignment ALIGNMENT into an area in the stack frame that starts at
263 frame offset START and has a length of LENGTH. If so, store the frame
264 offset to be used for the stack slot in *POFFSET and return true;
265 return false otherwise. This function will extend the frame size when
266 given a start/length pair that lies at the end of the frame. */
267
268static bool
269try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
270 HOST_WIDE_INT size, unsigned int alignment,
271 HOST_WIDE_INT *poffset)
272{
273 HOST_WIDE_INT this_frame_offset;
274 int frame_off, frame_alignment, frame_phase;
275
276 /* Calculate how many bytes the start of local variables is off from
277 stack alignment. */
278 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
279 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
280 frame_phase = frame_off ? frame_alignment - frame_off : 0;
281
282 /* Round the frame offset to the specified alignment. */
283
284 /* We must be careful here, since FRAME_OFFSET might be negative and
285 division with a negative dividend isn't as well defined as we might
286 like. So we instead assume that ALIGNMENT is a power of two and
287 use logical operations which are unambiguous. */
288 if (FRAME_GROWS_DOWNWARD)
289 this_frame_offset
290 = (FLOOR_ROUND (start + length - size - frame_phase,
291 (unsigned HOST_WIDE_INT) alignment)
292 + frame_phase);
293 else
294 this_frame_offset
295 = (CEIL_ROUND (start - frame_phase,
296 (unsigned HOST_WIDE_INT) alignment)
297 + frame_phase);
298
299 /* See if it fits. If this space is at the edge of the frame,
300 consider extending the frame to make it fit. Our caller relies on
301 this when allocating a new slot. */
302 if (frame_offset == start && this_frame_offset < frame_offset)
303 frame_offset = this_frame_offset;
304 else if (this_frame_offset < start)
305 return false;
306 else if (start + length == frame_offset
307 && this_frame_offset + size > start + length)
308 frame_offset = this_frame_offset + size;
309 else if (this_frame_offset + size > start + length)
310 return false;
311
312 *poffset = this_frame_offset;
313 return true;
314}
315
316/* Create a new frame_space structure describing free space in the stack
317 frame beginning at START and ending at END, and chain it into the
318 function's frame_space_list. */
319
320static void
321add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
322{
766090c2 323 struct frame_space *space = ggc_alloc<frame_space> ();
56731d64
BS
324 space->next = crtl->frame_space_list;
325 crtl->frame_space_list = space;
326 space->start = start;
327 space->length = end - start;
328}
329
6f086dfc
RS
330/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
331 with machine mode MODE.
718fe406 332
6f086dfc
RS
333 ALIGN controls the amount of alignment for the address of the slot:
334 0 means according to MODE,
335 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
cfa29a4c 336 -2 means use BITS_PER_UNIT,
6f086dfc
RS
337 positive specifies alignment boundary in bits.
338
80a832cd
JJ
339 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
340 alignment and ASLK_RECORD_PAD bit set if we should remember
341 extra space we allocated for alignment purposes. When we are
342 called from assign_stack_temp_for_type, it is not set so we don't
343 track the same stack slot in two independent lists.
2e3f842f 344
bd60bab2 345 We do not round to stack_boundary here. */
6f086dfc 346
bd60bab2 347rtx
ef4bddc2 348assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
80a832cd 349 int align, int kind)
6f086dfc 350{
b3694847 351 rtx x, addr;
6f086dfc 352 int bigend_correction = 0;
427188d5 353 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
76fe54f0 354 unsigned int alignment, alignment_in_bits;
6f086dfc
RS
355
356 if (align == 0)
357 {
76fe54f0 358 alignment = get_stack_local_alignment (NULL, mode);
d16790f2 359 alignment /= BITS_PER_UNIT;
6f086dfc
RS
360 }
361 else if (align == -1)
362 {
363 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
364 size = CEIL_ROUND (size, alignment);
365 }
cfa29a4c
EB
366 else if (align == -2)
367 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
6f086dfc
RS
368 else
369 alignment = align / BITS_PER_UNIT;
370
2e3f842f
L
371 alignment_in_bits = alignment * BITS_PER_UNIT;
372
2e3f842f
L
373 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
374 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
375 {
376 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
377 alignment = alignment_in_bits / BITS_PER_UNIT;
378 }
a0871656 379
2e3f842f
L
380 if (SUPPORTS_STACK_ALIGNMENT)
381 {
382 if (crtl->stack_alignment_estimated < alignment_in_bits)
383 {
384 if (!crtl->stack_realign_processed)
385 crtl->stack_alignment_estimated = alignment_in_bits;
386 else
387 {
388 /* If stack is realigned and stack alignment value
389 hasn't been finalized, it is OK not to increase
390 stack_alignment_estimated. The bigger alignment
391 requirement is recorded in stack_alignment_needed
392 below. */
393 gcc_assert (!crtl->stack_realign_finalized);
394 if (!crtl->stack_realign_needed)
395 {
396 /* It is OK to reduce the alignment as long as the
397 requested size is 0 or the estimated stack
398 alignment >= mode alignment. */
80a832cd 399 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
2e3f842f
L
400 || size == 0
401 || (crtl->stack_alignment_estimated
402 >= GET_MODE_ALIGNMENT (mode)));
403 alignment_in_bits = crtl->stack_alignment_estimated;
404 alignment = alignment_in_bits / BITS_PER_UNIT;
405 }
406 }
407 }
408 }
76fe54f0
L
409
410 if (crtl->stack_alignment_needed < alignment_in_bits)
411 crtl->stack_alignment_needed = alignment_in_bits;
f85882d8
JY
412 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
413 crtl->max_used_stack_slot_alignment = alignment_in_bits;
a0871656 414
56731d64
BS
415 if (mode != BLKmode || size != 0)
416 {
80a832cd 417 if (kind & ASLK_RECORD_PAD)
56731d64 418 {
80a832cd
JJ
419 struct frame_space **psp;
420
421 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
422 {
423 struct frame_space *space = *psp;
424 if (!try_fit_stack_local (space->start, space->length, size,
425 alignment, &slot_offset))
426 continue;
427 *psp = space->next;
428 if (slot_offset > space->start)
429 add_frame_space (space->start, slot_offset);
430 if (slot_offset + size < space->start + space->length)
431 add_frame_space (slot_offset + size,
432 space->start + space->length);
433 goto found_space;
434 }
56731d64
BS
435 }
436 }
437 else if (!STACK_ALIGNMENT_NEEDED)
438 {
439 slot_offset = frame_offset;
440 goto found_space;
441 }
442
443 old_frame_offset = frame_offset;
444
445 if (FRAME_GROWS_DOWNWARD)
446 {
447 frame_offset -= size;
448 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
58dbcf05 449
80a832cd
JJ
450 if (kind & ASLK_RECORD_PAD)
451 {
452 if (slot_offset > frame_offset)
453 add_frame_space (frame_offset, slot_offset);
454 if (slot_offset + size < old_frame_offset)
455 add_frame_space (slot_offset + size, old_frame_offset);
456 }
56731d64
BS
457 }
458 else
95f3f59e 459 {
56731d64
BS
460 frame_offset += size;
461 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
462
80a832cd
JJ
463 if (kind & ASLK_RECORD_PAD)
464 {
465 if (slot_offset > old_frame_offset)
466 add_frame_space (old_frame_offset, slot_offset);
467 if (slot_offset + size < frame_offset)
468 add_frame_space (slot_offset + size, frame_offset);
469 }
95f3f59e 470 }
6f086dfc 471
56731d64 472 found_space:
6f086dfc
RS
473 /* On a big-endian machine, if we are allocating more space than we will use,
474 use the least significant bytes of those that are allocated. */
d70eadf7 475 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
6f086dfc 476 bigend_correction = size - GET_MODE_SIZE (mode);
6f086dfc 477
6f086dfc
RS
478 /* If we have already instantiated virtual registers, return the actual
479 address relative to the frame pointer. */
bd60bab2 480 if (virtuals_instantiated)
0a81f074 481 addr = plus_constant (Pmode, frame_pointer_rtx,
c41536f5 482 trunc_int_for_mode
56731d64 483 (slot_offset + bigend_correction
c41536f5 484 + STARTING_FRAME_OFFSET, Pmode));
6f086dfc 485 else
0a81f074 486 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
c41536f5 487 trunc_int_for_mode
56731d64 488 (slot_offset + bigend_correction,
c41536f5 489 Pmode));
6f086dfc 490
38a448ca 491 x = gen_rtx_MEM (mode, addr);
76fe54f0 492 set_mem_align (x, alignment_in_bits);
be0c514c 493 MEM_NOTRAP_P (x) = 1;
6f086dfc 494
bd60bab2
JH
495 stack_slot_list
496 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
e2ecd91c 497
bd60bab2
JH
498 if (frame_offset_overflow (frame_offset, current_function_decl))
499 frame_offset = 0;
9070115b 500
6f086dfc
RS
501 return x;
502}
2e3f842f
L
503
504/* Wrap up assign_stack_local_1 with last parameter as false. */
505
506rtx
ef4bddc2 507assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
2e3f842f 508{
80a832cd 509 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
2e3f842f 510}
0aea6467 511\f
fb0703f7
SB
512/* In order to evaluate some expressions, such as function calls returning
513 structures in memory, we need to temporarily allocate stack locations.
514 We record each allocated temporary in the following structure.
515
516 Associated with each temporary slot is a nesting level. When we pop up
517 one level, all temporaries associated with the previous level are freed.
518 Normally, all temporaries are freed after the execution of the statement
519 in which they were created. However, if we are inside a ({...}) grouping,
520 the result may be in a temporary and hence must be preserved. If the
521 result could be in a temporary, we preserve it if we can determine which
522 one it is in. If we cannot determine which temporary may contain the
523 result, all temporaries are preserved. A temporary is preserved by
9474e8ab 524 pretending it was allocated at the previous nesting level. */
fb0703f7 525
d1b38208 526struct GTY(()) temp_slot {
fb0703f7
SB
527 /* Points to next temporary slot. */
528 struct temp_slot *next;
529 /* Points to previous temporary slot. */
530 struct temp_slot *prev;
531 /* The rtx to used to reference the slot. */
532 rtx slot;
fb0703f7
SB
533 /* The size, in units, of the slot. */
534 HOST_WIDE_INT size;
535 /* The type of the object in the slot, or zero if it doesn't correspond
536 to a type. We use this to determine whether a slot can be reused.
537 It can be reused if objects of the type of the new slot will always
538 conflict with objects of the type of the old slot. */
539 tree type;
8f5929e1
JJ
540 /* The alignment (in bits) of the slot. */
541 unsigned int align;
fb0703f7
SB
542 /* Nonzero if this temporary is currently in use. */
543 char in_use;
fb0703f7
SB
544 /* Nesting level at which this slot is being used. */
545 int level;
fb0703f7
SB
546 /* The offset of the slot from the frame_pointer, including extra space
547 for alignment. This info is for combine_temp_slots. */
548 HOST_WIDE_INT base_offset;
549 /* The size of the slot, including extra space for alignment. This
550 info is for combine_temp_slots. */
551 HOST_WIDE_INT full_size;
552};
553
2a22f99c
TS
554/* Entry for the below hash table. */
555struct GTY((for_user)) temp_slot_address_entry {
fb0703f7
SB
556 hashval_t hash;
557 rtx address;
558 struct temp_slot *temp_slot;
559};
560
2a22f99c
TS
561struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
562{
563 static hashval_t hash (temp_slot_address_entry *);
564 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
565};
566
567/* A table of addresses that represent a stack slot. The table is a mapping
568 from address RTXen to a temp slot. */
569static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
570static size_t n_temp_slots_in_use;
571
0aea6467
ZD
572/* Removes temporary slot TEMP from LIST. */
573
574static void
575cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
576{
577 if (temp->next)
578 temp->next->prev = temp->prev;
579 if (temp->prev)
580 temp->prev->next = temp->next;
581 else
582 *list = temp->next;
583
584 temp->prev = temp->next = NULL;
585}
586
587/* Inserts temporary slot TEMP to LIST. */
588
589static void
590insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
591{
592 temp->next = *list;
593 if (*list)
594 (*list)->prev = temp;
595 temp->prev = NULL;
596 *list = temp;
597}
598
599/* Returns the list of used temp slots at LEVEL. */
600
601static struct temp_slot **
602temp_slots_at_level (int level)
603{
9771b263
DN
604 if (level >= (int) vec_safe_length (used_temp_slots))
605 vec_safe_grow_cleared (used_temp_slots, level + 1);
0aea6467 606
9771b263 607 return &(*used_temp_slots)[level];
0aea6467
ZD
608}
609
610/* Returns the maximal temporary slot level. */
611
612static int
613max_slot_level (void)
614{
615 if (!used_temp_slots)
616 return -1;
617
9771b263 618 return used_temp_slots->length () - 1;
0aea6467
ZD
619}
620
621/* Moves temporary slot TEMP to LEVEL. */
622
623static void
624move_slot_to_level (struct temp_slot *temp, int level)
625{
626 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
627 insert_slot_to_list (temp, temp_slots_at_level (level));
628 temp->level = level;
629}
630
631/* Make temporary slot TEMP available. */
632
633static void
634make_slot_available (struct temp_slot *temp)
635{
636 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
637 insert_slot_to_list (temp, &avail_temp_slots);
638 temp->in_use = 0;
639 temp->level = -1;
f8395d62 640 n_temp_slots_in_use--;
0aea6467 641}
fb0703f7
SB
642
643/* Compute the hash value for an address -> temp slot mapping.
644 The value is cached on the mapping entry. */
645static hashval_t
646temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
647{
648 int do_not_record = 0;
649 return hash_rtx (t->address, GET_MODE (t->address),
650 &do_not_record, NULL, false);
651}
652
653/* Return the hash value for an address -> temp slot mapping. */
2a22f99c
TS
654hashval_t
655temp_address_hasher::hash (temp_slot_address_entry *t)
fb0703f7 656{
fb0703f7
SB
657 return t->hash;
658}
659
660/* Compare two address -> temp slot mapping entries. */
2a22f99c
TS
661bool
662temp_address_hasher::equal (temp_slot_address_entry *t1,
663 temp_slot_address_entry *t2)
fb0703f7 664{
fb0703f7
SB
665 return exp_equiv_p (t1->address, t2->address, 0, true);
666}
667
668/* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */
669static void
670insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
671{
766090c2 672 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
fb0703f7
SB
673 t->address = address;
674 t->temp_slot = temp_slot;
675 t->hash = temp_slot_address_compute_hash (t);
2a22f99c 676 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
fb0703f7
SB
677}
678
679/* Remove an address -> temp slot mapping entry if the temp slot is
680 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
2a22f99c
TS
681int
682remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
fb0703f7 683{
2a22f99c 684 const struct temp_slot_address_entry *t = *slot;
fb0703f7 685 if (! t->temp_slot->in_use)
2a22f99c 686 temp_slot_address_table->clear_slot (slot);
fb0703f7
SB
687 return 1;
688}
689
690/* Remove all mappings of addresses to unused temp slots. */
691static void
692remove_unused_temp_slot_addresses (void)
693{
f8395d62
MM
694 /* Use quicker clearing if there aren't any active temp slots. */
695 if (n_temp_slots_in_use)
2a22f99c
TS
696 temp_slot_address_table->traverse
697 <void *, remove_unused_temp_slot_addresses_1> (NULL);
f8395d62 698 else
2a22f99c 699 temp_slot_address_table->empty ();
fb0703f7
SB
700}
701
702/* Find the temp slot corresponding to the object at address X. */
703
704static struct temp_slot *
705find_temp_slot_from_address (rtx x)
706{
707 struct temp_slot *p;
708 struct temp_slot_address_entry tmp, *t;
709
710 /* First try the easy way:
711 See if X exists in the address -> temp slot mapping. */
712 tmp.address = x;
713 tmp.temp_slot = NULL;
714 tmp.hash = temp_slot_address_compute_hash (&tmp);
2a22f99c 715 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
fb0703f7
SB
716 if (t)
717 return t->temp_slot;
718
719 /* If we have a sum involving a register, see if it points to a temp
720 slot. */
721 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
722 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
723 return p;
724 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
725 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
726 return p;
727
728 /* Last resort: Address is a virtual stack var address. */
729 if (GET_CODE (x) == PLUS
730 && XEXP (x, 0) == virtual_stack_vars_rtx
481683e1 731 && CONST_INT_P (XEXP (x, 1)))
fb0703f7
SB
732 {
733 int i;
734 for (i = max_slot_level (); i >= 0; i--)
735 for (p = *temp_slots_at_level (i); p; p = p->next)
736 {
737 if (INTVAL (XEXP (x, 1)) >= p->base_offset
738 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
739 return p;
740 }
741 }
742
743 return NULL;
744}
6f086dfc
RS
745\f
746/* Allocate a temporary stack slot and record it for possible later
747 reuse.
748
749 MODE is the machine mode to be given to the returned rtx.
750
751 SIZE is the size in units of the space required. We do no rounding here
752 since assign_stack_local will do any required rounding.
753
a4c6502a 754 TYPE is the type that will be used for the stack slot. */
6f086dfc 755
a06ef755 756rtx
ef4bddc2 757assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
9474e8ab 758 tree type)
6f086dfc 759{
74e2819c 760 unsigned int align;
0aea6467 761 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
faa964e5 762 rtx slot;
6f086dfc 763
303ec2aa
RK
764 /* If SIZE is -1 it means that somebody tried to allocate a temporary
765 of a variable size. */
0bccc606 766 gcc_assert (size != -1);
303ec2aa 767
76fe54f0 768 align = get_stack_local_alignment (type, mode);
d16790f2
JW
769
770 /* Try to find an available, already-allocated temporary of the proper
771 mode which meets the size and alignment requirements. Choose the
3e8b0446 772 smallest one with the closest alignment.
b8698a0f 773
3e8b0446
ZD
774 If assign_stack_temp is called outside of the tree->rtl expansion,
775 we cannot reuse the stack slots (that may still refer to
776 VIRTUAL_STACK_VARS_REGNUM). */
777 if (!virtuals_instantiated)
0aea6467 778 {
3e8b0446 779 for (p = avail_temp_slots; p; p = p->next)
0aea6467 780 {
3e8b0446
ZD
781 if (p->align >= align && p->size >= size
782 && GET_MODE (p->slot) == mode
783 && objects_must_conflict_p (p->type, type)
784 && (best_p == 0 || best_p->size > p->size
785 || (best_p->size == p->size && best_p->align > p->align)))
0aea6467 786 {
3e8b0446
ZD
787 if (p->align == align && p->size == size)
788 {
789 selected = p;
790 cut_slot_from_list (selected, &avail_temp_slots);
791 best_p = 0;
792 break;
793 }
794 best_p = p;
0aea6467 795 }
0aea6467
ZD
796 }
797 }
6f086dfc
RS
798
799 /* Make our best, if any, the one to use. */
800 if (best_p)
a45035b6 801 {
0aea6467
ZD
802 selected = best_p;
803 cut_slot_from_list (selected, &avail_temp_slots);
804
a45035b6
JW
805 /* If there are enough aligned bytes left over, make them into a new
806 temp_slot so that the extra bytes don't get wasted. Do this only
807 for BLKmode slots, so that we can be sure of the alignment. */
3bdf5ad1 808 if (GET_MODE (best_p->slot) == BLKmode)
a45035b6 809 {
d16790f2 810 int alignment = best_p->align / BITS_PER_UNIT;
e5e809f4 811 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
a45035b6
JW
812
813 if (best_p->size - rounded_size >= alignment)
814 {
766090c2 815 p = ggc_alloc<temp_slot> ();
9474e8ab 816 p->in_use = 0;
a45035b6 817 p->size = best_p->size - rounded_size;
307d8cd6
RK
818 p->base_offset = best_p->base_offset + rounded_size;
819 p->full_size = best_p->full_size - rounded_size;
be0c514c 820 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
d16790f2 821 p->align = best_p->align;
1da68f56 822 p->type = best_p->type;
0aea6467 823 insert_slot_to_list (p, &avail_temp_slots);
a45035b6 824
38a448ca
RH
825 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
826 stack_slot_list);
a45035b6
JW
827
828 best_p->size = rounded_size;
291dde90 829 best_p->full_size = rounded_size;
a45035b6
JW
830 }
831 }
a45035b6 832 }
718fe406 833
6f086dfc 834 /* If we still didn't find one, make a new temporary. */
0aea6467 835 if (selected == 0)
6f086dfc 836 {
e5e809f4
JL
837 HOST_WIDE_INT frame_offset_old = frame_offset;
838
766090c2 839 p = ggc_alloc<temp_slot> ();
e5e809f4 840
c87a0a39
JL
841 /* We are passing an explicit alignment request to assign_stack_local.
842 One side effect of that is assign_stack_local will not round SIZE
843 to ensure the frame offset remains suitably aligned.
844
845 So for requests which depended on the rounding of SIZE, we go ahead
846 and round it now. We also make sure ALIGNMENT is at least
847 BIGGEST_ALIGNMENT. */
0bccc606 848 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
80a832cd
JJ
849 p->slot = assign_stack_local_1 (mode,
850 (mode == BLKmode
851 ? CEIL_ROUND (size,
852 (int) align
853 / BITS_PER_UNIT)
854 : size),
855 align, 0);
d16790f2
JW
856
857 p->align = align;
e5e809f4 858
b2a80c0d
DE
859 /* The following slot size computation is necessary because we don't
860 know the actual size of the temporary slot until assign_stack_local
861 has performed all the frame alignment and size rounding for the
fc91b0d0
RK
862 requested temporary. Note that extra space added for alignment
863 can be either above or below this stack slot depending on which
864 way the frame grows. We include the extra space if and only if it
865 is above this slot. */
f62c8a5c
JJ
866 if (FRAME_GROWS_DOWNWARD)
867 p->size = frame_offset_old - frame_offset;
868 else
869 p->size = size;
e5e809f4 870
fc91b0d0 871 /* Now define the fields used by combine_temp_slots. */
f62c8a5c
JJ
872 if (FRAME_GROWS_DOWNWARD)
873 {
874 p->base_offset = frame_offset;
875 p->full_size = frame_offset_old - frame_offset;
876 }
877 else
878 {
879 p->base_offset = frame_offset_old;
880 p->full_size = frame_offset - frame_offset_old;
881 }
0aea6467
ZD
882
883 selected = p;
6f086dfc
RS
884 }
885
0aea6467 886 p = selected;
6f086dfc 887 p->in_use = 1;
1da68f56 888 p->type = type;
7efcb746 889 p->level = temp_slot_level;
f8395d62 890 n_temp_slots_in_use++;
1995f267 891
0aea6467
ZD
892 pp = temp_slots_at_level (p->level);
893 insert_slot_to_list (p, pp);
fb0703f7 894 insert_temp_slot_address (XEXP (p->slot, 0), p);
faa964e5
UW
895
896 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
897 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
898 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
3bdf5ad1 899
1da68f56
RK
900 /* If we know the alias set for the memory that will be used, use
901 it. If there's no TYPE, then we don't know anything about the
902 alias set for the memory. */
faa964e5
UW
903 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
904 set_mem_align (slot, align);
1da68f56 905
30f7a378 906 /* If a type is specified, set the relevant flags. */
3bdf5ad1 907 if (type != 0)
55356334 908 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
be0c514c 909 MEM_NOTRAP_P (slot) = 1;
3bdf5ad1 910
faa964e5 911 return slot;
6f086dfc 912}
d16790f2
JW
913
914/* Allocate a temporary stack slot and record it for possible later
9474e8ab 915 reuse. First two arguments are same as in preceding function. */
d16790f2
JW
916
917rtx
ef4bddc2 918assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
d16790f2 919{
9474e8ab 920 return assign_stack_temp_for_type (mode, size, NULL_TREE);
d16790f2 921}
638141a6 922\f
9432c136
EB
923/* Assign a temporary.
924 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
925 and so that should be used in error messages. In either case, we
926 allocate of the given type.
230f21b4 927 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
b55d9ff8
RK
928 it is 0 if a register is OK.
929 DONT_PROMOTE is 1 if we should not promote values in register
930 to wider modes. */
230f21b4
PB
931
932rtx
9474e8ab 933assign_temp (tree type_or_decl, int memory_required,
fa8db1f7 934 int dont_promote ATTRIBUTE_UNUSED)
230f21b4 935{
9432c136 936 tree type, decl;
ef4bddc2 937 machine_mode mode;
9e1622ed 938#ifdef PROMOTE_MODE
9432c136
EB
939 int unsignedp;
940#endif
941
942 if (DECL_P (type_or_decl))
943 decl = type_or_decl, type = TREE_TYPE (decl);
944 else
945 decl = NULL, type = type_or_decl;
946
947 mode = TYPE_MODE (type);
9e1622ed 948#ifdef PROMOTE_MODE
8df83eae 949 unsignedp = TYPE_UNSIGNED (type);
0ce8a59c 950#endif
638141a6 951
230f21b4
PB
952 if (mode == BLKmode || memory_required)
953 {
e5e809f4 954 HOST_WIDE_INT size = int_size_in_bytes (type);
230f21b4
PB
955 rtx tmp;
956
44affdae
JH
957 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
958 problems with allocating the stack space. */
959 if (size == 0)
960 size = 1;
961
230f21b4 962 /* Unfortunately, we don't yet know how to allocate variable-sized
a441447f
OH
963 temporaries. However, sometimes we can find a fixed upper limit on
964 the size, so try that instead. */
965 else if (size == -1)
966 size = max_int_size_in_bytes (type);
e30bb772 967
9432c136
EB
968 /* The size of the temporary may be too large to fit into an integer. */
969 /* ??? Not sure this should happen except for user silliness, so limit
797a6ac1 970 this to things that aren't compiler-generated temporaries. The
535a42b1 971 rest of the time we'll die in assign_stack_temp_for_type. */
9432c136
EB
972 if (decl && size == -1
973 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
974 {
dee15844 975 error ("size of variable %q+D is too large", decl);
9432c136
EB
976 size = 1;
977 }
978
9474e8ab 979 tmp = assign_stack_temp_for_type (mode, size, type);
230f21b4
PB
980 return tmp;
981 }
638141a6 982
9e1622ed 983#ifdef PROMOTE_MODE
b55d9ff8 984 if (! dont_promote)
cde0f3fd 985 mode = promote_mode (type, mode, &unsignedp);
230f21b4 986#endif
638141a6 987
230f21b4
PB
988 return gen_reg_rtx (mode);
989}
638141a6 990\f
a45035b6
JW
991/* Combine temporary stack slots which are adjacent on the stack.
992
993 This allows for better use of already allocated stack space. This is only
994 done for BLKmode slots because we can be sure that we won't have alignment
995 problems in this case. */
996
6fe79279 997static void
fa8db1f7 998combine_temp_slots (void)
a45035b6 999{
0aea6467 1000 struct temp_slot *p, *q, *next, *next_q;
e5e809f4
JL
1001 int num_slots;
1002
a4c6502a
MM
1003 /* We can't combine slots, because the information about which slot
1004 is in which alias set will be lost. */
1005 if (flag_strict_aliasing)
1006 return;
1007
718fe406 1008 /* If there are a lot of temp slots, don't do anything unless
d6a7951f 1009 high levels of optimization. */
e5e809f4 1010 if (! flag_expensive_optimizations)
0aea6467 1011 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
e5e809f4
JL
1012 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1013 return;
a45035b6 1014
0aea6467 1015 for (p = avail_temp_slots; p; p = next)
e9b7093a
RS
1016 {
1017 int delete_p = 0;
e5e809f4 1018
0aea6467
ZD
1019 next = p->next;
1020
1021 if (GET_MODE (p->slot) != BLKmode)
1022 continue;
1023
1024 for (q = p->next; q; q = next_q)
e9b7093a 1025 {
0aea6467
ZD
1026 int delete_q = 0;
1027
1028 next_q = q->next;
1029
1030 if (GET_MODE (q->slot) != BLKmode)
1031 continue;
1032
1033 if (p->base_offset + p->full_size == q->base_offset)
1034 {
1035 /* Q comes after P; combine Q into P. */
1036 p->size += q->size;
1037 p->full_size += q->full_size;
1038 delete_q = 1;
1039 }
1040 else if (q->base_offset + q->full_size == p->base_offset)
1041 {
1042 /* P comes after Q; combine P into Q. */
1043 q->size += p->size;
1044 q->full_size += p->full_size;
1045 delete_p = 1;
1046 break;
1047 }
1048 if (delete_q)
1049 cut_slot_from_list (q, &avail_temp_slots);
e9b7093a 1050 }
0aea6467
ZD
1051
1052 /* Either delete P or advance past it. */
1053 if (delete_p)
1054 cut_slot_from_list (p, &avail_temp_slots);
e9b7093a 1055 }
a45035b6 1056}
6f086dfc 1057\f
82d6e6fc
KG
1058/* Indicate that NEW_RTX is an alternate way of referring to the temp
1059 slot that previously was known by OLD_RTX. */
e5e76139
RK
1060
1061void
82d6e6fc 1062update_temp_slot_address (rtx old_rtx, rtx new_rtx)
e5e76139 1063{
14a774a9 1064 struct temp_slot *p;
e5e76139 1065
82d6e6fc 1066 if (rtx_equal_p (old_rtx, new_rtx))
e5e76139 1067 return;
14a774a9 1068
82d6e6fc 1069 p = find_temp_slot_from_address (old_rtx);
14a774a9 1070
82d6e6fc
KG
1071 /* If we didn't find one, see if both OLD_RTX is a PLUS. If so, and
1072 NEW_RTX is a register, see if one operand of the PLUS is a
1073 temporary location. If so, NEW_RTX points into it. Otherwise,
1074 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1075 in common between them. If so, try a recursive call on those
1076 values. */
14a774a9
RK
1077 if (p == 0)
1078 {
82d6e6fc 1079 if (GET_CODE (old_rtx) != PLUS)
700f19f0
RK
1080 return;
1081
82d6e6fc 1082 if (REG_P (new_rtx))
700f19f0 1083 {
82d6e6fc
KG
1084 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1085 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
700f19f0
RK
1086 return;
1087 }
82d6e6fc 1088 else if (GET_CODE (new_rtx) != PLUS)
14a774a9
RK
1089 return;
1090
82d6e6fc
KG
1091 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1092 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1093 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1094 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1095 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1096 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1097 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1098 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
14a774a9
RK
1099
1100 return;
1101 }
1102
718fe406 1103 /* Otherwise add an alias for the temp's address. */
fb0703f7 1104 insert_temp_slot_address (new_rtx, p);
e5e76139
RK
1105}
1106
9cca6a99
MS
1107/* If X could be a reference to a temporary slot, mark that slot as
1108 belonging to the to one level higher than the current level. If X
1109 matched one of our slots, just mark that one. Otherwise, we can't
9474e8ab 1110 easily predict which it is, so upgrade all of them.
6f086dfc
RS
1111
1112 This is called when an ({...}) construct occurs and a statement
1113 returns a value in memory. */
1114
1115void
fa8db1f7 1116preserve_temp_slots (rtx x)
6f086dfc 1117{
0aea6467 1118 struct temp_slot *p = 0, *next;
6f086dfc 1119
e3a77161 1120 if (x == 0)
9474e8ab 1121 return;
f7b6d104 1122
8fff4fc1 1123 /* If X is a register that is being used as a pointer, see if we have
9474e8ab 1124 a temporary slot we know it points to. */
8fff4fc1
RH
1125 if (REG_P (x) && REG_POINTER (x))
1126 p = find_temp_slot_from_address (x);
f7b6d104 1127
8fff4fc1 1128 /* If X is not in memory or is at a constant address, it cannot be in
9474e8ab 1129 a temporary slot. */
8fff4fc1 1130 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
9474e8ab 1131 return;
8fff4fc1
RH
1132
1133 /* First see if we can find a match. */
1134 if (p == 0)
1135 p = find_temp_slot_from_address (XEXP (x, 0));
1136
1137 if (p != 0)
1138 {
8fff4fc1 1139 if (p->level == temp_slot_level)
9474e8ab 1140 move_slot_to_level (p, temp_slot_level - 1);
8fff4fc1 1141 return;
f7b6d104 1142 }
e9a25f70 1143
8fff4fc1
RH
1144 /* Otherwise, preserve all non-kept slots at this level. */
1145 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
e9a25f70 1146 {
8fff4fc1 1147 next = p->next;
9474e8ab 1148 move_slot_to_level (p, temp_slot_level - 1);
8fff4fc1 1149 }
fe9b4957
MM
1150}
1151
8fff4fc1
RH
1152/* Free all temporaries used so far. This is normally called at the
1153 end of generating code for a statement. */
fe9b4957 1154
8fff4fc1
RH
1155void
1156free_temp_slots (void)
fe9b4957 1157{
8fff4fc1 1158 struct temp_slot *p, *next;
5d7cefe5 1159 bool some_available = false;
fe9b4957 1160
8fff4fc1
RH
1161 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1162 {
1163 next = p->next;
9474e8ab
MM
1164 make_slot_available (p);
1165 some_available = true;
8fff4fc1 1166 }
fe9b4957 1167
5d7cefe5
MM
1168 if (some_available)
1169 {
1170 remove_unused_temp_slot_addresses ();
1171 combine_temp_slots ();
1172 }
8fff4fc1 1173}
fe9b4957 1174
8fff4fc1 1175/* Push deeper into the nesting level for stack temporaries. */
fe9b4957 1176
8fff4fc1
RH
1177void
1178push_temp_slots (void)
fe9b4957 1179{
8fff4fc1 1180 temp_slot_level++;
fe9b4957
MM
1181}
1182
8fff4fc1
RH
1183/* Pop a temporary nesting level. All slots in use in the current level
1184 are freed. */
fe9b4957 1185
8fff4fc1
RH
1186void
1187pop_temp_slots (void)
fe9b4957 1188{
9474e8ab 1189 free_temp_slots ();
8fff4fc1 1190 temp_slot_level--;
8c36698e
NC
1191}
1192
8fff4fc1 1193/* Initialize temporary slots. */
e9a25f70
JL
1194
1195void
8fff4fc1 1196init_temp_slots (void)
e9a25f70 1197{
8fff4fc1
RH
1198 /* We have not allocated any temporaries yet. */
1199 avail_temp_slots = 0;
9771b263 1200 vec_alloc (used_temp_slots, 0);
8fff4fc1 1201 temp_slot_level = 0;
f8395d62 1202 n_temp_slots_in_use = 0;
fb0703f7
SB
1203
1204 /* Set up the table to map addresses to temp slots. */
1205 if (! temp_slot_address_table)
2a22f99c 1206 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
fb0703f7 1207 else
2a22f99c 1208 temp_slot_address_table->empty ();
8fff4fc1
RH
1209}
1210\f
6399c0ab
SB
1211/* Functions and data structures to keep track of the values hard regs
1212 had at the start of the function. */
1213
1214/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1215 and has_hard_reg_initial_val.. */
1216typedef struct GTY(()) initial_value_pair {
1217 rtx hard_reg;
1218 rtx pseudo;
1219} initial_value_pair;
1220/* ??? This could be a VEC but there is currently no way to define an
1221 opaque VEC type. This could be worked around by defining struct
1222 initial_value_pair in function.h. */
1223typedef struct GTY(()) initial_value_struct {
1224 int num_entries;
1225 int max_entries;
1226 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1227} initial_value_struct;
1228
1229/* If a pseudo represents an initial hard reg (or expression), return
1230 it, else return NULL_RTX. */
1231
1232rtx
1233get_hard_reg_initial_reg (rtx reg)
1234{
1235 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1236 int i;
1237
1238 if (ivs == 0)
1239 return NULL_RTX;
1240
1241 for (i = 0; i < ivs->num_entries; i++)
1242 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1243 return ivs->entries[i].hard_reg;
1244
1245 return NULL_RTX;
1246}
1247
1248/* Make sure that there's a pseudo register of mode MODE that stores the
1249 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1250
1251rtx
ef4bddc2 1252get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
6399c0ab
SB
1253{
1254 struct initial_value_struct *ivs;
1255 rtx rv;
1256
1257 rv = has_hard_reg_initial_val (mode, regno);
1258 if (rv)
1259 return rv;
1260
1261 ivs = crtl->hard_reg_initial_vals;
1262 if (ivs == 0)
1263 {
766090c2 1264 ivs = ggc_alloc<initial_value_struct> ();
6399c0ab
SB
1265 ivs->num_entries = 0;
1266 ivs->max_entries = 5;
766090c2 1267 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
6399c0ab
SB
1268 crtl->hard_reg_initial_vals = ivs;
1269 }
1270
1271 if (ivs->num_entries >= ivs->max_entries)
1272 {
1273 ivs->max_entries += 5;
1274 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1275 ivs->max_entries);
1276 }
1277
1278 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1279 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1280
1281 return ivs->entries[ivs->num_entries++].pseudo;
1282}
1283
1284/* See if get_hard_reg_initial_val has been used to create a pseudo
1285 for the initial value of hard register REGNO in mode MODE. Return
1286 the associated pseudo if so, otherwise return NULL. */
1287
1288rtx
ef4bddc2 1289has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
6399c0ab
SB
1290{
1291 struct initial_value_struct *ivs;
1292 int i;
1293
1294 ivs = crtl->hard_reg_initial_vals;
1295 if (ivs != 0)
1296 for (i = 0; i < ivs->num_entries; i++)
1297 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1298 && REGNO (ivs->entries[i].hard_reg) == regno)
1299 return ivs->entries[i].pseudo;
1300
1301 return NULL_RTX;
1302}
1303
1304unsigned int
1305emit_initial_value_sets (void)
1306{
1307 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1308 int i;
691fe203 1309 rtx_insn *seq;
6399c0ab
SB
1310
1311 if (ivs == 0)
1312 return 0;
1313
1314 start_sequence ();
1315 for (i = 0; i < ivs->num_entries; i++)
1316 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1317 seq = get_insns ();
1318 end_sequence ();
1319
1320 emit_insn_at_entry (seq);
1321 return 0;
1322}
1323
1324/* Return the hardreg-pseudoreg initial values pair entry I and
1325 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1326bool
1327initial_value_entry (int i, rtx *hreg, rtx *preg)
1328{
1329 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1330 if (!ivs || i >= ivs->num_entries)
1331 return false;
1332
1333 *hreg = ivs->entries[i].hard_reg;
1334 *preg = ivs->entries[i].pseudo;
1335 return true;
1336}
1337\f
8fff4fc1
RH
1338/* These routines are responsible for converting virtual register references
1339 to the actual hard register references once RTL generation is complete.
718fe406 1340
8fff4fc1
RH
1341 The following four variables are used for communication between the
1342 routines. They contain the offsets of the virtual registers from their
1343 respective hard registers. */
fe9b4957 1344
8fff4fc1
RH
1345static int in_arg_offset;
1346static int var_offset;
1347static int dynamic_offset;
1348static int out_arg_offset;
1349static int cfa_offset;
8a5275eb 1350
8fff4fc1
RH
1351/* In most machines, the stack pointer register is equivalent to the bottom
1352 of the stack. */
718fe406 1353
8fff4fc1
RH
1354#ifndef STACK_POINTER_OFFSET
1355#define STACK_POINTER_OFFSET 0
1356#endif
8c36698e 1357
ddbb449f
AM
1358#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1359#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1360#endif
1361
8fff4fc1
RH
1362/* If not defined, pick an appropriate default for the offset of dynamically
1363 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
ddbb449f 1364 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
fe9b4957 1365
8fff4fc1 1366#ifndef STACK_DYNAMIC_OFFSET
8a5275eb 1367
8fff4fc1
RH
1368/* The bottom of the stack points to the actual arguments. If
1369 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1370 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1371 stack space for register parameters is not pushed by the caller, but
1372 rather part of the fixed stack areas and hence not included in
38173d38 1373 `crtl->outgoing_args_size'. Nevertheless, we must allow
8fff4fc1 1374 for it when allocating stack dynamic objects. */
8a5275eb 1375
ddbb449f 1376#ifdef INCOMING_REG_PARM_STACK_SPACE
8fff4fc1
RH
1377#define STACK_DYNAMIC_OFFSET(FNDECL) \
1378((ACCUMULATE_OUTGOING_ARGS \
38173d38 1379 ? (crtl->outgoing_args_size \
81464b2c 1380 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
ddbb449f 1381 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
ac294f0b 1382 : 0) + (STACK_POINTER_OFFSET))
8fff4fc1
RH
1383#else
1384#define STACK_DYNAMIC_OFFSET(FNDECL) \
38173d38 1385((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
8fff4fc1
RH
1386 + (STACK_POINTER_OFFSET))
1387#endif
1388#endif
4fa48eae 1389
659e47fb 1390\f
bbf9b913
RH
1391/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1392 is a virtual register, return the equivalent hard register and set the
1393 offset indirectly through the pointer. Otherwise, return 0. */
6f086dfc 1394
bbf9b913
RH
1395static rtx
1396instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
6f086dfc 1397{
82d6e6fc 1398 rtx new_rtx;
bbf9b913 1399 HOST_WIDE_INT offset;
6f086dfc 1400
bbf9b913 1401 if (x == virtual_incoming_args_rtx)
2e3f842f 1402 {
d015f7cc 1403 if (stack_realign_drap)
2e3f842f 1404 {
d015f7cc
L
1405 /* Replace virtual_incoming_args_rtx with internal arg
1406 pointer if DRAP is used to realign stack. */
82d6e6fc 1407 new_rtx = crtl->args.internal_arg_pointer;
2e3f842f
L
1408 offset = 0;
1409 }
1410 else
82d6e6fc 1411 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
2e3f842f 1412 }
bbf9b913 1413 else if (x == virtual_stack_vars_rtx)
82d6e6fc 1414 new_rtx = frame_pointer_rtx, offset = var_offset;
bbf9b913 1415 else if (x == virtual_stack_dynamic_rtx)
82d6e6fc 1416 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
bbf9b913 1417 else if (x == virtual_outgoing_args_rtx)
82d6e6fc 1418 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
bbf9b913 1419 else if (x == virtual_cfa_rtx)
f6672e8e
RH
1420 {
1421#ifdef FRAME_POINTER_CFA_OFFSET
82d6e6fc 1422 new_rtx = frame_pointer_rtx;
f6672e8e 1423#else
82d6e6fc 1424 new_rtx = arg_pointer_rtx;
f6672e8e
RH
1425#endif
1426 offset = cfa_offset;
1427 }
32990d5b
JJ
1428 else if (x == virtual_preferred_stack_boundary_rtx)
1429 {
1430 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1431 offset = 0;
1432 }
bbf9b913
RH
1433 else
1434 return NULL_RTX;
6f086dfc 1435
bbf9b913 1436 *poffset = offset;
82d6e6fc 1437 return new_rtx;
6f086dfc
RS
1438}
1439
b8704801
RS
1440/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1441 registers present inside of *LOC. The expression is simplified,
1442 as much as possible, but is not to be considered "valid" in any sense
1443 implied by the target. Return true if any change is made. */
6f086dfc 1444
b8704801
RS
1445static bool
1446instantiate_virtual_regs_in_rtx (rtx *loc)
6f086dfc 1447{
b8704801
RS
1448 if (!*loc)
1449 return false;
1450 bool changed = false;
1451 subrtx_ptr_iterator::array_type array;
1452 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
6f086dfc 1453 {
b8704801
RS
1454 rtx *loc = *iter;
1455 if (rtx x = *loc)
bbf9b913 1456 {
b8704801
RS
1457 rtx new_rtx;
1458 HOST_WIDE_INT offset;
1459 switch (GET_CODE (x))
1460 {
1461 case REG:
1462 new_rtx = instantiate_new_reg (x, &offset);
1463 if (new_rtx)
1464 {
1465 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1466 changed = true;
1467 }
1468 iter.skip_subrtxes ();
1469 break;
bbf9b913 1470
b8704801
RS
1471 case PLUS:
1472 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1473 if (new_rtx)
1474 {
1475 XEXP (x, 0) = new_rtx;
1476 *loc = plus_constant (GET_MODE (x), x, offset, true);
1477 changed = true;
1478 iter.skip_subrtxes ();
1479 break;
1480 }
e5e809f4 1481
b8704801
RS
1482 /* FIXME -- from old code */
1483 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1484 we can commute the PLUS and SUBREG because pointers into the
1485 frame are well-behaved. */
1486 break;
ce717ce4 1487
b8704801
RS
1488 default:
1489 break;
1490 }
1491 }
6f086dfc 1492 }
b8704801 1493 return changed;
6f086dfc
RS
1494}
1495
bbf9b913
RH
1496/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1497 matches the predicate for insn CODE operand OPERAND. */
6f086dfc 1498
bbf9b913
RH
1499static int
1500safe_insn_predicate (int code, int operand, rtx x)
6f086dfc 1501{
2ef6ce06 1502 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
bbf9b913 1503}
5a73491b 1504
bbf9b913
RH
1505/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1506 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1507
1508static void
691fe203 1509instantiate_virtual_regs_in_insn (rtx_insn *insn)
5a73491b 1510{
bbf9b913
RH
1511 HOST_WIDE_INT offset;
1512 int insn_code, i;
9325973e 1513 bool any_change = false;
691fe203
DM
1514 rtx set, new_rtx, x;
1515 rtx_insn *seq;
32e66afd 1516
bbf9b913
RH
1517 /* There are some special cases to be handled first. */
1518 set = single_set (insn);
1519 if (set)
32e66afd 1520 {
bbf9b913
RH
1521 /* We're allowed to assign to a virtual register. This is interpreted
1522 to mean that the underlying register gets assigned the inverse
1523 transformation. This is used, for example, in the handling of
1524 non-local gotos. */
82d6e6fc
KG
1525 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1526 if (new_rtx)
bbf9b913
RH
1527 {
1528 start_sequence ();
32e66afd 1529
b8704801 1530 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
82d6e6fc 1531 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
69a59f0f 1532 gen_int_mode (-offset, GET_MODE (new_rtx)));
82d6e6fc
KG
1533 x = force_operand (x, new_rtx);
1534 if (x != new_rtx)
1535 emit_move_insn (new_rtx, x);
5a73491b 1536
bbf9b913
RH
1537 seq = get_insns ();
1538 end_sequence ();
5a73491b 1539
bbf9b913
RH
1540 emit_insn_before (seq, insn);
1541 delete_insn (insn);
1542 return;
1543 }
5a73491b 1544
bbf9b913
RH
1545 /* Handle a straight copy from a virtual register by generating a
1546 new add insn. The difference between this and falling through
1547 to the generic case is avoiding a new pseudo and eliminating a
1548 move insn in the initial rtl stream. */
82d6e6fc
KG
1549 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1550 if (new_rtx && offset != 0
bbf9b913
RH
1551 && REG_P (SET_DEST (set))
1552 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1553 {
1554 start_sequence ();
5a73491b 1555
2f1cd2eb
RS
1556 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1557 gen_int_mode (offset,
1558 GET_MODE (SET_DEST (set))),
1559 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1560 if (x != SET_DEST (set))
1561 emit_move_insn (SET_DEST (set), x);
770ae6cc 1562
bbf9b913
RH
1563 seq = get_insns ();
1564 end_sequence ();
87ce34d6 1565
bbf9b913
RH
1566 emit_insn_before (seq, insn);
1567 delete_insn (insn);
87ce34d6 1568 return;
bbf9b913 1569 }
5a73491b 1570
bbf9b913 1571 extract_insn (insn);
9325973e 1572 insn_code = INSN_CODE (insn);
5a73491b 1573
bbf9b913
RH
1574 /* Handle a plus involving a virtual register by determining if the
1575 operands remain valid if they're modified in place. */
1576 if (GET_CODE (SET_SRC (set)) == PLUS
1577 && recog_data.n_operands >= 3
1578 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1579 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
481683e1 1580 && CONST_INT_P (recog_data.operand[2])
82d6e6fc 1581 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
bbf9b913
RH
1582 {
1583 offset += INTVAL (recog_data.operand[2]);
5a73491b 1584
bbf9b913 1585 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1586 if (offset == 0
1587 && REG_P (SET_DEST (set))
1588 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1589 {
1590 start_sequence ();
82d6e6fc 1591 emit_move_insn (SET_DEST (set), new_rtx);
bbf9b913
RH
1592 seq = get_insns ();
1593 end_sequence ();
d1405722 1594
bbf9b913
RH
1595 emit_insn_before (seq, insn);
1596 delete_insn (insn);
1597 return;
1598 }
d1405722 1599
bbf9b913 1600 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1601
1602 /* Using validate_change and apply_change_group here leaves
1603 recog_data in an invalid state. Since we know exactly what
1604 we want to check, do those two by hand. */
82d6e6fc 1605 if (safe_insn_predicate (insn_code, 1, new_rtx)
bbf9b913
RH
1606 && safe_insn_predicate (insn_code, 2, x))
1607 {
82d6e6fc 1608 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
bbf9b913
RH
1609 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1610 any_change = true;
9325973e
RH
1611
1612 /* Fall through into the regular operand fixup loop in
1613 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1614 }
1615 }
1616 }
d1405722 1617 else
9325973e
RH
1618 {
1619 extract_insn (insn);
1620 insn_code = INSN_CODE (insn);
1621 }
5dc96d60 1622
bbf9b913
RH
1623 /* In the general case, we expect virtual registers to appear only in
1624 operands, and then only as either bare registers or inside memories. */
1625 for (i = 0; i < recog_data.n_operands; ++i)
1626 {
1627 x = recog_data.operand[i];
1628 switch (GET_CODE (x))
1629 {
1630 case MEM:
1631 {
1632 rtx addr = XEXP (x, 0);
bbf9b913 1633
b8704801 1634 if (!instantiate_virtual_regs_in_rtx (&addr))
bbf9b913
RH
1635 continue;
1636
1637 start_sequence ();
23b33725 1638 x = replace_equiv_address (x, addr, true);
a5bfb13a
MM
1639 /* It may happen that the address with the virtual reg
1640 was valid (e.g. based on the virtual stack reg, which might
1641 be acceptable to the predicates with all offsets), whereas
1642	 the address now isn't valid anymore, for instance when the address
1643	 still has an offset, but the base reg isn't virtual-stack-reg
1644 anymore. Below we would do a force_reg on the whole operand,
1645 but this insn might actually only accept memory. Hence,
1646 before doing that last resort, try to reload the address into
1647 a register, so this operand stays a MEM. */
1648 if (!safe_insn_predicate (insn_code, i, x))
1649 {
1650 addr = force_reg (GET_MODE (addr), addr);
23b33725 1651 x = replace_equiv_address (x, addr, true);
a5bfb13a 1652 }
bbf9b913
RH
1653 seq = get_insns ();
1654 end_sequence ();
1655 if (seq)
1656 emit_insn_before (seq, insn);
1657 }
1658 break;
1659
1660 case REG:
82d6e6fc
KG
1661 new_rtx = instantiate_new_reg (x, &offset);
1662 if (new_rtx == NULL)
bbf9b913
RH
1663 continue;
1664 if (offset == 0)
82d6e6fc 1665 x = new_rtx;
bbf9b913
RH
1666 else
1667 {
1668 start_sequence ();
6f086dfc 1669
bbf9b913
RH
1670 /* Careful, special mode predicates may have stuff in
1671 insn_data[insn_code].operand[i].mode that isn't useful
1672 to us for computing a new value. */
1673 /* ??? Recognize address_operand and/or "p" constraints
1674	    to see if (plus new offset) is a valid address before we put
1675 this through expand_simple_binop. */
82d6e6fc 1676 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
2f1cd2eb
RS
1677 gen_int_mode (offset, GET_MODE (x)),
1678 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1679 seq = get_insns ();
1680 end_sequence ();
1681 emit_insn_before (seq, insn);
1682 }
1683 break;
6f086dfc 1684
bbf9b913 1685 case SUBREG:
82d6e6fc
KG
1686 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1687 if (new_rtx == NULL)
bbf9b913
RH
1688 continue;
1689 if (offset != 0)
1690 {
1691 start_sequence ();
2f1cd2eb
RS
1692 new_rtx = expand_simple_binop
1693 (GET_MODE (new_rtx), PLUS, new_rtx,
1694 gen_int_mode (offset, GET_MODE (new_rtx)),
1695 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1696 seq = get_insns ();
1697 end_sequence ();
1698 emit_insn_before (seq, insn);
1699 }
82d6e6fc
KG
1700 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1701 GET_MODE (new_rtx), SUBREG_BYTE (x));
7314c7dd 1702 gcc_assert (x);
bbf9b913 1703 break;
6f086dfc 1704
bbf9b913
RH
1705 default:
1706 continue;
1707 }
6f086dfc 1708
bbf9b913
RH
1709 /* At this point, X contains the new value for the operand.
1710 Validate the new value vs the insn predicate. Note that
1711 asm insns will have insn_code -1 here. */
1712 if (!safe_insn_predicate (insn_code, i, x))
6ba1bd36
JM
1713 {
1714 start_sequence ();
f7ce0951
SE
1715 if (REG_P (x))
1716 {
1717 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1718 x = copy_to_reg (x);
1719 }
1720 else
1721 x = force_reg (insn_data[insn_code].operand[i].mode, x);
6ba1bd36
JM
1722 seq = get_insns ();
1723 end_sequence ();
1724 if (seq)
1725 emit_insn_before (seq, insn);
1726 }
6f086dfc 1727
bbf9b913
RH
1728 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1729 any_change = true;
1730 }
6f086dfc 1731
bbf9b913
RH
1732 if (any_change)
1733 {
1734 /* Propagate operand changes into the duplicates. */
1735 for (i = 0; i < recog_data.n_dups; ++i)
1736 *recog_data.dup_loc[i]
3e916873 1737 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
5dc96d60 1738
bbf9b913
RH
1739 /* Force re-recognition of the instruction for validation. */
1740 INSN_CODE (insn) = -1;
1741 }
6f086dfc 1742
bbf9b913 1743 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1744 {
bbf9b913 1745 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1746 {
bbf9b913 1747 error_for_asm (insn, "impossible constraint in %<asm%>");
5a860835
JJ
1748 /* For asm goto, instead of fixing up all the edges
1749 just clear the template and clear input operands
1750 (asm goto doesn't have any output operands). */
1751 if (JUMP_P (insn))
1752 {
1753 rtx asm_op = extract_asm_operands (PATTERN (insn));
1754 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1755 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1756 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1757 }
1758 else
1759 delete_insn (insn);
bbf9b913
RH
1760 }
1761 }
1762 else
1763 {
1764 if (recog_memoized (insn) < 0)
1765 fatal_insn_not_found (insn);
1766 }
1767}
14a774a9 1768
bbf9b913
RH
1769/* Subroutine of instantiate_decls. Given RTL representing a decl,
1770 do any instantiation required. */
14a774a9 1771
e41b2a33
PB
1772void
1773instantiate_decl_rtl (rtx x)
bbf9b913
RH
1774{
1775 rtx addr;
6f086dfc 1776
bbf9b913
RH
1777 if (x == 0)
1778 return;
6f086dfc 1779
bbf9b913
RH
1780 /* If this is a CONCAT, recurse for the pieces. */
1781 if (GET_CODE (x) == CONCAT)
1782 {
e41b2a33
PB
1783 instantiate_decl_rtl (XEXP (x, 0));
1784 instantiate_decl_rtl (XEXP (x, 1));
bbf9b913
RH
1785 return;
1786 }
6f086dfc 1787
bbf9b913
RH
1788 /* If this is not a MEM, no need to do anything. Similarly if the
1789 address is a constant or a register that is not a virtual register. */
1790 if (!MEM_P (x))
1791 return;
6f086dfc 1792
bbf9b913
RH
1793 addr = XEXP (x, 0);
1794 if (CONSTANT_P (addr)
1795 || (REG_P (addr)
1796 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1797 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1798 return;
6f086dfc 1799
b8704801 1800 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
bbf9b913 1801}
6f086dfc 1802
434eba35
JJ
1803/* Helper for instantiate_decls called via walk_tree: Process all decls
1804 in the given DECL_VALUE_EXPR. */
1805
1806static tree
1807instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1808{
1809 tree t = *tp;
726a989a 1810 if (! EXPR_P (t))
434eba35
JJ
1811 {
1812 *walk_subtrees = 0;
37d6a488
AO
1813 if (DECL_P (t))
1814 {
1815 if (DECL_RTL_SET_P (t))
1816 instantiate_decl_rtl (DECL_RTL (t));
1817 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1818 && DECL_INCOMING_RTL (t))
1819 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1820 if ((TREE_CODE (t) == VAR_DECL
1821 || TREE_CODE (t) == RESULT_DECL)
1822 && DECL_HAS_VALUE_EXPR_P (t))
1823 {
1824 tree v = DECL_VALUE_EXPR (t);
1825 walk_tree (&v, instantiate_expr, NULL, NULL);
1826 }
1827 }
434eba35
JJ
1828 }
1829 return NULL;
1830}
1831
bbf9b913
RH
1832/* Subroutine of instantiate_decls: Process all decls in the given
1833 BLOCK node and all its subblocks. */
6f086dfc 1834
bbf9b913
RH
1835static void
1836instantiate_decls_1 (tree let)
1837{
1838 tree t;
6f086dfc 1839
910ad8de 1840 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
434eba35
JJ
1841 {
1842 if (DECL_RTL_SET_P (t))
e41b2a33 1843 instantiate_decl_rtl (DECL_RTL (t));
434eba35
JJ
1844 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1845 {
1846 tree v = DECL_VALUE_EXPR (t);
1847 walk_tree (&v, instantiate_expr, NULL, NULL);
1848 }
1849 }
6f086dfc 1850
bbf9b913 1851 /* Process all subblocks. */
87caf699 1852 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
bbf9b913
RH
1853 instantiate_decls_1 (t);
1854}
6f086dfc 1855
bbf9b913
RH
1856/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1857 all virtual registers in their DECL_RTL's. */
6f086dfc 1858
bbf9b913
RH
1859static void
1860instantiate_decls (tree fndecl)
1861{
c021f10b
NF
1862 tree decl;
1863 unsigned ix;
6f086dfc 1864
bbf9b913 1865 /* Process all parameters of the function. */
910ad8de 1866 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
bbf9b913 1867 {
e41b2a33
PB
1868 instantiate_decl_rtl (DECL_RTL (decl));
1869 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1870 if (DECL_HAS_VALUE_EXPR_P (decl))
1871 {
1872 tree v = DECL_VALUE_EXPR (decl);
1873 walk_tree (&v, instantiate_expr, NULL, NULL);
1874 }
bbf9b913 1875 }
4fd796bb 1876
37d6a488
AO
1877 if ((decl = DECL_RESULT (fndecl))
1878 && TREE_CODE (decl) == RESULT_DECL)
1879 {
1880 if (DECL_RTL_SET_P (decl))
1881 instantiate_decl_rtl (DECL_RTL (decl));
1882 if (DECL_HAS_VALUE_EXPR_P (decl))
1883 {
1884 tree v = DECL_VALUE_EXPR (decl);
1885 walk_tree (&v, instantiate_expr, NULL, NULL);
1886 }
1887 }
1888
3fd48b12
EB
1889 /* Process the saved static chain if it exists. */
1890 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1891 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1892 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1893
bbf9b913
RH
1894 /* Now process all variables defined in the function or its subblocks. */
1895 instantiate_decls_1 (DECL_INITIAL (fndecl));
802e9f8e 1896
c021f10b
NF
1897 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1898 if (DECL_RTL_SET_P (decl))
1899 instantiate_decl_rtl (DECL_RTL (decl));
9771b263 1900 vec_free (cfun->local_decls);
bbf9b913 1901}
6f086dfc 1902
bbf9b913
RH
1903/* Pass through the INSNS of function FNDECL and convert virtual register
1904 references to hard register references. */
6f086dfc 1905
c2924966 1906static unsigned int
bbf9b913
RH
1907instantiate_virtual_regs (void)
1908{
691fe203 1909 rtx_insn *insn;
6f086dfc 1910
bbf9b913
RH
1911 /* Compute the offsets to use for this function. */
1912 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1913 var_offset = STARTING_FRAME_OFFSET;
1914 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1915 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1916#ifdef FRAME_POINTER_CFA_OFFSET
1917 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1918#else
bbf9b913 1919 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1920#endif
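  /* Each virtual register encountered below is replaced by its
     corresponding hard base register plus one of the offsets computed
     above: incoming args, local variables, the dynamic stack area,
     outgoing args, or the CFA.  */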
e9a25f70 1921
bbf9b913
RH
1922 /* Initialize recognition, indicating that volatile is OK. */
1923 init_recog ();
6f086dfc 1924
bbf9b913
RH
1925 /* Scan through all the insns, instantiating every virtual register still
1926 present. */
45dbce1b
NF
1927 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1928 if (INSN_P (insn))
1929 {
1930 /* These patterns in the instruction stream can never be recognized.
1931 Fortunately, they shouldn't contain virtual registers either. */
39718607 1932 if (GET_CODE (PATTERN (insn)) == USE
45dbce1b 1933 || GET_CODE (PATTERN (insn)) == CLOBBER
45dbce1b
NF
1934 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1935 continue;
1936 else if (DEBUG_INSN_P (insn))
b8704801 1937 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
45dbce1b
NF
1938 else
1939 instantiate_virtual_regs_in_insn (insn);
ba4807a0 1940
4654c0cf 1941 if (insn->deleted ())
45dbce1b 1942 continue;
7114321e 1943
b8704801 1944 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
ba4807a0 1945
45dbce1b
NF
1946 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1947 if (CALL_P (insn))
b8704801 1948 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
45dbce1b 1949 }
6f086dfc 1950
bbf9b913
RH
1951 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1952 instantiate_decls (current_function_decl);
1953
e41b2a33
PB
1954 targetm.instantiate_decls ();
1955
bbf9b913
RH
1956 /* Indicate that, from now on, assign_stack_local should use
1957 frame_pointer_rtx. */
1958 virtuals_instantiated = 1;
d3c12306 1959
c2924966 1960 return 0;
6f086dfc 1961}
ef330312 1962
27a4cd48
DM
1963namespace {
1964
1965const pass_data pass_data_instantiate_virtual_regs =
1966{
1967 RTL_PASS, /* type */
1968 "vregs", /* name */
1969 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
1970 TV_NONE, /* tv_id */
1971 0, /* properties_required */
1972 0, /* properties_provided */
1973 0, /* properties_destroyed */
1974 0, /* todo_flags_start */
1975 0, /* todo_flags_finish */
ef330312
PB
1976};
1977
27a4cd48
DM
1978class pass_instantiate_virtual_regs : public rtl_opt_pass
1979{
1980public:
c3284718
RS
1981 pass_instantiate_virtual_regs (gcc::context *ctxt)
1982 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
27a4cd48
DM
1983 {}
1984
1985 /* opt_pass methods: */
be55bfe6
TS
1986 virtual unsigned int execute (function *)
1987 {
1988 return instantiate_virtual_regs ();
1989 }
27a4cd48
DM
1990
1991}; // class pass_instantiate_virtual_regs
1992
1993} // anon namespace
1994
1995rtl_opt_pass *
1996make_pass_instantiate_virtual_regs (gcc::context *ctxt)
1997{
1998 return new pass_instantiate_virtual_regs (ctxt);
1999}
2000
6f086dfc 2001\f
d181c154
RS
2002/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2003 This means a type for which function calls must pass an address to the
2004 function or get an address back from the function.
2005 EXP may be a type node or an expression (whose type is tested). */
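/* For example, on most targets a small struct such as 'struct { int i; }'
   fits in a return register and yields 0 here, while a large struct such
   as 'struct { char c[64]; }' must be returned in memory and yields 1.  */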
6f086dfc
RS
2006
2007int
586de218 2008aggregate_value_p (const_tree exp, const_tree fntype)
6f086dfc 2009{
d47d0a8d 2010 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
9d790a4f
RS
2011 int i, regno, nregs;
2012 rtx reg;
2f939d94 2013
61f71b34
DD
2014 if (fntype)
2015 switch (TREE_CODE (fntype))
2016 {
2017 case CALL_EXPR:
d47d0a8d
EB
2018 {
2019 tree fndecl = get_callee_fndecl (fntype);
2020 fntype = (fndecl
2021 ? TREE_TYPE (fndecl)
2022 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2023 }
61f71b34
DD
2024 break;
2025 case FUNCTION_DECL:
d47d0a8d 2026 fntype = TREE_TYPE (fntype);
61f71b34
DD
2027 break;
2028 case FUNCTION_TYPE:
2029 case METHOD_TYPE:
2030 break;
2031 case IDENTIFIER_NODE:
d47d0a8d 2032 fntype = NULL_TREE;
61f71b34
DD
2033 break;
2034 default:
d47d0a8d 2035 /* We don't expect other tree types here. */
0bccc606 2036 gcc_unreachable ();
61f71b34
DD
2037 }
2038
d47d0a8d 2039 if (VOID_TYPE_P (type))
d7bf8ada 2040 return 0;
500c353d 2041
ebf0bf7f
JJ
2042 /* If a record should be passed the same as its first (and only) member
2043 don't pass it as an aggregate. */
2044 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2045 return aggregate_value_p (first_field (type), fntype);
2046
cc77ae10
JM
2047 /* If the front end has decided that this needs to be passed by
2048 reference, do so. */
2049 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2050 && DECL_BY_REFERENCE (exp))
2051 return 1;
500c353d 2052
d47d0a8d
EB
2053 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2054 if (fntype && TREE_ADDRESSABLE (fntype))
500c353d 2055 return 1;
b8698a0f 2056
956d6950 2057 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
2058 and thus can't be returned in registers. */
2059 if (TREE_ADDRESSABLE (type))
2060 return 1;
d47d0a8d 2061
05e3bdb9 2062 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 2063 return 1;
d47d0a8d
EB
2064
2065 if (targetm.calls.return_in_memory (type, fntype))
2066 return 1;
2067
9d790a4f
RS
2068 /* Make sure we have suitable call-clobbered regs to return
2069 the value in; if not, we must return it in memory. */
1d636cc6 2070 reg = hard_function_value (type, 0, fntype, 0);
e71f7aa5
JW
2071
2072 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2073 it is OK. */
f8cfc6aa 2074 if (!REG_P (reg))
e71f7aa5
JW
2075 return 0;
2076
9d790a4f 2077 regno = REGNO (reg);
66fd46b6 2078 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
2079 for (i = 0; i < nregs; i++)
2080 if (! call_used_regs[regno + i])
2081 return 1;
d47d0a8d 2082
6f086dfc
RS
2083 return 0;
2084}
2085\f
8fff4fc1
RH
2086/* Return true if we should assign DECL a pseudo register; false if it
2087 should live on the local stack. */
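/* Roughly: volatile, addressable and BLKmode decls stay on the stack;
   when optimizing, most other scalars get a pseudo; at -O0 only decls
   that are ignored for debugging or explicitly marked 'register' (with
   the exceptions below) are given a pseudo.  */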
2088
2089bool
fa233e34 2090use_register_for_decl (const_tree decl)
8fff4fc1 2091{
c3284718 2092 if (!targetm.calls.allocate_stack_slots_for_args ())
007e61c2 2093 return true;
b8698a0f 2094
8fff4fc1
RH
2095 /* Honor volatile. */
2096 if (TREE_SIDE_EFFECTS (decl))
2097 return false;
2098
2099 /* Honor addressability. */
2100 if (TREE_ADDRESSABLE (decl))
2101 return false;
2102
2103 /* Only register-like things go in registers. */
2104 if (DECL_MODE (decl) == BLKmode)
2105 return false;
2106
2107 /* If -ffloat-store specified, don't put explicit float variables
2108 into registers. */
2109 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2110 propagates values across these stores, and it probably shouldn't. */
2111 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2112 return false;
2113
78e0d62b
RH
2114 /* If we're not interested in tracking debugging information for
2115 this decl, then we can certainly put it in a register. */
2116 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
2117 return true;
2118
d130d647
JJ
2119 if (optimize)
2120 return true;
2121
2122 if (!DECL_REGISTER (decl))
2123 return false;
2124
2125 switch (TREE_CODE (TREE_TYPE (decl)))
2126 {
2127 case RECORD_TYPE:
2128 case UNION_TYPE:
2129 case QUAL_UNION_TYPE:
2130 /* When not optimizing, disregard register keyword for variables with
2131 types containing methods, otherwise the methods won't be callable
2132 from the debugger. */
2133 if (TYPE_METHODS (TREE_TYPE (decl)))
2134 return false;
2135 break;
2136 default:
2137 break;
2138 }
2139
2140 return true;
8fff4fc1
RH
2141}
2142
0976078c
RH
2143/* Return true if TYPE should be passed by invisible reference. */
2144
2145bool
ef4bddc2 2146pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
8cd5a4e0 2147 tree type, bool named_arg)
0976078c
RH
2148{
2149 if (type)
2150 {
2151 /* If this type contains non-trivial constructors, then it is
2152 forbidden for the middle-end to create any new copies. */
2153 if (TREE_ADDRESSABLE (type))
2154 return true;
2155
d58247a3
RH
2156 /* GCC post 3.4 passes *all* variable sized types by reference. */
2157 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
0976078c 2158 return true;
ebf0bf7f
JJ
2159
2160 /* If a record type should be passed the same as its first (and only)
2161 member, use the type and mode of that member. */
2162 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2163 {
2164 type = TREE_TYPE (first_field (type));
2165 mode = TYPE_MODE (type);
2166 }
0976078c
RH
2167 }
2168
d5cc9181
JR
2169 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2170 type, named_arg);
0976078c
RH
2171}
2172
6cdd5672
RH
2173/* Return true if TYPE, which is passed by reference, should be callee
2174 copied instead of caller copied. */
2175
2176bool
ef4bddc2 2177reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
6cdd5672
RH
2178 tree type, bool named_arg)
2179{
2180 if (type && TREE_ADDRESSABLE (type))
2181 return false;
d5cc9181
JR
2182 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2183 named_arg);
6cdd5672
RH
2184}
2185
6071dc7f
RH
2186/* Structures to communicate between the subroutines of assign_parms.
2187 The first holds data persistent across all parameters, the second
2188 is cleared out for each parameter. */
6f086dfc 2189
6071dc7f 2190struct assign_parm_data_all
6f086dfc 2191{
d5cc9181
JR
2192 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2193 should become a job of the target or otherwise encapsulated. */
2194 CUMULATIVE_ARGS args_so_far_v;
2195 cumulative_args_t args_so_far;
6f086dfc 2196 struct args_size stack_args_size;
6071dc7f
RH
2197 tree function_result_decl;
2198 tree orig_fnargs;
7a688d52
DM
2199 rtx_insn *first_conversion_insn;
2200 rtx_insn *last_conversion_insn;
6071dc7f
RH
2201 HOST_WIDE_INT pretend_args_size;
2202 HOST_WIDE_INT extra_pretend_bytes;
2203 int reg_parm_stack_space;
2204};
6f086dfc 2205
6071dc7f
RH
2206struct assign_parm_data_one
2207{
2208 tree nominal_type;
2209 tree passed_type;
2210 rtx entry_parm;
2211 rtx stack_parm;
ef4bddc2
RS
2212 machine_mode nominal_mode;
2213 machine_mode passed_mode;
2214 machine_mode promoted_mode;
6071dc7f
RH
2215 struct locate_and_pad_arg_data locate;
2216 int partial;
2217 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
2218 BOOL_BITFIELD passed_pointer : 1;
2219 BOOL_BITFIELD on_stack : 1;
2220 BOOL_BITFIELD loaded_in_reg : 1;
2221};
ebb904cb 2222
6071dc7f 2223/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 2224
6071dc7f
RH
2225static void
2226assign_parms_initialize_all (struct assign_parm_data_all *all)
2227{
fc2f1f53 2228 tree fntype ATTRIBUTE_UNUSED;
6f086dfc 2229
6071dc7f
RH
2230 memset (all, 0, sizeof (*all));
2231
2232 fntype = TREE_TYPE (current_function_decl);
2233
2234#ifdef INIT_CUMULATIVE_INCOMING_ARGS
d5cc9181 2235 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
6071dc7f 2236#else
d5cc9181 2237 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
6071dc7f
RH
2238 current_function_decl, -1);
2239#endif
d5cc9181 2240 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
6071dc7f 2241
ddbb449f
AM
2242#ifdef INCOMING_REG_PARM_STACK_SPACE
2243 all->reg_parm_stack_space
2244 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
6071dc7f
RH
2245#endif
2246}
6f086dfc 2247
6071dc7f
RH
2248/* If ARGS contains entries with complex types, split the entry into two
2249 entries of the component type. Return a new list of substitutions are
2250 needed, else the old list. */
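/* For example, a '_Complex double' parameter becomes two 'double'
   parameters, one for the real part and one for the imaginary part,
   when the target's split_complex_arg hook requests the split.  */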
2251
3b3f318a 2252static void
9771b263 2253split_complex_args (vec<tree> *args)
6071dc7f 2254{
3b3f318a 2255 unsigned i;
6071dc7f
RH
2256 tree p;
2257
9771b263 2258 FOR_EACH_VEC_ELT (*args, i, p)
6071dc7f
RH
2259 {
2260 tree type = TREE_TYPE (p);
2261 if (TREE_CODE (type) == COMPLEX_TYPE
2262 && targetm.calls.split_complex_arg (type))
2263 {
2264 tree decl;
2265 tree subtype = TREE_TYPE (type);
6ccd356e 2266 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
2267
2268 /* Rewrite the PARM_DECL's type with its component. */
3b3f318a 2269 p = copy_node (p);
6071dc7f
RH
2270 TREE_TYPE (p) = subtype;
2271 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2272 DECL_MODE (p) = VOIDmode;
2273 DECL_SIZE (p) = NULL;
2274 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
2275 /* If this arg must go in memory, put it in a pseudo here.
2276 We can't allow it to go in memory as per normal parms,
2277 because the usual place might not have the imag part
2278 adjacent to the real part. */
2279 DECL_ARTIFICIAL (p) = addressable;
2280 DECL_IGNORED_P (p) = addressable;
2281 TREE_ADDRESSABLE (p) = 0;
6071dc7f 2282 layout_decl (p, 0);
9771b263 2283 (*args)[i] = p;
6071dc7f
RH
2284
2285 /* Build a second synthetic decl. */
c2255bc4
AH
2286 decl = build_decl (EXPR_LOCATION (p),
2287 PARM_DECL, NULL_TREE, subtype);
6071dc7f 2288 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
2289 DECL_ARTIFICIAL (decl) = addressable;
2290 DECL_IGNORED_P (decl) = addressable;
6071dc7f 2291 layout_decl (decl, 0);
9771b263 2292 args->safe_insert (++i, decl);
6071dc7f
RH
2293 }
2294 }
6071dc7f
RH
2295}
2296
2297/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2298 the hidden struct return argument, and (abi willing) complex args.
2299 Return the new parameter list. */
2300
9771b263 2301static vec<tree>
6071dc7f
RH
2302assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2303{
2304 tree fndecl = current_function_decl;
2305 tree fntype = TREE_TYPE (fndecl);
6e1aa848 2306 vec<tree> fnargs = vNULL;
3b3f318a
RG
2307 tree arg;
2308
910ad8de 2309 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
9771b263 2310 fnargs.safe_push (arg);
3b3f318a
RG
2311
2312 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2313
2314 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2315 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
e3b5732b 2316 && ! cfun->returns_pcc_struct
61f71b34 2317 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2318 {
f9f29478 2319 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2320 tree decl;
6f086dfc 2321
c2255bc4 2322 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
8dcfef8f 2323 PARM_DECL, get_identifier (".result_ptr"), type);
6071dc7f
RH
2324 DECL_ARG_TYPE (decl) = type;
2325 DECL_ARTIFICIAL (decl) = 1;
8dcfef8f
AO
2326 DECL_NAMELESS (decl) = 1;
2327 TREE_CONSTANT (decl) = 1;
6f086dfc 2328
910ad8de 2329 DECL_CHAIN (decl) = all->orig_fnargs;
3b3f318a 2330 all->orig_fnargs = decl;
9771b263 2331 fnargs.safe_insert (0, decl);
3b3f318a 2332
6071dc7f 2333 all->function_result_decl = decl;
6f086dfc 2334 }
718fe406 2335
42ba5130
RH
2336 /* If the target wants to split complex arguments into scalars, do so. */
2337 if (targetm.calls.split_complex_arg)
3b3f318a 2338 split_complex_args (&fnargs);
ded9bf77 2339
6071dc7f
RH
2340 return fnargs;
2341}
e7949876 2342
6071dc7f
RH
2343/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2344 data for the parameter. Incorporate ABI specifics such as pass-by-
2345 reference and type promotion. */
6f086dfc 2346
6071dc7f
RH
2347static void
2348assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2349 struct assign_parm_data_one *data)
2350{
2351 tree nominal_type, passed_type;
ef4bddc2 2352 machine_mode nominal_mode, passed_mode, promoted_mode;
cde0f3fd 2353 int unsignedp;
6f086dfc 2354
6071dc7f
RH
2355 memset (data, 0, sizeof (*data));
2356
fa10beec 2357 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
e3b5732b 2358 if (!cfun->stdarg)
fa10beec 2359 data->named_arg = 1; /* No variadic parms. */
910ad8de 2360 else if (DECL_CHAIN (parm))
fa10beec 2361 data->named_arg = 1; /* Not the last non-variadic parm. */
d5cc9181 2362 else if (targetm.calls.strict_argument_naming (all->args_so_far))
fa10beec 2363 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 2364 else
fa10beec 2365 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
2366
2367 nominal_type = TREE_TYPE (parm);
2368 passed_type = DECL_ARG_TYPE (parm);
2369
2370 /* Look out for errors propagating this far. Also, if the parameter's
2371 type is void then its value doesn't matter. */
2372 if (TREE_TYPE (parm) == error_mark_node
2373 /* This can happen after weird syntax errors
2374 or if an enum type is defined among the parms. */
2375 || TREE_CODE (parm) != PARM_DECL
2376 || passed_type == NULL
2377 || VOID_TYPE_P (nominal_type))
2378 {
2379 nominal_type = passed_type = void_type_node;
2380 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2381 goto egress;
2382 }
108b7d3d 2383
6071dc7f
RH
2384 /* Find mode of arg as it is passed, and mode of arg as it should be
2385 during execution of this function. */
2386 passed_mode = TYPE_MODE (passed_type);
2387 nominal_mode = TYPE_MODE (nominal_type);
2388
ebf0bf7f
JJ
2389 /* If the parm is to be passed as a transparent union or record, use the
2390 type of the first field for the tests below. We have already verified
2391 that the modes are the same. */
2392 if ((TREE_CODE (passed_type) == UNION_TYPE
2393 || TREE_CODE (passed_type) == RECORD_TYPE)
2394 && TYPE_TRANSPARENT_AGGR (passed_type))
2395 passed_type = TREE_TYPE (first_field (passed_type));
6071dc7f 2396
0976078c 2397 /* See if this arg was passed by invisible reference. */
d5cc9181 2398 if (pass_by_reference (&all->args_so_far_v, passed_mode,
0976078c 2399 passed_type, data->named_arg))
6071dc7f
RH
2400 {
2401 passed_type = nominal_type = build_pointer_type (passed_type);
2402 data->passed_pointer = true;
fd91cfe3 2403 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
6071dc7f 2404 }
6f086dfc 2405
6071dc7f 2406 /* Find mode as it is passed by the ABI. */
cde0f3fd
PB
2407 unsignedp = TYPE_UNSIGNED (passed_type);
2408 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2409 TREE_TYPE (current_function_decl), 0);
6f086dfc 2410
6071dc7f
RH
2411 egress:
2412 data->nominal_type = nominal_type;
2413 data->passed_type = passed_type;
2414 data->nominal_mode = nominal_mode;
2415 data->passed_mode = passed_mode;
2416 data->promoted_mode = promoted_mode;
2417}
16bae307 2418
6071dc7f 2419/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2420
6071dc7f
RH
2421static void
2422assign_parms_setup_varargs (struct assign_parm_data_all *all,
2423 struct assign_parm_data_one *data, bool no_rtl)
2424{
2425 int varargs_pretend_bytes = 0;
2426
d5cc9181 2427 targetm.calls.setup_incoming_varargs (all->args_so_far,
6071dc7f
RH
2428 data->promoted_mode,
2429 data->passed_type,
2430 &varargs_pretend_bytes, no_rtl);
2431
2432 /* If the back-end has requested extra stack space, record how much is
2433 needed. Do not change pretend_args_size otherwise since it may be
2434 nonzero from an earlier partial argument. */
2435 if (varargs_pretend_bytes > 0)
2436 all->pretend_args_size = varargs_pretend_bytes;
2437}
a53e14c0 2438
6071dc7f
RH
2439/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2440 the incoming location of the current parameter. */
2441
2442static void
2443assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2444 struct assign_parm_data_one *data)
2445{
2446 HOST_WIDE_INT pretend_bytes = 0;
2447 rtx entry_parm;
2448 bool in_regs;
2449
2450 if (data->promoted_mode == VOIDmode)
2451 {
2452 data->entry_parm = data->stack_parm = const0_rtx;
2453 return;
2454 }
a53e14c0 2455
d5cc9181 2456 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2457 data->promoted_mode,
2458 data->passed_type,
2459 data->named_arg);
6f086dfc 2460
6071dc7f
RH
2461 if (entry_parm == 0)
2462 data->promoted_mode = data->passed_mode;
6f086dfc 2463
6071dc7f
RH
2464 /* Determine parm's home in the stack, in case it arrives in the stack
2465 or we should pretend it did. Compute the stack position and rtx where
2466 the argument arrives and its size.
6f086dfc 2467
6071dc7f
RH
2468 There is one complexity here: If this was a parameter that would
2469 have been passed in registers, but wasn't only because it is
2470 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2471 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2472 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2473 as it was the previous time. */
2474 in_regs = entry_parm != 0;
6f086dfc 2475#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2476 in_regs = true;
e7949876 2477#endif
6071dc7f
RH
2478 if (!in_regs && !data->named_arg)
2479 {
d5cc9181 2480 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
e7949876 2481 {
6071dc7f 2482 rtx tem;
d5cc9181 2483 tem = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2484 data->promoted_mode,
2485 data->passed_type, true);
6071dc7f 2486 in_regs = tem != NULL;
e7949876 2487 }
6071dc7f 2488 }
e7949876 2489
6071dc7f
RH
2490 /* If this parameter was passed both in registers and in the stack, use
2491 the copy on the stack. */
fe984136
RH
2492 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2493 data->passed_type))
6071dc7f 2494 entry_parm = 0;
e7949876 2495
6071dc7f
RH
2496 if (entry_parm)
2497 {
2498 int partial;
2499
d5cc9181 2500 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
78a52f11
RH
2501 data->promoted_mode,
2502 data->passed_type,
2503 data->named_arg);
6071dc7f
RH
2504 data->partial = partial;
2505
2506 /* The caller might already have allocated stack space for the
2507 register parameters. */
2508 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2509 {
6071dc7f
RH
2510 /* Part of this argument is passed in registers and part
2511 is passed on the stack. Ask the prologue code to extend
2512 the stack part so that we can recreate the full value.
2513
2514 PRETEND_BYTES is the size of the registers we need to store.
2515 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2516 stack space that the prologue should allocate.
2517
2518 Internally, gcc assumes that the argument pointer is aligned
2519 to STACK_BOUNDARY bits. This is used both for alignment
2520 optimizations (see init_emit) and to locate arguments that are
2521 aligned to more than PARM_BOUNDARY bits. We must preserve this
2522 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2523 a stack boundary. */
2524
2525 /* We assume at most one partial arg, and it must be the first
2526 argument on the stack. */
0bccc606 2527 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2528
78a52f11 2529 pretend_bytes = partial;
6071dc7f
RH
2530 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2531
2532 /* We want to align relative to the actual stack pointer, so
2533 don't include this in the stack size until later. */
2534 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2535 }
6071dc7f 2536 }
e7949876 2537
6071dc7f 2538 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2e4ceca5 2539 all->reg_parm_stack_space,
6071dc7f
RH
2540 entry_parm ? data->partial : 0, current_function_decl,
2541 &all->stack_args_size, &data->locate);
6f086dfc 2542
e94a448f
L
2543 /* Update parm_stack_boundary if this parameter is passed in the
2544 stack. */
2545 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2546 crtl->parm_stack_boundary = data->locate.boundary;
2547
6071dc7f
RH
2548 /* Adjust offsets to include the pretend args. */
2549 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2550 data->locate.slot_offset.constant += pretend_bytes;
2551 data->locate.offset.constant += pretend_bytes;
ebca59c3 2552
6071dc7f
RH
2553 data->entry_parm = entry_parm;
2554}
6f086dfc 2555
6071dc7f
RH
2556/* A subroutine of assign_parms. If there is actually space on the stack
2557 for this parm, count it in stack_args_size and return true. */
6f086dfc 2558
6071dc7f
RH
2559static bool
2560assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2561 struct assign_parm_data_one *data)
2562{
2e6ae27f 2563 /* Trivially true if we've no incoming register. */
6071dc7f
RH
2564 if (data->entry_parm == NULL)
2565 ;
2566 /* Also true if we're partially in registers and partially not,
2567 since we've arranged to drop the entire argument on the stack. */
2568 else if (data->partial != 0)
2569 ;
2570 /* Also true if the target says that it's passed in both registers
2571 and on the stack. */
2572 else if (GET_CODE (data->entry_parm) == PARALLEL
2573 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2574 ;
2575 /* Also true if the target says that there's stack allocated for
2576 all register parameters. */
2577 else if (all->reg_parm_stack_space > 0)
2578 ;
2579 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2580 else
2581 return false;
6f086dfc 2582
6071dc7f
RH
2583 all->stack_args_size.constant += data->locate.size.constant;
2584 if (data->locate.size.var)
2585 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2586
6071dc7f
RH
2587 return true;
2588}
0d1416c6 2589
6071dc7f
RH
2590/* A subroutine of assign_parms. Given that this parameter is allocated
2591 stack space by the ABI, find it. */
6f086dfc 2592
6071dc7f
RH
2593static void
2594assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2595{
2596 rtx offset_rtx, stack_parm;
2597 unsigned int align, boundary;
6f086dfc 2598
6071dc7f
RH
2599 /* If we're passing this arg using a reg, make its stack home the
2600 aligned stack slot. */
2601 if (data->entry_parm)
2602 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2603 else
2604 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2605
38173d38 2606 stack_parm = crtl->args.internal_arg_pointer;
6071dc7f
RH
2607 if (offset_rtx != const0_rtx)
2608 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2609 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2610
08ab0acf 2611 if (!data->passed_pointer)
997f78fb 2612 {
08ab0acf
JJ
2613 set_mem_attributes (stack_parm, parm, 1);
2614 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2615 while promoted mode's size is needed. */
2616 if (data->promoted_mode != BLKmode
2617 && data->promoted_mode != DECL_MODE (parm))
997f78fb 2618 {
f5541398 2619 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
527210c4 2620 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
08ab0acf
JJ
2621 {
2622 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2623 data->promoted_mode);
2624 if (offset)
527210c4 2625 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
08ab0acf 2626 }
997f78fb
JJ
2627 }
2628 }
6071dc7f 2629
bfc45551
AM
2630 boundary = data->locate.boundary;
2631 align = BITS_PER_UNIT;
6071dc7f
RH
2632
2633 /* If we're padding upward, we know that the alignment of the slot
c2ed6cf8 2634 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
6071dc7f
RH
2635 intentionally forcing upward padding. Otherwise we have to come
2636 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2637 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f 2638 align = boundary;
481683e1 2639 else if (CONST_INT_P (offset_rtx))
6071dc7f
RH
2640 {
2641 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2642 align = align & -align;
2643 }
bfc45551 2644 set_mem_align (stack_parm, align);
6071dc7f
RH
2645
2646 if (data->entry_parm)
2647 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2648
2649 data->stack_parm = stack_parm;
2650}
2651
2652/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2653 always valid and contiguous. */
2654
2655static void
2656assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2657{
2658 rtx entry_parm = data->entry_parm;
2659 rtx stack_parm = data->stack_parm;
2660
2661 /* If this parm was passed part in regs and part in memory, pretend it
2662 arrived entirely in memory by pushing the register-part onto the stack.
2663 In the special case of a DImode or DFmode that is split, we could put
2664 it together in a pseudoreg directly, but for now that's not worth
2665 bothering with. */
2666 if (data->partial != 0)
2667 {
2668 /* Handle calls that pass values in multiple non-contiguous
2669 locations. The Irix 6 ABI has examples of this. */
2670 if (GET_CODE (entry_parm) == PARALLEL)
1a8cb155 2671 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
b8698a0f 2672 data->passed_type,
6071dc7f 2673 int_size_in_bytes (data->passed_type));
6f086dfc 2674 else
78a52f11
RH
2675 {
2676 gcc_assert (data->partial % UNITS_PER_WORD == 0);
1a8cb155
RS
2677 move_block_from_reg (REGNO (entry_parm),
2678 validize_mem (copy_rtx (stack_parm)),
78a52f11
RH
2679 data->partial / UNITS_PER_WORD);
2680 }
6f086dfc 2681
6071dc7f
RH
2682 entry_parm = stack_parm;
2683 }
6f086dfc 2684
6071dc7f
RH
2685 /* If we didn't decide this parm came in a register, by default it came
2686 on the stack. */
2687 else if (entry_parm == NULL)
2688 entry_parm = stack_parm;
2689
2690 /* When an argument is passed in multiple locations, we can't make use
2691 of this information, but we can save some copying if the whole argument
2692 is passed in a single register. */
2693 else if (GET_CODE (entry_parm) == PARALLEL
2694 && data->nominal_mode != BLKmode
2695 && data->passed_mode != BLKmode)
2696 {
2697 size_t i, len = XVECLEN (entry_parm, 0);
2698
2699 for (i = 0; i < len; i++)
2700 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2701 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2702 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2703 == data->passed_mode)
2704 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2705 {
2706 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2707 break;
2708 }
2709 }
e68a6ce1 2710
6071dc7f
RH
2711 data->entry_parm = entry_parm;
2712}
6f086dfc 2713
4d2a9850
DJ
2714/* A subroutine of assign_parms. Reconstitute any values which were
2715 passed in multiple registers and would fit in a single register. */
2716
2717static void
2718assign_parm_remove_parallels (struct assign_parm_data_one *data)
2719{
2720 rtx entry_parm = data->entry_parm;
2721
2722 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2723 This can be done with register operations rather than on the
2724 stack, even if we will store the reconstituted parameter on the
2725 stack later. */
85776d60 2726 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
4d2a9850
DJ
2727 {
2728 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
bbd46fd5 2729 emit_group_store (parmreg, entry_parm, data->passed_type,
4d2a9850
DJ
2730 GET_MODE_SIZE (GET_MODE (entry_parm)));
2731 entry_parm = parmreg;
2732 }
2733
2734 data->entry_parm = entry_parm;
2735}
2736
6071dc7f
RH
2737/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2738 always valid and properly aligned. */
6f086dfc 2739
6071dc7f
RH
2740static void
2741assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2742{
2743 rtx stack_parm = data->stack_parm;
2744
2745 /* If we can't trust the parm stack slot to be aligned enough for its
2746 ultimate type, don't use that slot after entry. We'll make another
2747 stack slot, if we need one. */
bfc45551
AM
2748 if (stack_parm
2749 && ((STRICT_ALIGNMENT
2750 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2751 || (data->nominal_type
2752 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2753 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2754 stack_parm = NULL;
2755
2756 /* If parm was passed in memory, and we need to convert it on entry,
2757 don't store it back in that same slot. */
2758 else if (data->entry_parm == stack_parm
2759 && data->nominal_mode != BLKmode
2760 && data->nominal_mode != data->passed_mode)
2761 stack_parm = NULL;
2762
7d69de61
RH
2763 /* If stack protection is in effect for this function, don't leave any
2764 pointers in their passed stack slots. */
cb91fab0 2765 else if (crtl->stack_protect_guard
7d69de61
RH
2766 && (flag_stack_protect == 2
2767 || data->passed_pointer
2768 || POINTER_TYPE_P (data->nominal_type)))
2769 stack_parm = NULL;
2770
6071dc7f
RH
2771 data->stack_parm = stack_parm;
2772}
a0506b54 2773
6071dc7f
RH
2774/* A subroutine of assign_parms. Return true if the current parameter
2775 should be stored as a BLKmode in the current frame. */
2776
2777static bool
2778assign_parm_setup_block_p (struct assign_parm_data_one *data)
2779{
2780 if (data->nominal_mode == BLKmode)
2781 return true;
85776d60
DJ
2782 if (GET_MODE (data->entry_parm) == BLKmode)
2783 return true;
531547e9 2784
6e985040 2785#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2786 /* Only assign_parm_setup_block knows how to deal with register arguments
2787 that are padded at the least significant end. */
2788 if (REG_P (data->entry_parm)
2789 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2790 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2791 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2792 return true;
6e985040 2793#endif
6071dc7f
RH
2794
2795 return false;
2796}
2797
b8698a0f 2798/* A subroutine of assign_parms. Arrange for the parameter to be
6071dc7f
RH
2799 present and valid in DATA->STACK_RTL. */
2800
2801static void
27e29549
RH
2802assign_parm_setup_block (struct assign_parm_data_all *all,
2803 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2804{
2805 rtx entry_parm = data->entry_parm;
2806 rtx stack_parm = data->stack_parm;
bfc45551
AM
2807 HOST_WIDE_INT size;
2808 HOST_WIDE_INT size_stored;
6071dc7f 2809
27e29549
RH
2810 if (GET_CODE (entry_parm) == PARALLEL)
2811 entry_parm = emit_group_move_into_temps (entry_parm);
2812
bfc45551
AM
2813 size = int_size_in_bytes (data->passed_type);
2814 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2815 if (stack_parm == 0)
2816 {
a561d88b 2817 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
bfc45551 2818 stack_parm = assign_stack_local (BLKmode, size_stored,
a561d88b 2819 DECL_ALIGN (parm));
bfc45551
AM
2820 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2821 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2822 set_mem_attributes (stack_parm, parm, 1);
2823 }
2824
6071dc7f
RH
2825 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2826 calls that pass values in multiple non-contiguous locations. */
2827 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2828 {
6071dc7f
RH
2829 rtx mem;
2830
2831 /* Note that we will be storing an integral number of words.
2832 So we have to be careful to ensure that we allocate an
bfc45551 2833 integral number of words. We do this above when we call
6071dc7f
RH
2834 assign_stack_local if space was not allocated in the argument
2835 list. If it was, this will not work if PARM_BOUNDARY is not
2836 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2837 if it becomes a problem. Exception is when BLKmode arrives
2838 with arguments not conforming to word_mode. */
2839
bfc45551
AM
2840 if (data->stack_parm == 0)
2841 ;
6071dc7f
RH
2842 else if (GET_CODE (entry_parm) == PARALLEL)
2843 ;
0bccc606
NS
2844 else
2845 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2846
1a8cb155 2847 mem = validize_mem (copy_rtx (stack_parm));
c6b97fac 2848
6071dc7f
RH
2849 /* Handle values in multiple non-contiguous locations. */
2850 if (GET_CODE (entry_parm) == PARALLEL)
27e29549 2851 {
bb27eeda
SE
2852 push_to_sequence2 (all->first_conversion_insn,
2853 all->last_conversion_insn);
27e29549 2854 emit_group_store (mem, entry_parm, data->passed_type, size);
bb27eeda
SE
2855 all->first_conversion_insn = get_insns ();
2856 all->last_conversion_insn = get_last_insn ();
27e29549
RH
2857 end_sequence ();
2858 }
c6b97fac 2859
6071dc7f
RH
2860 else if (size == 0)
2861 ;
5c07bd7a 2862
6071dc7f
RH
2863 /* If SIZE is that of a mode no bigger than a word, just use
2864 that mode's store operation. */
2865 else if (size <= UNITS_PER_WORD)
2866 {
ef4bddc2 2867 machine_mode mode
6071dc7f 2868 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2869
6071dc7f 2870 if (mode != BLKmode
6e985040 2871#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2872 && (size == UNITS_PER_WORD
2873 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2874 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2875#endif
6071dc7f
RH
2876 )
2877 {
208996c7
RS
2878 rtx reg;
2879
2880 /* We are really truncating a word_mode value containing
2881 SIZE bytes into a value of mode MODE. If such an
2882 operation requires no actual instructions, we can refer
2883 to the value directly in mode MODE, otherwise we must
2884 start with the register in word_mode and explicitly
2885 convert it. */
2886 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2887 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2888 else
2889 {
2890 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2891 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2892 }
6071dc7f
RH
2893 emit_move_insn (change_address (mem, mode, 0), reg);
2894 }
c6b97fac 2895
6071dc7f
RH
2896 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2897 machine must be aligned to the left before storing
2898 to memory. Note that the previous test doesn't
2899 handle all cases (e.g. SIZE == 3). */
2900 else if (size != UNITS_PER_WORD
6e985040 2901#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2902 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2903 == downward)
6e985040 2904#else
6071dc7f 2905 && BYTES_BIG_ENDIAN
6e985040 2906#endif
6071dc7f
RH
2907 )
2908 {
2909 rtx tem, x;
2910 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2911 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2912
eb6c3df1 2913 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
6071dc7f
RH
2914 tem = change_address (mem, word_mode, 0);
2915 emit_move_insn (tem, x);
6f086dfc 2916 }
6071dc7f 2917 else
27e29549 2918 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2919 size_stored / UNITS_PER_WORD);
6f086dfc 2920 }
6071dc7f 2921 else
27e29549 2922 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2923 size_stored / UNITS_PER_WORD);
2924 }
bfc45551
AM
2925 else if (data->stack_parm == 0)
2926 {
bb27eeda 2927 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2928 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2929 BLOCK_OP_NORMAL);
bb27eeda
SE
2930 all->first_conversion_insn = get_insns ();
2931 all->last_conversion_insn = get_last_insn ();
bfc45551
AM
2932 end_sequence ();
2933 }
6071dc7f 2934
bfc45551 2935 data->stack_parm = stack_parm;
6071dc7f
RH
2936 SET_DECL_RTL (parm, stack_parm);
2937}
2938
2939/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2940 parameter. Get it there. Perform all ABI specified conversions. */
2941
2942static void
2943assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2944 struct assign_parm_data_one *data)
2945{
71008de4
BS
2946 rtx parmreg, validated_mem;
2947 rtx equiv_stack_parm;
ef4bddc2 2948 machine_mode promoted_nominal_mode;
6071dc7f
RH
2949 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2950 bool did_conversion = false;
71008de4 2951 bool need_conversion, moved;
6071dc7f
RH
2952
2953 /* Store the parm in a pseudoregister during the function, but we may
666e3ceb
PB
2954 need to do it in a wider mode. Using 2 here makes the result
2955 consistent with promote_decl_mode and thus expand_expr_real_1. */
6071dc7f 2956 promoted_nominal_mode
cde0f3fd 2957 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
666e3ceb 2958 TREE_TYPE (current_function_decl), 2);
6071dc7f
RH
2959
2960 parmreg = gen_reg_rtx (promoted_nominal_mode);
2961
2962 if (!DECL_ARTIFICIAL (parm))
2963 mark_user_reg (parmreg);
2964
2965 /* If this was an item that we received a pointer to,
2966 set DECL_RTL appropriately. */
2967 if (data->passed_pointer)
2968 {
2969 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2970 set_mem_attributes (x, parm, 1);
2971 SET_DECL_RTL (parm, x);
2972 }
2973 else
389fdba0 2974 SET_DECL_RTL (parm, parmreg);
6071dc7f 2975
4d2a9850
DJ
2976 assign_parm_remove_parallels (data);
2977
666e3ceb
PB
2978 /* Copy the value into the register, thus bridging between
2979 assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 2980
71008de4 2981 equiv_stack_parm = data->stack_parm;
1a8cb155 2982 validated_mem = validize_mem (copy_rtx (data->entry_parm));
71008de4
BS
2983
2984 need_conversion = (data->nominal_mode != data->passed_mode
2985 || promoted_nominal_mode != data->promoted_mode);
2986 moved = false;
2987
dbb94435
BS
2988 if (need_conversion
2989 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2990 && data->nominal_mode == data->passed_mode
2991 && data->nominal_mode == GET_MODE (data->entry_parm))
71008de4 2992 {
6071dc7f
RH
2993 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2994 mode, by the caller. We now have to convert it to
2995 NOMINAL_MODE, if different. However, PARMREG may be in
2996 a different mode than NOMINAL_MODE if it is being stored
2997 promoted.
2998
2999 If ENTRY_PARM is a hard register, it might be in a register
3000 not valid for operating in its mode (e.g., an odd-numbered
3001 register for a DFmode). In that case, moves are the only
3002 thing valid, so we can't do a convert from there. This
3003 occurs when the calling sequence allow such misaligned
3004 usages.
3005
3006 In addition, the conversion may involve a call, which could
3007 clobber parameters which haven't been copied to pseudo
71008de4
BS
3008 registers yet.
3009
3010 First, we try to emit an insn which performs the necessary
3011 conversion. We verify that this insn does not clobber any
3012 hard registers. */
3013
3014 enum insn_code icode;
3015 rtx op0, op1;
3016
3017 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3018 unsignedp);
3019
3020 op0 = parmreg;
3021 op1 = validated_mem;
3022 if (icode != CODE_FOR_nothing
2ef6ce06
RS
3023 && insn_operand_matches (icode, 0, op0)
3024 && insn_operand_matches (icode, 1, op1))
71008de4
BS
3025 {
3026 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
b32d5189
DM
3027 rtx_insn *insn, *insns;
3028 rtx t = op1;
71008de4
BS
3029 HARD_REG_SET hardregs;
3030
3031 start_sequence ();
f9fef349
JJ
3032 /* If op1 is a hard register that is likely spilled, first
3033 force it into a pseudo, otherwise combiner might extend
3034 its lifetime too much. */
3035 if (GET_CODE (t) == SUBREG)
3036 t = SUBREG_REG (t);
3037 if (REG_P (t)
3038 && HARD_REGISTER_P (t)
3039 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3040 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3041 {
3042 t = gen_reg_rtx (GET_MODE (op1));
3043 emit_move_insn (t, op1);
3044 }
3045 else
3046 t = op1;
a11899b2
DM
3047 rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3048 data->passed_mode, unsignedp);
3049 emit_insn (pat);
71008de4
BS
3050 insns = get_insns ();
3051
3052 moved = true;
3053 CLEAR_HARD_REG_SET (hardregs);
3054 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3055 {
3056 if (INSN_P (insn))
3057 note_stores (PATTERN (insn), record_hard_reg_sets,
3058 &hardregs);
3059 if (!hard_reg_set_empty_p (hardregs))
3060 moved = false;
3061 }
3062
3063 end_sequence ();
3064
3065 if (moved)
3066 {
3067 emit_insn (insns);
dbb94435
BS
3068 if (equiv_stack_parm != NULL_RTX)
3069 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3070 equiv_stack_parm);
71008de4
BS
3071 }
3072 }
3073 }
3074
3075 if (moved)
3076 /* Nothing to do. */
3077 ;
3078 else if (need_conversion)
3079 {
3080 /* We did not have an insn to convert directly, or the sequence
3081 generated appeared unsafe. We must first copy the parm to a
3082 pseudo reg, and save the conversion until after all
6071dc7f
RH
3083 parameters have been moved. */
3084
71008de4 3085 int save_tree_used;
6071dc7f
RH
3086 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3087
71008de4 3088 emit_move_insn (tempreg, validated_mem);
6071dc7f 3089
bb27eeda 3090 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
3091 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3092
3093 if (GET_CODE (tempreg) == SUBREG
3094 && GET_MODE (tempreg) == data->nominal_mode
3095 && REG_P (SUBREG_REG (tempreg))
3096 && data->nominal_mode == data->passed_mode
3097 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3098 && GET_MODE_SIZE (GET_MODE (tempreg))
3099 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 3100 {
6071dc7f
RH
3101 /* The argument is already sign/zero extended, so note it
3102 into the subreg. */
3103 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
362d42dc 3104 SUBREG_PROMOTED_SET (tempreg, unsignedp);
6071dc7f 3105 }
00d8a4c1 3106
6071dc7f
RH
3107 /* TREE_USED gets set erroneously during expand_assignment. */
3108 save_tree_used = TREE_USED (parm);
79f5e442 3109 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 3110 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
3111 all->first_conversion_insn = get_insns ();
3112 all->last_conversion_insn = get_last_insn ();
6071dc7f 3113 end_sequence ();
00d8a4c1 3114
6071dc7f
RH
3115 did_conversion = true;
3116 }
3117 else
71008de4 3118 emit_move_insn (parmreg, validated_mem);
6071dc7f
RH
3119
3120 /* If we were passed a pointer but the actual value can safely live
f7e088e7
EB
3121 in a register, retrieve it and use it directly. */
3122 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
6071dc7f
RH
3123 {
3124 /* We can't use nominal_mode, because it will have been set to
3125 Pmode above. We must use the actual mode of the parm. */
f7e088e7
EB
3126 if (use_register_for_decl (parm))
3127 {
3128 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3129 mark_user_reg (parmreg);
3130 }
3131 else
3132 {
3133 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3134 TYPE_MODE (TREE_TYPE (parm)),
3135 TYPE_ALIGN (TREE_TYPE (parm)));
3136 parmreg
3137 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3138 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3139 align);
3140 set_mem_attributes (parmreg, parm, 1);
3141 }
cd5b3469 3142
6071dc7f
RH
3143 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3144 {
3145 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3146 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3147
bb27eeda
SE
3148 push_to_sequence2 (all->first_conversion_insn,
3149 all->last_conversion_insn);
6071dc7f
RH
3150 emit_move_insn (tempreg, DECL_RTL (parm));
3151 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3152 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
3153 all->first_conversion_insn = get_insns ();
3154 all->last_conversion_insn = get_last_insn ();
6071dc7f 3155 end_sequence ();
6f086dfc 3156
6071dc7f
RH
3157 did_conversion = true;
3158 }
3159 else
3160 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 3161
6071dc7f 3162 SET_DECL_RTL (parm, parmreg);
797a6ac1 3163
6071dc7f
RH
3164 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3165 now the parm. */
3166 data->stack_parm = NULL;
3167 }
ddef6bc7 3168
6071dc7f
RH
3169 /* Mark the register as eliminable if we did no conversion and it was
3170 copied from memory at a fixed offset, and the arg pointer was not
3171 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3172 offset formed an invalid address, such memory-equivalences as we
3173 make here would screw up life analysis for it. */
3174 if (data->nominal_mode == data->passed_mode
3175 && !did_conversion
3176 && data->stack_parm != 0
3177 && MEM_P (data->stack_parm)
3178 && data->locate.offset.var == 0
3179 && reg_mentioned_p (virtual_incoming_args_rtx,
3180 XEXP (data->stack_parm, 0)))
3181 {
691fe203
DM
3182 rtx_insn *linsn = get_last_insn ();
3183 rtx_insn *sinsn;
3184 rtx set;
a03caf76 3185
6071dc7f
RH
3186 /* Mark complex types separately. */
3187 if (GET_CODE (parmreg) == CONCAT)
3188 {
ef4bddc2 3189 machine_mode submode
6071dc7f 3190 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
3191 int regnor = REGNO (XEXP (parmreg, 0));
3192 int regnoi = REGNO (XEXP (parmreg, 1));
3193 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3194 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3195 GET_MODE_SIZE (submode));
6071dc7f
RH
3196
3197 /* Scan backwards for the set of the real and
3198 imaginary parts. */
3199 for (sinsn = linsn; sinsn != 0;
3200 sinsn = prev_nonnote_insn (sinsn))
3201 {
3202 set = single_set (sinsn);
3203 if (set == 0)
3204 continue;
3205
3206 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 3207 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 3208 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 3209 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 3210 }
6071dc7f 3211 }
7543f918
JR
3212 else
3213 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
6071dc7f
RH
3214 }
3215
3216 /* For pointer data type, suggest pointer register. */
3217 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3218 mark_reg_pointer (parmreg,
3219 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3220}
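/* Illustrative sketch (not GCC code; the helper name and types are
   hypothetical).  It models why the extension generated above picks
   ZERO_EXTEND or SIGN_EXTEND based on UNSIGNEDP: when a narrow argument
   arrives promoted in a wider register, only its low bits are meaningful
   and they must be re-widened according to the argument's signedness.  */
#include <stdint.h>

static int32_t
model_promote_hi_to_si (uint32_t reg_bits, int unsignedp)
{
  uint16_t low = (uint16_t) reg_bits;      /* the 16 bits that matter */
  if (unsignedp)
    return (int32_t) (uint32_t) low;       /* like ZERO_EXTEND */
  else
    return (int32_t) (int16_t) low;        /* like SIGN_EXTEND */
}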
3221
3222/* A subroutine of assign_parms. Allocate stack space to hold the current
3223 parameter. Get it there. Perform all ABI specified conversions. */
3224
3225static void
3226assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3227 struct assign_parm_data_one *data)
3228{
3229 /* Value must be stored in the stack slot STACK_PARM during function
3230 execution. */
bfc45551 3231 bool to_conversion = false;
6071dc7f 3232
4d2a9850
DJ
3233 assign_parm_remove_parallels (data);
3234
6071dc7f
RH
3235 if (data->promoted_mode != data->nominal_mode)
3236 {
3237 /* Conversion is required. */
3238 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 3239
1a8cb155 3240 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
6071dc7f 3241
bb27eeda 3242 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
3243 to_conversion = true;
3244
6071dc7f
RH
3245 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3246 TYPE_UNSIGNED (TREE_TYPE (parm)));
3247
3248 if (data->stack_parm)
dd67163f
JJ
3249 {
3250 int offset = subreg_lowpart_offset (data->nominal_mode,
3251 GET_MODE (data->stack_parm));
3252 /* ??? This may need a big-endian conversion on sparc64. */
3253 data->stack_parm
3254 = adjust_address (data->stack_parm, data->nominal_mode, 0);
527210c4 3255 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
dd67163f 3256 set_mem_offset (data->stack_parm,
527210c4 3257 MEM_OFFSET (data->stack_parm) + offset);
dd67163f 3258 }
6071dc7f
RH
3259 }
3260
3261 if (data->entry_parm != data->stack_parm)
3262 {
bfc45551
AM
3263 rtx src, dest;
3264
6071dc7f
RH
3265 if (data->stack_parm == 0)
3266 {
3a695389
UW
3267 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3268 GET_MODE (data->entry_parm),
3269 TYPE_ALIGN (data->passed_type));
6071dc7f
RH
3270 data->stack_parm
3271 = assign_stack_local (GET_MODE (data->entry_parm),
3272 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3a695389 3273 align);
6071dc7f 3274 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 3275 }
6071dc7f 3276
1a8cb155
RS
3277 dest = validize_mem (copy_rtx (data->stack_parm));
3278 src = validize_mem (copy_rtx (data->entry_parm));
bfc45551
AM
3279
3280 if (MEM_P (src))
6f086dfc 3281 {
bfc45551
AM
3282 /* Use a block move to handle potentially misaligned entry_parm. */
3283 if (!to_conversion)
bb27eeda
SE
3284 push_to_sequence2 (all->first_conversion_insn,
3285 all->last_conversion_insn);
bfc45551
AM
3286 to_conversion = true;
3287
3288 emit_block_move (dest, src,
3289 GEN_INT (int_size_in_bytes (data->passed_type)),
3290 BLOCK_OP_NORMAL);
6071dc7f
RH
3291 }
3292 else
bfc45551
AM
3293 emit_move_insn (dest, src);
3294 }
3295
3296 if (to_conversion)
3297 {
bb27eeda
SE
3298 all->first_conversion_insn = get_insns ();
3299 all->last_conversion_insn = get_last_insn ();
bfc45551 3300 end_sequence ();
6071dc7f 3301 }
6f086dfc 3302
6071dc7f
RH
3303 SET_DECL_RTL (parm, data->stack_parm);
3304}
3412b298 3305
6071dc7f
RH
3306/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3307 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 3308
6071dc7f 3309static void
3b3f318a 3310assign_parms_unsplit_complex (struct assign_parm_data_all *all,
9771b263 3311 vec<tree> fnargs)
6071dc7f
RH
3312{
3313 tree parm;
6ccd356e 3314 tree orig_fnargs = all->orig_fnargs;
3b3f318a 3315 unsigned i = 0;
f4ef873c 3316
3b3f318a 3317 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
6071dc7f
RH
3318 {
3319 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3320 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3321 {
3322 rtx tmp, real, imag;
ef4bddc2 3323 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 3324
9771b263
DN
3325 real = DECL_RTL (fnargs[i]);
3326 imag = DECL_RTL (fnargs[i + 1]);
6071dc7f 3327 if (inner != GET_MODE (real))
6f086dfc 3328 {
6071dc7f
RH
3329 real = gen_lowpart_SUBREG (inner, real);
3330 imag = gen_lowpart_SUBREG (inner, imag);
3331 }
6ccd356e
AM
3332
3333 if (TREE_ADDRESSABLE (parm))
3334 {
3335 rtx rmem, imem;
3336 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3a695389
UW
3337 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3338 DECL_MODE (parm),
3339 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
3340
3341 /* split_complex_arg put the real and imag parts in
3342 pseudos. Move them to memory. */
3a695389 3343 tmp = assign_stack_local (DECL_MODE (parm), size, align);
6ccd356e
AM
3344 set_mem_attributes (tmp, parm, 1);
3345 rmem = adjust_address_nv (tmp, inner, 0);
3346 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
3347 push_to_sequence2 (all->first_conversion_insn,
3348 all->last_conversion_insn);
6ccd356e
AM
3349 emit_move_insn (rmem, real);
3350 emit_move_insn (imem, imag);
bb27eeda
SE
3351 all->first_conversion_insn = get_insns ();
3352 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
3353 end_sequence ();
3354 }
3355 else
3356 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 3357 SET_DECL_RTL (parm, tmp);
7e41ffa2 3358
9771b263
DN
3359 real = DECL_INCOMING_RTL (fnargs[i]);
3360 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
6071dc7f
RH
3361 if (inner != GET_MODE (real))
3362 {
3363 real = gen_lowpart_SUBREG (inner, real);
3364 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 3365 }
6071dc7f 3366 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5141868d 3367 set_decl_incoming_rtl (parm, tmp, false);
3b3f318a 3368 i++;
6f086dfc 3369 }
6f086dfc 3370 }
6071dc7f
RH
3371}
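/* Example (illustrative only): on a target whose ABI splits complex
   arguments, a parameter such as Z below reaches expansion as two scalar
   pieces, one for the real part and one for the imaginary part.  The
   routine above rebuilds a single rtl object for Z: a CONCAT of the two
   pieces, or a stack temporary holding both when Z is addressable.  */
#include <complex.h>

double
magnitude_squared (double _Complex z)
{
  /* Source-level view: the body still sees one parameter Z, however it
     was passed.  */
  double re = creal (z), im = cimag (z);
  return re * re + im * im;
}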
3372
3373/* Assign RTL expressions to the function's parameters. This may involve
3374 copying them into registers and using those registers as the DECL_RTL. */
3375
6fe79279 3376static void
6071dc7f
RH
3377assign_parms (tree fndecl)
3378{
3379 struct assign_parm_data_all all;
3b3f318a 3380 tree parm;
9771b263 3381 vec<tree> fnargs;
3b3f318a 3382 unsigned i;
6f086dfc 3383
38173d38 3384 crtl->args.internal_arg_pointer
150cdc9e 3385 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
3386
3387 assign_parms_initialize_all (&all);
3388 fnargs = assign_parms_augmented_arg_list (&all);
3389
9771b263 3390 FOR_EACH_VEC_ELT (fnargs, i, parm)
ded9bf77 3391 {
6071dc7f
RH
3392 struct assign_parm_data_one data;
3393
3394 /* Extract the type of PARM; adjust it according to ABI. */
3395 assign_parm_find_data_types (&all, parm, &data);
3396
3397 /* Early out for errors and void parameters. */
3398 if (data.passed_mode == VOIDmode)
ded9bf77 3399 {
6071dc7f
RH
3400 SET_DECL_RTL (parm, const0_rtx);
3401 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3402 continue;
3403 }
196c42cd 3404
2e3f842f
L
3405 /* Estimate stack alignment from parameter alignment. */
3406 if (SUPPORTS_STACK_ALIGNMENT)
3407 {
c2ed6cf8
NF
3408 unsigned int align
3409 = targetm.calls.function_arg_boundary (data.promoted_mode,
3410 data.passed_type);
ae58e548
JJ
3411 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3412 align);
2e3f842f 3413 if (TYPE_ALIGN (data.nominal_type) > align)
ae58e548
JJ
3414 align = MINIMUM_ALIGNMENT (data.nominal_type,
3415 TYPE_MODE (data.nominal_type),
3416 TYPE_ALIGN (data.nominal_type));
2e3f842f
L
3417 if (crtl->stack_alignment_estimated < align)
3418 {
3419 gcc_assert (!crtl->stack_realign_processed);
3420 crtl->stack_alignment_estimated = align;
3421 }
3422 }
b8698a0f 3423
910ad8de 3424 if (cfun->stdarg && !DECL_CHAIN (parm))
8117c488 3425 assign_parms_setup_varargs (&all, &data, false);
196c42cd 3426
6071dc7f
RH
3427 /* Find out where the parameter arrives in this function. */
3428 assign_parm_find_entry_rtl (&all, &data);
3429
3430 /* Find out where stack space for this parameter might be. */
3431 if (assign_parm_is_stack_parm (&all, &data))
3432 {
3433 assign_parm_find_stack_rtl (parm, &data);
3434 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3435 }
6071dc7f
RH
3436
3437 /* Record permanently how this parm was passed. */
a82ff31f
JJ
3438 if (data.passed_pointer)
3439 {
3440 rtx incoming_rtl
3441 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3442 data.entry_parm);
3443 set_decl_incoming_rtl (parm, incoming_rtl, true);
3444 }
3445 else
3446 set_decl_incoming_rtl (parm, data.entry_parm, false);
6071dc7f
RH
3447
3448 /* Update info on where next arg arrives in registers. */
d5cc9181 3449 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3450 data.passed_type, data.named_arg);
6071dc7f
RH
3451
3452 assign_parm_adjust_stack_rtl (&data);
3453
3454 if (assign_parm_setup_block_p (&data))
27e29549 3455 assign_parm_setup_block (&all, parm, &data);
6071dc7f
RH
3456 else if (data.passed_pointer || use_register_for_decl (parm))
3457 assign_parm_setup_reg (&all, parm, &data);
3458 else
3459 assign_parm_setup_stack (&all, parm, &data);
ded9bf77
AH
3460 }
3461
3b3f318a 3462 if (targetm.calls.split_complex_arg)
6ccd356e 3463 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3464
9771b263 3465 fnargs.release ();
3b3f318a 3466
bcb21886
KY
3467 /* Initialize pic_offset_table_rtx with a pseudo register
3468 if required. */
3469 if (targetm.use_pseudo_pic_reg ())
3470 pic_offset_table_rtx = gen_reg_rtx (Pmode);
3471
3412b298
JW
3472 /* Output all parameter conversion instructions (possibly including calls)
3473 now that all parameters have been copied out of hard registers. */
bb27eeda 3474 emit_insn (all.first_conversion_insn);
3412b298 3475
2e3f842f
L
3476 /* Estimate reload stack alignment from scalar return mode. */
3477 if (SUPPORTS_STACK_ALIGNMENT)
3478 {
3479 if (DECL_RESULT (fndecl))
3480 {
3481 tree type = TREE_TYPE (DECL_RESULT (fndecl));
ef4bddc2 3482 machine_mode mode = TYPE_MODE (type);
2e3f842f
L
3483
3484 if (mode != BLKmode
3485 && mode != VOIDmode
3486 && !AGGREGATE_TYPE_P (type))
3487 {
3488 unsigned int align = GET_MODE_ALIGNMENT (mode);
3489 if (crtl->stack_alignment_estimated < align)
3490 {
3491 gcc_assert (!crtl->stack_realign_processed);
3492 crtl->stack_alignment_estimated = align;
3493 }
3494 }
b8698a0f 3495 }
2e3f842f
L
3496 }
3497
b36a8cc2
OH
3498 /* If we are receiving a struct value address as the first argument, set up
3499 the RTL for the function result. As this might require code to convert
3500 the transmitted address to Pmode, we do this here to ensure that possible
3501 preliminary conversions of the address have been emitted already. */
6071dc7f 3502 if (all.function_result_decl)
b36a8cc2 3503 {
6071dc7f
RH
3504 tree result = DECL_RESULT (current_function_decl);
3505 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3506 rtx x;
fa8db1f7 3507
cc77ae10 3508 if (DECL_BY_REFERENCE (result))
8dcfef8f
AO
3509 {
3510 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3511 x = addr;
3512 }
cc77ae10
JM
3513 else
3514 {
8dcfef8f
AO
3515 SET_DECL_VALUE_EXPR (result,
3516 build1 (INDIRECT_REF, TREE_TYPE (result),
3517 all.function_result_decl));
cc77ae10
JM
3518 addr = convert_memory_address (Pmode, addr);
3519 x = gen_rtx_MEM (DECL_MODE (result), addr);
3520 set_mem_attributes (x, result, 1);
3521 }
8dcfef8f
AO
3522
3523 DECL_HAS_VALUE_EXPR_P (result) = 1;
3524
b36a8cc2
OH
3525 SET_DECL_RTL (result, x);
3526 }
3527
53c428d0 3528 /* We have aligned all the args, so add space for the pretend args. */
38173d38 3529 crtl->args.pretend_args_size = all.pretend_args_size;
6071dc7f 3530 all.stack_args_size.constant += all.extra_pretend_bytes;
38173d38 3531 crtl->args.size = all.stack_args_size.constant;
6f086dfc
RS
3532
3533 /* Adjust function incoming argument size for alignment and
3534 minimum length. */
3535
2e4ceca5 3536 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
38173d38 3537 crtl->args.size = CEIL_ROUND (crtl->args.size,
53366450 3538 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3539
6f086dfc 3540#ifdef ARGS_GROW_DOWNWARD
38173d38 3541 crtl->args.arg_offset_rtx
477eff96 3542 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
6071dc7f
RH
3543 : expand_expr (size_diffop (all.stack_args_size.var,
3544 size_int (-all.stack_args_size.constant)),
bbbbb16a 3545 NULL_RTX, VOIDmode, EXPAND_NORMAL));
6f086dfc 3546#else
38173d38 3547 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3548#endif
3549
3550 /* See how many bytes, if any, of its args a function should try to pop
3551 on return. */
3552
079e7538
NF
3553 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3554 TREE_TYPE (fndecl),
3555 crtl->args.size);
6f086dfc 3556
3b69d50e
RK
3557 /* For stdarg.h function, save info about
3558 regs and stack space used by the named args. */
6f086dfc 3559
d5cc9181 3560 crtl->args.info = all.args_so_far_v;
6f086dfc
RS
3561
3562 /* Set the rtx used for the function return value. Put this in its
3563 own variable so any optimizers that need this information don't have
3564 to include tree.h. Do this here so it gets done when an inlined
3565 function gets output. */
3566
38173d38 3567 crtl->return_rtx
19e7881c
MM
3568 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3569 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3570
3571 /* If scalar return value was computed in a pseudo-reg, or was a named
3572 return value that got dumped to the stack, copy that to the hard
3573 return register. */
3574 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3575 {
3576 tree decl_result = DECL_RESULT (fndecl);
3577 rtx decl_rtl = DECL_RTL (decl_result);
3578
3579 if (REG_P (decl_rtl)
3580 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3581 : DECL_REGISTER (decl_result))
3582 {
3583 rtx real_decl_rtl;
3584
1d636cc6
RG
3585 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3586 fndecl, true);
ce5e43d0 3587 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
38173d38 3588 /* The delay slot scheduler assumes that crtl->return_rtx
ce5e43d0
JJ
3589 holds the hard register containing the return value, not a
3590 temporary pseudo. */
38173d38 3591 crtl->return_rtx = real_decl_rtl;
ce5e43d0
JJ
3592 }
3593 }
6f086dfc 3594}
4744afba
RH
3595
3596/* A subroutine of gimplify_parameters, invoked via walk_tree.
3597 For all seen types, gimplify their sizes. */
3598
3599static tree
3600gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3601{
3602 tree t = *tp;
3603
3604 *walk_subtrees = 0;
3605 if (TYPE_P (t))
3606 {
3607 if (POINTER_TYPE_P (t))
3608 *walk_subtrees = 1;
ad50bc8d
RH
3609 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3610 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba 3611 {
726a989a 3612 gimplify_type_sizes (t, (gimple_seq *) data);
4744afba
RH
3613 *walk_subtrees = 1;
3614 }
3615 }
3616
3617 return NULL;
3618}
3619
3620/* Gimplify the parameter list for current_function_decl. This involves
3621   evaluating SAVE_EXPRs of variable-sized parameters and generating code
726a989a
RB
3622   to implement callee-copied reference parameters.  Returns a sequence of
3623 statements to add to the beginning of the function. */
4744afba 3624
726a989a 3625gimple_seq
4744afba
RH
3626gimplify_parameters (void)
3627{
3628 struct assign_parm_data_all all;
3b3f318a 3629 tree parm;
726a989a 3630 gimple_seq stmts = NULL;
9771b263 3631 vec<tree> fnargs;
3b3f318a 3632 unsigned i;
4744afba
RH
3633
3634 assign_parms_initialize_all (&all);
3635 fnargs = assign_parms_augmented_arg_list (&all);
3636
9771b263 3637 FOR_EACH_VEC_ELT (fnargs, i, parm)
4744afba
RH
3638 {
3639 struct assign_parm_data_one data;
3640
3641 /* Extract the type of PARM; adjust it according to ABI. */
3642 assign_parm_find_data_types (&all, parm, &data);
3643
3644 /* Early out for errors and void parameters. */
3645 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3646 continue;
3647
3648 /* Update info on where next arg arrives in registers. */
d5cc9181 3649 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3650 data.passed_type, data.named_arg);
4744afba
RH
3651
3652 /* ??? Once upon a time variable_size stuffed parameter list
3653 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3654 turned out to be less than manageable in the gimple world.
3655 Now we have to hunt them down ourselves. */
3656 walk_tree_without_duplicates (&data.passed_type,
3657 gimplify_parm_type, &stmts);
3658
b38f3813 3659 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4744afba
RH
3660 {
3661 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3662 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3663 }
3664
3665 if (data.passed_pointer)
3666 {
3667 tree type = TREE_TYPE (data.passed_type);
d5cc9181 3668 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4744afba
RH
3669 type, data.named_arg))
3670 {
3671 tree local, t;
3672
b38f3813 3673 /* For constant-sized objects, this is trivial; for
4744afba 3674 variable-sized objects, we have to play games. */
b38f3813
EB
3675 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3676 && !(flag_stack_check == GENERIC_STACK_CHECK
3677 && compare_tree_int (DECL_SIZE_UNIT (parm),
3678 STACK_CHECK_MAX_VAR_SIZE) > 0))
4744afba 3679 {
5dac1dae 3680 local = create_tmp_var (type, get_name (parm));
4744afba 3681 DECL_IGNORED_P (local) = 0;
04487a2f
JJ
3682 /* If PARM was addressable, move that flag over
3683 to the local copy, as its address will be taken,
37609bf0
RG
3684		    not the PARM's.  Keep the parm's address-taken flag set,
3685 as we'll query that flag during gimplification. */
04487a2f 3686 if (TREE_ADDRESSABLE (parm))
37609bf0 3687 TREE_ADDRESSABLE (local) = 1;
5dac1dae
JJ
3688 else if (TREE_CODE (type) == COMPLEX_TYPE
3689 || TREE_CODE (type) == VECTOR_TYPE)
3690 DECL_GIMPLE_REG_P (local) = 1;
4744afba
RH
3691 }
3692 else
3693 {
5039610b 3694 tree ptr_type, addr;
4744afba
RH
3695
3696 ptr_type = build_pointer_type (type);
c98b08ff 3697 addr = create_tmp_reg (ptr_type, get_name (parm));
4744afba
RH
3698 DECL_IGNORED_P (addr) = 0;
3699 local = build_fold_indirect_ref (addr);
3700
e79983f4 3701 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
c28f4b5c 3702 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
13e49da9
TV
3703 size_int (DECL_ALIGN (parm)));
3704
d3c12306 3705 /* The call has been built for a variable-sized object. */
63d2a353 3706 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4744afba 3707 t = fold_convert (ptr_type, t);
726a989a 3708 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4744afba
RH
3709 gimplify_and_add (t, &stmts);
3710 }
3711
726a989a 3712 gimplify_assign (local, parm, &stmts);
4744afba 3713
833b3afe
DB
3714 SET_DECL_VALUE_EXPR (parm, local);
3715 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3716 }
3717 }
3718 }
3719
9771b263 3720 fnargs.release ();
3b3f318a 3721
4744afba
RH
3722 return stmts;
3723}
75dc3319 3724\f
6f086dfc
RS
3725/* Compute the size and offset from the start of the stacked arguments for a
3726 parm passed in mode PASSED_MODE and with type TYPE.
3727
3728 INITIAL_OFFSET_PTR points to the current offset into the stacked
3729 arguments.
3730
e7949876
AM
3731 The starting offset and size for this parm are returned in
3732 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3733 nonzero, the offset is that of stack slot, which is returned in
3734 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3735 padding required from the initial offset ptr to the stack slot.
6f086dfc 3736
cc2902df 3737 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3738 never be set if REG_PARM_STACK_SPACE is not defined.
3739
2e4ceca5
UW
3740 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3741 for arguments which are passed in registers.
3742
6f086dfc
RS
3743 FNDECL is the function in which the argument was defined.
3744
3745 There are two types of rounding that are done. The first, controlled by
c2ed6cf8
NF
3746 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3747    argument list to be aligned to the specified boundary (in bits).  This
3748 rounding affects the initial and starting offsets, but not the argument
3749 size.
6f086dfc
RS
3750
3751 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3752 optionally rounds the size of the parm to PARM_BOUNDARY. The
3753 initial offset is not affected by this rounding, while the size always
3754 is and the starting offset may be. */
3755
e7949876
AM
3756/* LOCATE->OFFSET will be negative for the ARGS_GROW_DOWNWARD case;
3757 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3758 callers pass in the total size of args so far as
e7949876 3759 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3760
6f086dfc 3761void
ef4bddc2 3762locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
2e4ceca5
UW
3763 int reg_parm_stack_space, int partial,
3764 tree fndecl ATTRIBUTE_UNUSED,
fa8db1f7
AJ
3765 struct args_size *initial_offset_ptr,
3766 struct locate_and_pad_arg_data *locate)
6f086dfc 3767{
e7949876
AM
3768 tree sizetree;
3769 enum direction where_pad;
123148b5 3770 unsigned int boundary, round_boundary;
e7949876 3771 int part_size_in_regs;
6f086dfc 3772
6f086dfc
RS
3773 /* If we have found a stack parm before we reach the end of the
3774 area reserved for registers, skip that area. */
3775 if (! in_regs)
3776 {
6f086dfc
RS
3777 if (reg_parm_stack_space > 0)
3778 {
3779 if (initial_offset_ptr->var)
3780 {
3781 initial_offset_ptr->var
3782 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3783 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3784 initial_offset_ptr->constant = 0;
3785 }
3786 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3787 initial_offset_ptr->constant = reg_parm_stack_space;
3788 }
3789 }
6f086dfc 3790
78a52f11 3791 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
3792
3793 sizetree
3794 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3795 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
c2ed6cf8 3796 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
123148b5
BS
3797 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3798 type);
6e985040 3799 locate->where_pad = where_pad;
2e3f842f
L
3800
3801 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3802 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3803 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3804
bfc45551 3805 locate->boundary = boundary;
6f086dfc 3806
2e3f842f
L
3807 if (SUPPORTS_STACK_ALIGNMENT)
3808 {
3809 /* stack_alignment_estimated can't change after stack has been
3810 realigned. */
3811 if (crtl->stack_alignment_estimated < boundary)
3812 {
3813 if (!crtl->stack_realign_processed)
3814 crtl->stack_alignment_estimated = boundary;
3815 else
3816 {
3817 /* If stack is realigned and stack alignment value
3818 hasn't been finalized, it is OK not to increase
3819 stack_alignment_estimated. The bigger alignment
3820 requirement is recorded in stack_alignment_needed
3821 below. */
3822 gcc_assert (!crtl->stack_realign_finalized
3823 && crtl->stack_realign_needed);
3824 }
3825 }
3826 }
3827
c7e777b5
RH
3828 /* Remember if the outgoing parameter requires extra alignment on the
3829 calling function side. */
cb91fab0
JH
3830 if (crtl->stack_alignment_needed < boundary)
3831 crtl->stack_alignment_needed = boundary;
2e3f842f
L
3832 if (crtl->preferred_stack_boundary < boundary)
3833 crtl->preferred_stack_boundary = boundary;
c7e777b5 3834
6f086dfc 3835#ifdef ARGS_GROW_DOWNWARD
e7949876 3836 locate->slot_offset.constant = -initial_offset_ptr->constant;
6f086dfc 3837 if (initial_offset_ptr->var)
e7949876
AM
3838 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3839 initial_offset_ptr->var);
9dff28ab 3840
e7949876
AM
3841 {
3842 tree s2 = sizetree;
3843 if (where_pad != none
cc269bb6 3844 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 3845 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 3846 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
e7949876
AM
3847 SUB_PARM_SIZE (locate->slot_offset, s2);
3848 }
3849
3850 locate->slot_offset.constant += part_size_in_regs;
9dff28ab 3851
2e4ceca5 3852 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
3853 pad_to_arg_alignment (&locate->slot_offset, boundary,
3854 &locate->alignment_pad);
9dff28ab 3855
e7949876
AM
3856 locate->size.constant = (-initial_offset_ptr->constant
3857 - locate->slot_offset.constant);
6f086dfc 3858 if (initial_offset_ptr->var)
e7949876
AM
3859 locate->size.var = size_binop (MINUS_EXPR,
3860 size_binop (MINUS_EXPR,
3861 ssize_int (0),
3862 initial_offset_ptr->var),
3863 locate->slot_offset.var);
3864
3865 /* Pad_below needs the pre-rounded size to know how much to pad
3866 below. */
3867 locate->offset = locate->slot_offset;
3868 if (where_pad == downward)
3869 pad_below (&locate->offset, passed_mode, sizetree);
9dff28ab 3870
6f086dfc 3871#else /* !ARGS_GROW_DOWNWARD */
2e4ceca5 3872 if (!in_regs || reg_parm_stack_space > 0)
e7949876
AM
3873 pad_to_arg_alignment (initial_offset_ptr, boundary,
3874 &locate->alignment_pad);
3875 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
3876
3877#ifdef PUSH_ROUNDING
3878 if (passed_mode != BLKmode)
3879 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3880#endif
3881
d4b0a7a0
DE
3882 /* Pad_below needs the pre-rounded size to know how much to pad below
3883 so this must be done before rounding up. */
e7949876
AM
3884 locate->offset = locate->slot_offset;
3885 if (where_pad == downward)
3886 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 3887
6f086dfc 3888 if (where_pad != none
cc269bb6 3889 && (!tree_fits_uhwi_p (sizetree)
ae7e9ddd 3890 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
123148b5 3891 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
6f086dfc 3892
e7949876
AM
3893 ADD_PARM_SIZE (locate->size, sizetree);
3894
3895 locate->size.constant -= part_size_in_regs;
6f086dfc 3896#endif /* ARGS_GROW_DOWNWARD */
099590dc
MM
3897
3898#ifdef FUNCTION_ARG_OFFSET
3899 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3900#endif
6f086dfc
RS
3901}
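/* Worked example (hypothetical numbers, args growing upward, and
   BITS_PER_UNIT assumed to be 8; the helper name is made up): if the
   running offset is 6 bytes and the argument's boundary is 64 bits,
   pad_to_arg_alignment rounds the slot offset up to 8, leaving 2 bytes
   of alignment padding.  The PARM_BOUNDARY rounding of the size is a
   separate step, described in the comments above.  */
static int
model_round_slot_offset (int offset_bytes, int boundary_bits)
{
  int b = boundary_bits / 8;
  return (offset_bytes + b - 1) / b * b;   /* like CEIL_ROUND */
}
/* model_round_slot_offset (6, 64) == 8.  */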
3902
e16c591a
RS
3903/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3904 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3905
6f086dfc 3906static void
fa8db1f7
AJ
3907pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3908 struct args_size *alignment_pad)
6f086dfc 3909{
a544cfd2
KG
3910 tree save_var = NULL_TREE;
3911 HOST_WIDE_INT save_constant = 0;
a751cd5b 3912 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
3913 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3914
3915#ifdef SPARC_STACK_BOUNDARY_HACK
2358ff91
EB
3916 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3917 the real alignment of %sp. However, when it does this, the
3918 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
a594a19c
GK
3919 if (SPARC_STACK_BOUNDARY_HACK)
3920 sp_offset = 0;
3921#endif
4fc026cd 3922
6f6b8f81 3923 if (boundary > PARM_BOUNDARY)
4fc026cd
CM
3924 {
3925 save_var = offset_ptr->var;
3926 save_constant = offset_ptr->constant;
3927 }
3928
3929 alignment_pad->var = NULL_TREE;
3930 alignment_pad->constant = 0;
4fc026cd 3931
6f086dfc
RS
3932 if (boundary > BITS_PER_UNIT)
3933 {
3934 if (offset_ptr->var)
3935 {
a594a19c
GK
3936 tree sp_offset_tree = ssize_int (sp_offset);
3937 tree offset = size_binop (PLUS_EXPR,
3938 ARGS_SIZE_TREE (*offset_ptr),
3939 sp_offset_tree);
6f086dfc 3940#ifdef ARGS_GROW_DOWNWARD
a594a19c 3941 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
6f086dfc 3942#else
a594a19c 3943 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
6f086dfc 3944#endif
a594a19c
GK
3945
3946 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
3947 /* ARGS_SIZE_TREE includes constant term. */
3948 offset_ptr->constant = 0;
6f6b8f81 3949 if (boundary > PARM_BOUNDARY)
dd3f0101 3950 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 3951 save_var);
6f086dfc
RS
3952 }
3953 else
718fe406 3954 {
a594a19c 3955 offset_ptr->constant = -sp_offset +
6f086dfc 3956#ifdef ARGS_GROW_DOWNWARD
a594a19c 3957 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3958#else
a594a19c 3959 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
6f086dfc 3960#endif
6f6b8f81 3961 if (boundary > PARM_BOUNDARY)
718fe406
KH
3962 alignment_pad->constant = offset_ptr->constant - save_constant;
3963 }
6f086dfc
RS
3964 }
3965}
3966
3967static void
ef4bddc2 3968pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
6f086dfc
RS
3969{
3970 if (passed_mode != BLKmode)
3971 {
3972 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3973 offset_ptr->constant
3974 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3975 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3976 - GET_MODE_SIZE (passed_mode));
3977 }
3978 else
3979 {
3980 if (TREE_CODE (sizetree) != INTEGER_CST
3981 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3982 {
3983 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3984 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3985 /* Add it in. */
3986 ADD_PARM_SIZE (*offset_ptr, s2);
3987 SUB_PARM_SIZE (*offset_ptr, sizetree);
3988 }
3989 }
3990}
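/* Worked example for the non-BLKmode branch above (hypothetical numbers,
   BITS_PER_UNIT assumed to be 8; the helper name is made up): with
   PARM_BOUNDARY == 32 and a 16-bit passed_mode, the size rounded up to
   the boundary is 4 bytes while the mode occupies 2, so 2 bytes of pad
   are added below the argument.  */
static int
model_pad_below_bytes (int mode_bits, int parm_boundary_bits)
{
  int rounded_bytes = (mode_bits + parm_boundary_bits - 1)
                      / parm_boundary_bits * parm_boundary_bits / 8;
  return rounded_bytes - mode_bits / 8;    /* 2 for (16, 32) */
}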
6f086dfc 3991\f
6f086dfc 3992
6fb5fa3c
DB
3993/* True if register REGNO was alive at a place where `setjmp' was
3994 called and was set more than once or is an argument. Such regs may
3995 be clobbered by `longjmp'. */
3996
3997static bool
3998regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3999{
4000 /* There appear to be cases where some local vars never reach the
4001 backend but have bogus regnos. */
4002 if (regno >= max_reg_num ())
4003 return false;
4004
4005 return ((REG_N_SETS (regno) > 1
fefa31b5
DM
4006 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4007 regno))
6fb5fa3c
DB
4008 && REGNO_REG_SET_P (setjmp_crosses, regno));
4009}
4010
4011/* Walk the tree of blocks describing the binding levels within a
4012   function and warn about variables that might be killed by setjmp or
4013   vfork.  This is done after flow analysis and before register
4014   allocation, since register allocation will map the pseudo-regs to hard
4015 regs. */
4016
4017static void
4018setjmp_vars_warning (bitmap setjmp_crosses, tree block)
6f086dfc 4019{
b3694847 4020 tree decl, sub;
6de9cd9a 4021
910ad8de 4022 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
6f086dfc 4023 {
6de9cd9a 4024 if (TREE_CODE (decl) == VAR_DECL
bc41842b 4025 && DECL_RTL_SET_P (decl)
f8cfc6aa 4026 && REG_P (DECL_RTL (decl))
6fb5fa3c 4027 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4028 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
2b001724 4029 " %<longjmp%> or %<vfork%>", decl);
6f086dfc 4030 }
6de9cd9a 4031
87caf699 4032 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
6fb5fa3c 4033 setjmp_vars_warning (setjmp_crosses, sub);
6f086dfc
RS
4034}
4035
6de9cd9a 4036/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
4037 but for arguments instead of local variables. */
4038
6fb5fa3c
DB
4039static void
4040setjmp_args_warning (bitmap setjmp_crosses)
6f086dfc 4041{
b3694847 4042 tree decl;
6f086dfc 4043 for (decl = DECL_ARGUMENTS (current_function_decl);
910ad8de 4044 decl; decl = DECL_CHAIN (decl))
6f086dfc 4045 if (DECL_RTL (decl) != 0
f8cfc6aa 4046 && REG_P (DECL_RTL (decl))
6fb5fa3c 4047 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4048 warning (OPT_Wclobbered,
2b001724 4049 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
dee15844 4050 decl);
6f086dfc
RS
4051}
4052
6fb5fa3c
DB
4053/* Generate warning messages for variables live across setjmp. */
4054
b8698a0f 4055void
6fb5fa3c
DB
4056generate_setjmp_warnings (void)
4057{
4058 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4059
0cae8d31 4060 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
6fb5fa3c
DB
4061 || bitmap_empty_p (setjmp_crosses))
4062 return;
4063
4064 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4065 setjmp_args_warning (setjmp_crosses);
4066}
4067
6f086dfc 4068\f
3373692b 4069/* Reverse the order of elements in the fragment chain T of blocks,
1e3c1d95
JJ
4070 and return the new head of the chain (old last element).
4071 In addition to that clear BLOCK_SAME_RANGE flags when needed
4072 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4073 its super fragment origin. */
3373692b
JJ
4074
4075static tree
4076block_fragments_nreverse (tree t)
4077{
1e3c1d95
JJ
4078 tree prev = 0, block, next, prev_super = 0;
4079 tree super = BLOCK_SUPERCONTEXT (t);
4080 if (BLOCK_FRAGMENT_ORIGIN (super))
4081 super = BLOCK_FRAGMENT_ORIGIN (super);
3373692b
JJ
4082 for (block = t; block; block = next)
4083 {
4084 next = BLOCK_FRAGMENT_CHAIN (block);
4085 BLOCK_FRAGMENT_CHAIN (block) = prev;
1e3c1d95
JJ
4086 if ((prev && !BLOCK_SAME_RANGE (prev))
4087 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4088 != prev_super))
4089 BLOCK_SAME_RANGE (block) = 0;
4090 prev_super = BLOCK_SUPERCONTEXT (block);
4091 BLOCK_SUPERCONTEXT (block) = super;
3373692b
JJ
4092 prev = block;
4093 }
1e3c1d95
JJ
4094 t = BLOCK_FRAGMENT_ORIGIN (t);
4095 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4096 != prev_super)
4097 BLOCK_SAME_RANGE (t) = 0;
4098 BLOCK_SUPERCONTEXT (t) = super;
3373692b
JJ
4099 return prev;
4100}
4101
4102/* Reverse the order of elements in the chain T of blocks,
4103 and return the new head of the chain (old last element).
4104 Also do the same on subblocks and reverse the order of elements
4105   in each BLOCK_FRAGMENT_CHAIN.  */
4106
4107static tree
4108blocks_nreverse_all (tree t)
4109{
4110 tree prev = 0, block, next;
4111 for (block = t; block; block = next)
4112 {
4113 next = BLOCK_CHAIN (block);
4114 BLOCK_CHAIN (block) = prev;
3373692b
JJ
4115 if (BLOCK_FRAGMENT_CHAIN (block)
4116 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
1e3c1d95
JJ
4117 {
4118 BLOCK_FRAGMENT_CHAIN (block)
4119 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4120 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4121 BLOCK_SAME_RANGE (block) = 0;
4122 }
4123 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
3373692b
JJ
4124 prev = block;
4125 }
4126 return prev;
4127}
4128
4129
a20612aa
RH
4130/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4131 and create duplicate blocks. */
4132/* ??? Need an option to either create block fragments or to create
4133 abstract origin duplicates of a source block. It really depends
4134 on what optimization has been performed. */
467456d0 4135
116eebd6 4136void
fa8db1f7 4137reorder_blocks (void)
467456d0 4138{
116eebd6 4139 tree block = DECL_INITIAL (current_function_decl);
467456d0 4140
1a4450c7 4141 if (block == NULL_TREE)
116eebd6 4142 return;
fc289cd1 4143
00f96dc9 4144 auto_vec<tree, 10> block_stack;
18c038b9 4145
a20612aa 4146 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 4147 clear_block_marks (block);
a20612aa 4148
116eebd6
MM
4149 /* Prune the old trees away, so that they don't get in the way. */
4150 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4151 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 4152
a20612aa 4153 /* Recreate the block tree from the note nesting. */
116eebd6 4154 reorder_blocks_1 (get_insns (), block, &block_stack);
3373692b 4155 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
467456d0
RS
4156}
4157
a20612aa 4158/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 4159
6de9cd9a
DN
4160void
4161clear_block_marks (tree block)
cc1fe44f 4162{
a20612aa 4163 while (block)
cc1fe44f 4164 {
a20612aa 4165 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 4166 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 4167 block = BLOCK_CHAIN (block);
cc1fe44f
DD
4168 }
4169}
4170
0a1c58a2 4171static void
691fe203
DM
4172reorder_blocks_1 (rtx_insn *insns, tree current_block,
4173 vec<tree> *p_block_stack)
0a1c58a2 4174{
691fe203 4175 rtx_insn *insn;
1e3c1d95 4176 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
0a1c58a2
JL
4177
4178 for (insn = insns; insn; insn = NEXT_INSN (insn))
4179 {
4b4bf941 4180 if (NOTE_P (insn))
0a1c58a2 4181 {
a38e7aa5 4182 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
0a1c58a2
JL
4183 {
4184 tree block = NOTE_BLOCK (insn);
51b7d006
DJ
4185 tree origin;
4186
3373692b
JJ
4187 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4188 origin = block;
a20612aa 4189
1e3c1d95
JJ
4190 if (prev_end)
4191 BLOCK_SAME_RANGE (prev_end) = 0;
4192 prev_end = NULL_TREE;
4193
a20612aa
RH
4194 /* If we have seen this block before, that means it now
4195 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
4196 if (TREE_ASM_WRITTEN (block))
4197 {
a20612aa 4198 tree new_block = copy_node (block);
a20612aa 4199
1e3c1d95 4200 BLOCK_SAME_RANGE (new_block) = 0;
a20612aa
RH
4201 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4202 BLOCK_FRAGMENT_CHAIN (new_block)
4203 = BLOCK_FRAGMENT_CHAIN (origin);
4204 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4205
4206 NOTE_BLOCK (insn) = new_block;
4207 block = new_block;
0a1c58a2 4208 }
a20612aa 4209
1e3c1d95
JJ
4210 if (prev_beg == current_block && prev_beg)
4211 BLOCK_SAME_RANGE (block) = 1;
4212
4213 prev_beg = origin;
4214
0a1c58a2
JL
4215 BLOCK_SUBBLOCKS (block) = 0;
4216 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
4217 /* When there's only one block for the entire function,
4218 current_block == block and we mustn't do this, it
4219 will cause infinite recursion. */
4220 if (block != current_block)
4221 {
1e3c1d95 4222 tree super;
51b7d006 4223 if (block != origin)
1e3c1d95
JJ
4224 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4225 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4226 (origin))
4227 == current_block);
9771b263 4228 if (p_block_stack->is_empty ())
1e3c1d95
JJ
4229 super = current_block;
4230 else
4231 {
9771b263 4232 super = p_block_stack->last ();
1e3c1d95
JJ
4233 gcc_assert (super == current_block
4234 || BLOCK_FRAGMENT_ORIGIN (super)
4235 == current_block);
4236 }
4237 BLOCK_SUPERCONTEXT (block) = super;
339a28b9
ZW
4238 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4239 BLOCK_SUBBLOCKS (current_block) = block;
51b7d006 4240 current_block = origin;
339a28b9 4241 }
9771b263 4242 p_block_stack->safe_push (block);
0a1c58a2 4243 }
a38e7aa5 4244 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
0a1c58a2 4245 {
9771b263 4246 NOTE_BLOCK (insn) = p_block_stack->pop ();
0a1c58a2 4247 current_block = BLOCK_SUPERCONTEXT (current_block);
1e3c1d95
JJ
4248 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4249 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4250 prev_beg = NULL_TREE;
4251 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4252 ? NOTE_BLOCK (insn) : NULL_TREE;
0a1c58a2
JL
4253 }
4254 }
1e3c1d95
JJ
4255 else
4256 {
4257 prev_beg = NULL_TREE;
4258 if (prev_end)
4259 BLOCK_SAME_RANGE (prev_end) = 0;
4260 prev_end = NULL_TREE;
4261 }
0a1c58a2
JL
4262 }
4263}
4264
467456d0
RS
4265/* Reverse the order of elements in the chain T of blocks,
4266 and return the new head of the chain (old last element). */
4267
6de9cd9a 4268tree
fa8db1f7 4269blocks_nreverse (tree t)
467456d0 4270{
3373692b
JJ
4271 tree prev = 0, block, next;
4272 for (block = t; block; block = next)
467456d0 4273 {
3373692b
JJ
4274 next = BLOCK_CHAIN (block);
4275 BLOCK_CHAIN (block) = prev;
4276 prev = block;
467456d0
RS
4277 }
4278 return prev;
4279}
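/* Standalone illustration of the reversal pattern used above (the type
   and function names here are hypothetical, not GCC's): the chain is
   reversed in place by walking it once and pointing each link at the
   previous element.  */
struct chain_node
{
  struct chain_node *next;
  int payload;
};

static struct chain_node *
chain_nreverse (struct chain_node *t)
{
  struct chain_node *prev = 0, *node, *next;
  for (node = t; node; node = next)
    {
      next = node->next;
      node->next = prev;    /* point the link backwards */
      prev = node;          /* PREV is the new head so far */
    }
  return prev;              /* the old last element is the new head */
}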
4280
61e46a7d
NF
4281/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4282 by modifying the last node in chain 1 to point to chain 2. */
4283
4284tree
4285block_chainon (tree op1, tree op2)
4286{
4287 tree t1;
4288
4289 if (!op1)
4290 return op2;
4291 if (!op2)
4292 return op1;
4293
4294 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4295 continue;
4296 BLOCK_CHAIN (t1) = op2;
4297
4298#ifdef ENABLE_TREE_CHECKING
4299 {
4300 tree t2;
4301 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4302 gcc_assert (t2 != t1);
4303 }
4304#endif
4305
4306 return op1;
4307}
4308
18c038b9
MM
4309/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4310 non-NULL, list them all into VECTOR, in a depth-first preorder
4311 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 4312 blocks. */
467456d0
RS
4313
4314static int
fa8db1f7 4315all_blocks (tree block, tree *vector)
467456d0 4316{
b2a59b15
MS
4317 int n_blocks = 0;
4318
a84efb51
JO
4319 while (block)
4320 {
4321 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 4322
a84efb51
JO
4323 /* Record this block. */
4324 if (vector)
4325 vector[n_blocks] = block;
b2a59b15 4326
a84efb51 4327 ++n_blocks;
718fe406 4328
a84efb51
JO
4329 /* Record the subblocks, and their subblocks... */
4330 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4331 vector ? vector + n_blocks : 0);
4332 block = BLOCK_CHAIN (block);
4333 }
467456d0
RS
4334
4335 return n_blocks;
4336}
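/* Illustrative sketch of the count-then-fill idiom that get_block_vector
   below builds on top of all_blocks (names and element type here are
   hypothetical): walk once with a null vector to count, allocate, then
   walk again to fill.  */
#include <stdlib.h>

static int
collect_evens (int limit, int *vector)       /* stand-in for all_blocks */
{
  int count = 0, i;
  for (i = 0; i < limit; i += 2)
    {
      if (vector)
        vector[count] = i;
      ++count;
    }
  return count;
}

static int *
collect_evens_vector (int limit, int *n_out)
{
  int *vec;
  *n_out = collect_evens (limit, NULL);      /* first pass: count only */
  vec = (int *) malloc (*n_out * sizeof (int));
  collect_evens (limit, vec);                /* second pass: fill */
  return vec;
}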
18c038b9
MM
4337
4338/* Return a vector containing all the blocks rooted at BLOCK. The
4339 number of elements in the vector is stored in N_BLOCKS_P. The
4340 vector is dynamically allocated; it is the caller's responsibility
4341 to call `free' on the pointer returned. */
718fe406 4342
18c038b9 4343static tree *
fa8db1f7 4344get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
4345{
4346 tree *block_vector;
4347
4348 *n_blocks_p = all_blocks (block, NULL);
5ed6ace5 4349 block_vector = XNEWVEC (tree, *n_blocks_p);
18c038b9
MM
4350 all_blocks (block, block_vector);
4351
4352 return block_vector;
4353}
4354
f83b236e 4355static GTY(()) int next_block_index = 2;
18c038b9
MM
4356
4357/* Set BLOCK_NUMBER for all the blocks in FN. */
4358
4359void
fa8db1f7 4360number_blocks (tree fn)
18c038b9
MM
4361{
4362 int i;
4363 int n_blocks;
4364 tree *block_vector;
4365
4366 /* For SDB and XCOFF debugging output, we start numbering the blocks
4367 from 1 within each function, rather than keeping a running
4368 count. */
4369#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
4370 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4371 next_block_index = 1;
18c038b9
MM
4372#endif
4373
4374 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4375
4376 /* The top-level BLOCK isn't numbered at all. */
4377 for (i = 1; i < n_blocks; ++i)
4378 /* We number the blocks from two. */
4379 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4380
4381 free (block_vector);
4382
4383 return;
4384}
df8992f8
RH
4385
4386/* If VAR is present in a subblock of BLOCK, return the subblock. */
4387
24e47c76 4388DEBUG_FUNCTION tree
fa8db1f7 4389debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
4390{
4391 tree t;
4392
4393 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4394 if (t == var)
4395 return block;
4396
4397 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4398 {
4399 tree ret = debug_find_var_in_block_tree (var, t);
4400 if (ret)
4401 return ret;
4402 }
4403
4404 return NULL_TREE;
4405}
467456d0 4406\f
db2960f4
SL
4407/* Keep track of whether we're in a dummy function context. If we are,
4408 we don't want to invoke the set_current_function hook, because we'll
4409 get into trouble if the hook calls target_reinit () recursively or
4410 when the initial initialization is not yet complete. */
4411
4412static bool in_dummy_function;
4413
ab442df7
MM
4414/* Invoke the target hook when setting cfun. Update the optimization options
4415 if the function uses different options than the default. */
db2960f4
SL
4416
4417static void
4418invoke_set_current_function_hook (tree fndecl)
4419{
4420 if (!in_dummy_function)
ab442df7
MM
4421 {
4422 tree opts = ((fndecl)
4423 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4424 : optimization_default_node);
4425
4426 if (!opts)
4427 opts = optimization_default_node;
4428
4429 /* Change optimization options if needed. */
4430 if (optimization_current_node != opts)
4431 {
4432 optimization_current_node = opts;
46625112 4433 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
ab442df7
MM
4434 }
4435
892c4745 4436 targetm.set_current_function (fndecl);
4b1baac8 4437 this_fn_optabs = this_target_optabs;
135204dd 4438
4b1baac8 4439 if (opts != optimization_default_node)
135204dd 4440 {
4b1baac8
RS
4441 init_tree_optimization_optabs (opts);
4442 if (TREE_OPTIMIZATION_OPTABS (opts))
4443 this_fn_optabs = (struct target_optabs *)
4444 TREE_OPTIMIZATION_OPTABS (opts);
135204dd 4445 }
ab442df7 4446 }
db2960f4
SL
4447}
4448
4449/* cfun should never be set directly; use this function. */
4450
4451void
4452set_cfun (struct function *new_cfun)
4453{
4454 if (cfun != new_cfun)
4455 {
4456 cfun = new_cfun;
4457 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4458 }
4459}
4460
db2960f4
SL
4461/* Initialized with NOGC, making this poisonous to the garbage collector. */
4462
9771b263 4463static vec<function_p> cfun_stack;
db2960f4 4464
af16bc76
MJ
4465/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4466 current_function_decl accordingly. */
db2960f4
SL
4467
4468void
4469push_cfun (struct function *new_cfun)
4470{
af16bc76
MJ
4471 gcc_assert ((!cfun && !current_function_decl)
4472 || (cfun && current_function_decl == cfun->decl));
9771b263 4473 cfun_stack.safe_push (cfun);
af16bc76 4474 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4
SL
4475 set_cfun (new_cfun);
4476}
4477
af16bc76 4478/* Pop cfun from the stack. Also set current_function_decl accordingly. */
db2960f4
SL
4479
4480void
4481pop_cfun (void)
4482{
9771b263 4483 struct function *new_cfun = cfun_stack.pop ();
af16bc76
MJ
4484 /* When in_dummy_function, we do have a cfun but current_function_decl is
4485 NULL. We also allow pushing NULL cfun and subsequently changing
4486 current_function_decl to something else and have both restored by
4487 pop_cfun. */
4488 gcc_checking_assert (in_dummy_function
4489 || !cfun
4490 || current_function_decl == cfun->decl);
38d34676 4491 set_cfun (new_cfun);
af16bc76 4492 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4 4493}
3e87758a
RL
4494
4495/* Return the current value of funcdef_no and increment it.  */
4496int
b8698a0f 4497get_next_funcdef_no (void)
3e87758a
RL
4498{
4499 return funcdef_no++;
4500}
4501
903d1e67
XDL
4502/* Return the current value of funcdef_no.  */
4503int
4504get_last_funcdef_no (void)
4505{
4506 return funcdef_no;
4507}
4508
3a70d621 4509/* Allocate a function structure for FNDECL and set its contents
db2960f4
SL
4510 to the defaults. Set cfun to the newly-allocated object.
4511 Some of the helper functions invoked during initialization assume
4512 that cfun has already been set. Therefore, assign the new object
4513 directly into cfun and invoke the back end hook explicitly at the
4514 very end, rather than initializing a temporary and calling set_cfun
4515 on it.
182e0d71
AK
4516
4517 ABSTRACT_P is true if this is a function that will never be seen by
4518 the middle-end. Such functions are front-end concepts (like C++
4519 function templates) that do not correspond directly to functions
4520 placed in object files. */
7a80cf9a 4521
3a70d621 4522void
182e0d71 4523allocate_struct_function (tree fndecl, bool abstract_p)
6f086dfc 4524{
6de9cd9a 4525 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 4526
766090c2 4527 cfun = ggc_cleared_alloc<function> ();
b384405b 4528
3a70d621 4529 init_eh_for_function ();
6f086dfc 4530
3a70d621
RH
4531 if (init_machine_status)
4532 cfun->machine = (*init_machine_status) ();
e2ecd91c 4533
7c800926
KT
4534#ifdef OVERRIDE_ABI_FORMAT
4535 OVERRIDE_ABI_FORMAT (fndecl);
4536#endif
4537
81464b2c 4538 if (fndecl != NULL_TREE)
3a70d621 4539 {
db2960f4
SL
4540 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4541 cfun->decl = fndecl;
70cf5bc1 4542 current_function_funcdef_no = get_next_funcdef_no ();
5b9db1bc
MJ
4543 }
4544
4545 invoke_set_current_function_hook (fndecl);
db2960f4 4546
5b9db1bc
MJ
4547 if (fndecl != NULL_TREE)
4548 {
4549 tree result = DECL_RESULT (fndecl);
182e0d71 4550 if (!abstract_p && aggregate_value_p (result, fndecl))
db2960f4 4551 {
3a70d621 4552#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4553 cfun->returns_pcc_struct = 1;
3a70d621 4554#endif
e3b5732b 4555 cfun->returns_struct = 1;
db2960f4
SL
4556 }
4557
f38958e8 4558 cfun->stdarg = stdarg_p (fntype);
b8698a0f 4559
db2960f4
SL
4560 /* Assume all registers in stdarg functions need to be saved. */
4561 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4562 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
8f4f502f
EB
4563
4564 /* ??? This could be set on a per-function basis by the front-end
4565 but is this worth the hassle? */
4566 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
d764963b 4567 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
0b37ba8a
AK
4568
4569 if (!profile_flag && !flag_instrument_function_entry_exit)
4570 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
3a70d621 4571 }
db2960f4
SL
4572}
4573
4574/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4575 instead of just setting it. */
9d30f3c1 4576
db2960f4
SL
4577void
4578push_struct_function (tree fndecl)
4579{
af16bc76
MJ
4580 /* When in_dummy_function we might be in the middle of a pop_cfun and
4581 current_function_decl and cfun may not match. */
4582 gcc_assert (in_dummy_function
4583 || (!cfun && !current_function_decl)
4584 || (cfun && current_function_decl == cfun->decl));
9771b263 4585 cfun_stack.safe_push (cfun);
af16bc76 4586 current_function_decl = fndecl;
182e0d71 4587 allocate_struct_function (fndecl, false);
3a70d621 4588}
6f086dfc 4589
8f4f502f 4590/* Reset crtl and other non-struct-function variables to defaults as
2067c116 4591 appropriate for emitting rtl at the start of a function. */
6f086dfc 4592
3a70d621 4593static void
db2960f4 4594prepare_function_start (void)
3a70d621 4595{
3e029763 4596 gcc_assert (!crtl->emit.x_last_insn);
fb0703f7 4597 init_temp_slots ();
0de456a5 4598 init_emit ();
bd60bab2 4599 init_varasm_status ();
0de456a5 4600 init_expr ();
bf08ebeb 4601 default_rtl_profile ();
6f086dfc 4602
a11e0df4 4603 if (flag_stack_usage_info)
d3c12306 4604 {
766090c2 4605 cfun->su = ggc_cleared_alloc<stack_usage> ();
d3c12306
EB
4606 cfun->su->static_stack_size = -1;
4607 }
4608
3a70d621 4609 cse_not_expected = ! optimize;
6f086dfc 4610
3a70d621
RH
4611 /* Caller save not needed yet. */
4612 caller_save_needed = 0;
6f086dfc 4613
3a70d621
RH
4614 /* We haven't done register allocation yet. */
4615 reg_renumber = 0;
6f086dfc 4616
b384405b
BS
4617 /* Indicate that we have not instantiated virtual registers yet. */
4618 virtuals_instantiated = 0;
4619
1b3d8f8a
GK
4620 /* Indicate that we want CONCATs now. */
4621 generating_concat_p = 1;
4622
b384405b
BS
4623 /* Indicate we have no need of a frame pointer yet. */
4624 frame_pointer_needed = 0;
b384405b
BS
4625}
4626
4627/* Initialize the rtl expansion mechanism so that we can do simple things
4628 like generate sequences. This is used to provide a context during global
db2960f4
SL
4629 initialization of some passes. You must call expand_dummy_function_end
4630 to exit this context. */
4631
b384405b 4632void
fa8db1f7 4633init_dummy_function_start (void)
b384405b 4634{
db2960f4
SL
4635 gcc_assert (!in_dummy_function);
4636 in_dummy_function = true;
4637 push_struct_function (NULL_TREE);
4638 prepare_function_start ();
b384405b
BS
4639}
4640
4641/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4642 and initialize static variables for generating RTL for the statements
4643 of the function. */
4644
4645void
fa8db1f7 4646init_function_start (tree subr)
b384405b 4647{
db2960f4
SL
4648 if (subr && DECL_STRUCT_FUNCTION (subr))
4649 set_cfun (DECL_STRUCT_FUNCTION (subr));
4650 else
182e0d71 4651 allocate_struct_function (subr, false);
b9b5f433
JH
4652
4653 /* Initialize backend, if needed. */
4654 initialize_rtl ();
4655
db2960f4 4656 prepare_function_start ();
2c7eebae 4657 decide_function_section (subr);
b384405b 4658
6f086dfc
RS
4659 /* Warn if this value is an aggregate type,
4660 regardless of which calling convention we are using for it. */
ccf08a6e
DD
4661 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4662 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 4663}
5c7675e9 4664
7d69de61
RH
4665/* Expand code to verify the stack_protect_guard. This is invoked at
4666 the end of a function to be protected. */
4667
4668#ifndef HAVE_stack_protect_test
b76be05e
JJ
4669# define HAVE_stack_protect_test 0
4670# define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
7d69de61
RH
4671#endif
4672
b755446c 4673void
7d69de61
RH
4674stack_protect_epilogue (void)
4675{
4676 tree guard_decl = targetm.stack_protect_guard ();
19f8b229 4677 rtx_code_label *label = gen_label_rtx ();
7d69de61
RH
4678 rtx x, y, tmp;
4679
08d4cc33
RH
4680 x = expand_normal (crtl->stack_protect_guard);
4681 y = expand_normal (guard_decl);
7d69de61
RH
4682
4683 /* Allow the target to compare Y with X without leaking either into
4684 a register. */
fedfecef 4685 switch ((int) (HAVE_stack_protect_test != 0))
7d69de61
RH
4686 {
4687 case 1:
3aebbe5f 4688 tmp = gen_stack_protect_test (x, y, label);
7d69de61
RH
4689 if (tmp)
4690 {
4691 emit_insn (tmp);
7d69de61
RH
4692 break;
4693 }
4694 /* FALLTHRU */
4695
4696 default:
4697 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4698 break;
4699 }
4700
4701 /* The noreturn predictor has been moved to the tree level. The rtl-level
4702 predictors estimate this branch about 20%, which isn't enough to get
4703 things moved out of line. Since this is the only extant case of adding
4704 a noreturn function at the rtl level, it doesn't seem worth doing anything
4705 except adding the prediction by hand. */
4706 tmp = get_last_insn ();
4707 if (JUMP_P (tmp))
9f215bf5 4708 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
7d69de61 4709
b3c144a3
SB
4710 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4711 free_temp_slots ();
7d69de61
RH
4712 emit_label (label);
4713}
4714\f
6f086dfc
RS
4715/* Start the RTL for a new function, and set variables used for
4716 emitting RTL.
4717 SUBR is the FUNCTION_DECL node. */
4720
4721void
b79c5284 4722expand_function_start (tree subr)
6f086dfc 4723{
6f086dfc
RS
4724 /* Make sure volatile mem refs aren't considered
4725 valid operands of arithmetic insns. */
4726 init_recog_no_volatile ();
4727
e3b5732b 4728 crtl->profile
70f4f91c
WC
4729 = (profile_flag
4730 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4731
e3b5732b 4732 crtl->limit_stack
a157febd
GK
4733 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4734
52a11cbf
RH
4735 /* Make the label for return statements to jump to. Do not special
4736 case machines with special return instructions -- they will be
4737 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 4738 return_label = gen_label_rtx ();
6f086dfc
RS
4739
4740 /* Initialize rtx used to return the value. */
4741 /* Do this before assign_parms so that we copy the struct value address
4742 before any library calls that assign parms might generate. */
4743
4744 /* Decide whether to return the value in memory or in a register. */
61f71b34 4745 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
4746 {
4747 /* Returning something that won't go in a register. */
b3694847 4748 rtx value_address = 0;
6f086dfc
RS
4749
4750#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4751 if (cfun->returns_pcc_struct)
6f086dfc
RS
4752 {
4753 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4754 value_address = assemble_static_space (size);
4755 }
4756 else
4757#endif
4758 {
2225b57c 4759 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
6f086dfc
RS
4760 /* Expect to be passed the address of a place to store the value.
4761 If it is passed as an argument, assign_parms will take care of
4762 it. */
61f71b34 4763 if (sv)
6f086dfc
RS
4764 {
4765 value_address = gen_reg_rtx (Pmode);
61f71b34 4766 emit_move_insn (value_address, sv);
6f086dfc
RS
4767 }
4768 }
4769 if (value_address)
ccdecf58 4770 {
01c98570
JM
4771 rtx x = value_address;
4772 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4773 {
4774 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4775 set_mem_attributes (x, DECL_RESULT (subr), 1);
4776 }
abde42f7 4777 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 4778 }
6f086dfc
RS
4779 }
4780 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4781 /* If return mode is void, this decl rtl should not be used. */
19e7881c 4782 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 4783 else
a53e14c0 4784 {
d5bf1143
RH
4785 /* Compute the return values into a pseudo reg, which we will copy
4786 into the true return register after the cleanups are done. */
bef5d8b6
RS
4787 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4788 if (TYPE_MODE (return_type) != BLKmode
4789 && targetm.calls.return_in_msb (return_type))
4790 /* expand_function_end will insert the appropriate padding in
4791 this case. Use the return value's natural (unpadded) mode
4792 within the function proper. */
4793 SET_DECL_RTL (DECL_RESULT (subr),
4794 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 4795 else
0bccc606 4796 {
bef5d8b6
RS
4797 /* In order to figure out what mode to use for the pseudo, we
4798 figure out what the mode of the eventual return register will
4799 actually be, and use that. */
1d636cc6 4800 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
bef5d8b6
RS
4801
4802 /* Structures that are returned in registers are not
4803 aggregate_value_p, so we may see a PARALLEL or a REG. */
4804 if (REG_P (hard_reg))
4805 SET_DECL_RTL (DECL_RESULT (subr),
4806 gen_reg_rtx (GET_MODE (hard_reg)));
4807 else
4808 {
4809 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4810 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4811 }
0bccc606 4812 }
a53e14c0 4813
084a1106
JDA
4814 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4815 result to the real return register(s). */
4816 DECL_REGISTER (DECL_RESULT (subr)) = 1;
a53e14c0 4817 }
6f086dfc
RS
4818
4819 /* Initialize rtx for parameters and local variables.
4820 In some cases this requires emitting insns. */
0d1416c6 4821 assign_parms (subr);
6f086dfc 4822
6de9cd9a
DN
4823 /* If function gets a static chain arg, store it. */
4824 if (cfun->static_chain_decl)
4825 {
7e140280 4826 tree parm = cfun->static_chain_decl;
531ca746 4827 rtx local, chain, insn;
7e140280 4828
531ca746
RH
4829 local = gen_reg_rtx (Pmode);
4830 chain = targetm.calls.static_chain (current_function_decl, true);
4831
4832 set_decl_incoming_rtl (parm, chain, false);
7e140280 4833 SET_DECL_RTL (parm, local);
7e140280 4834 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 4835
531ca746
RH
4836 insn = emit_move_insn (local, chain);
4837
4838 /* Mark the register as eliminable, similar to parameters. */
4839 if (MEM_P (chain)
4840 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
7543f918 4841 set_dst_reg_note (insn, REG_EQUIV, chain, local);
3fd48b12
EB
4842
4843 /* If we aren't optimizing, save the static chain onto the stack. */
4844 if (!optimize)
4845 {
4846 tree saved_static_chain_decl
4847 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
4848 DECL_NAME (parm), TREE_TYPE (parm));
4849 rtx saved_static_chain_rtx
4850 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4851 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
4852 emit_move_insn (saved_static_chain_rtx, chain);
4853 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
4854 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4855 }
6de9cd9a
DN
4856 }
4857
4858 /* If the function receives a non-local goto, then store the
4859 bits we need to restore the frame pointer. */
4860 if (cfun->nonlocal_goto_save_area)
4861 {
4862 tree t_save;
4863 rtx r_save;
4864
4846b435 4865 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
ca5f4331 4866 gcc_assert (DECL_RTL_SET_P (var));
6de9cd9a 4867
6bbec3e1
L
4868 t_save = build4 (ARRAY_REF,
4869 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
3244e67d
RS
4870 cfun->nonlocal_goto_save_area,
4871 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 4872 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
6bbec3e1 4873 gcc_assert (GET_MODE (r_save) == Pmode);
f0c51a1e 4874
88280cf9 4875 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
6de9cd9a
DN
4876 update_nonlocal_goto_save_area ();
4877 }
f0c51a1e 4878
6f086dfc
RS
4879 /* The following was moved from init_function_start.
4880 The move is supposed to make sdb output more accurate. */
4881 /* Indicate the beginning of the function body,
4882 as opposed to parm setup. */
2e040219 4883 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 4884
ede497cf
SB
4885 gcc_assert (NOTE_P (get_last_insn ()));
4886
6f086dfc
RS
4887 parm_birth_insn = get_last_insn ();
4888
e3b5732b 4889 if (crtl->profile)
f6f315fe 4890 {
f6f315fe 4891#ifdef PROFILE_HOOK
df696a75 4892 PROFILE_HOOK (current_function_funcdef_no);
411707f4 4893#endif
f6f315fe 4894 }
411707f4 4895
6d3cc8f0
EB
4896 /* If we are doing generic stack checking, the probe should go here. */
4897 if (flag_stack_check == GENERIC_STACK_CHECK)
ede497cf 4898 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
4899}
4900\f
49ad7cfa
BS
4901/* Undo the effects of init_dummy_function_start. */
4902void
fa8db1f7 4903expand_dummy_function_end (void)
49ad7cfa 4904{
db2960f4
SL
4905 gcc_assert (in_dummy_function);
4906
49ad7cfa
BS
4907 /* End any sequences that failed to be closed due to syntax errors. */
4908 while (in_sequence_p ())
4909 end_sequence ();
4910
4911 /* Outside function body, can't compute type's actual size
4912 until next function's body starts. */
fa51b01b 4913
01d939e8
BS
4914 free_after_parsing (cfun);
4915 free_after_compilation (cfun);
db2960f4
SL
4916 pop_cfun ();
4917 in_dummy_function = false;
49ad7cfa
BS
4918}
4919
c13fde05
RH
4920/* Call DOIT for each hard register used as a return value from
4921 the current function. */
bd695e1e
RH
4922
4923void
fa8db1f7 4924diddle_return_value (void (*doit) (rtx, void *), void *arg)
bd695e1e 4925{
38173d38 4926 rtx outgoing = crtl->return_rtx;
c13fde05
RH
4927
4928 if (! outgoing)
4929 return;
bd695e1e 4930
f8cfc6aa 4931 if (REG_P (outgoing))
c13fde05
RH
4932 (*doit) (outgoing, arg);
4933 else if (GET_CODE (outgoing) == PARALLEL)
4934 {
4935 int i;
bd695e1e 4936
c13fde05
RH
4937 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4938 {
4939 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4940
f8cfc6aa 4941 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 4942 (*doit) (x, arg);
bd695e1e
RH
4943 }
4944 }
4945}
4946
c13fde05 4947static void
fa8db1f7 4948do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4949{
c41c1387 4950 emit_clobber (reg);
c13fde05
RH
4951}
4952
4953void
fa8db1f7 4954clobber_return_register (void)
c13fde05
RH
4955{
4956 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
4957
4958 /* In case we do use pseudo to return value, clobber it too. */
4959 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4960 {
4961 tree decl_result = DECL_RESULT (current_function_decl);
4962 rtx decl_rtl = DECL_RTL (decl_result);
4963 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4964 {
4965 do_clobber_return_reg (decl_rtl, NULL);
4966 }
4967 }
c13fde05
RH
4968}
4969
4970static void
fa8db1f7 4971do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 4972{
c41c1387 4973 emit_use (reg);
c13fde05
RH
4974}
4975
0bf8477d 4976static void
fa8db1f7 4977use_return_register (void)
c13fde05
RH
4978{
4979 diddle_return_value (do_use_return_reg, NULL);
4980}
4981
902edd36
JH
4982/* Possibly warn about unused parameters. */
4983void
4984do_warn_unused_parameter (tree fn)
4985{
4986 tree decl;
4987
4988 for (decl = DECL_ARGUMENTS (fn);
910ad8de 4989 decl; decl = DECL_CHAIN (decl))
902edd36 4990 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
534fd534
DF
4991 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4992 && !TREE_NO_WARNING (decl))
b9b8dde3 4993 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
902edd36
JH
4994}
4995
862d0b35
DN
4996/* Set the location of the insn chain starting at INSN to LOC. */
4997
4998static void
dc01c3d1 4999set_insn_locations (rtx_insn *insn, int loc)
862d0b35 5000{
dc01c3d1 5001 while (insn != NULL)
862d0b35
DN
5002 {
5003 if (INSN_P (insn))
5004 INSN_LOCATION (insn) = loc;
5005 insn = NEXT_INSN (insn);
5006 }
5007}
5008
71c0e7fc 5009/* Generate RTL for the end of the current function. */
6f086dfc
RS
5010
5011void
fa8db1f7 5012expand_function_end (void)
6f086dfc 5013{
932f0847 5014 rtx clobber_after;
6f086dfc 5015
964be02f
RH
5016 /* If arg_pointer_save_area was referenced only from a nested
5017 function, we will not have initialized it yet. Do that now. */
e3b5732b 5018 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
bd60bab2 5019 get_arg_pointer_save_area ();
964be02f 5020
b38f3813 5021 /* If we are doing generic stack checking and this function makes calls,
11044f66
RK
5022 do a stack probe at the start of the function to ensure we have enough
5023 space for another stack frame. */
b38f3813 5024 if (flag_stack_check == GENERIC_STACK_CHECK)
11044f66 5025 {
691fe203 5026 rtx_insn *insn, *seq;
11044f66
RK
5027
5028 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 5029 if (CALL_P (insn))
11044f66 5030 {
c35af30f 5031 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
11044f66 5032 start_sequence ();
c35af30f
EB
5033 if (STACK_CHECK_MOVING_SP)
5034 anti_adjust_stack_and_probe (max_frame_size, true);
5035 else
5036 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
11044f66
RK
5037 seq = get_insns ();
5038 end_sequence ();
5368224f 5039 set_insn_locations (seq, prologue_location);
ede497cf 5040 emit_insn_before (seq, stack_check_probe_note);
11044f66
RK
5041 break;
5042 }
5043 }
5044
6f086dfc
RS
5045 /* End any sequences that failed to be closed due to syntax errors. */
5046 while (in_sequence_p ())
5f4f0e22 5047 end_sequence ();
6f086dfc 5048
6f086dfc
RS
5049 clear_pending_stack_adjust ();
5050 do_pending_stack_adjust ();
5051
6f086dfc
RS
5052 /* Output a line number for the end of the function.
5053 SDB depends on this. */
5368224f 5054 set_curr_insn_location (input_location);
6f086dfc 5055
fbffc70a 5056 /* Before the return label (if any), clobber the return
a1f300c0 5057 registers so that they are not propagated live to the rest of
fbffc70a
GK
5058 the function. This can only happen with functions that drop
5059 through; if there had been a return statement, there would
932f0847
JH
5060 have either been a return rtx, or a jump to the return label.
5061
5062 We delay actual code generation after the current_function_value_rtx
5063 is computed. */
5064 clobber_after = get_last_insn ();
fbffc70a 5065
526c334b
KH
5066 /* Output the label for the actual return from the function. */
5067 emit_label (return_label);
6f086dfc 5068
677f3fa8 5069 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
815eb8f0
AM
5070 {
5071 /* Let except.c know where it should emit the call to unregister
5072 the function context for sjlj exceptions. */
5073 if (flag_exceptions)
5074 sjlj_emit_function_exit_after (get_last_insn ());
5075 }
6fb5fa3c
DB
5076 else
5077 {
5078 /* We want to ensure that instructions that may trap are not
5079 moved into the epilogue by scheduling, because we don't
5080 always emit unwind information for the epilogue. */
8f4f502f 5081 if (cfun->can_throw_non_call_exceptions)
6fb5fa3c
DB
5082 emit_insn (gen_blockage ());
5083 }
0b59e81e 5084
652b0932
RH
5085 /* If this is an implementation of throw, do what's necessary to
5086 communicate between __builtin_eh_return and the epilogue. */
5087 expand_eh_return ();
5088
3e4eac3f
RH
5089 /* If scalar return value was computed in a pseudo-reg, or was a named
5090 return value that got dumped to the stack, copy that to the hard
5091 return register. */
19e7881c 5092 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 5093 {
3e4eac3f
RH
5094 tree decl_result = DECL_RESULT (current_function_decl);
5095 rtx decl_rtl = DECL_RTL (decl_result);
5096
5097 if (REG_P (decl_rtl)
5098 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5099 : DECL_REGISTER (decl_result))
5100 {
38173d38 5101 rtx real_decl_rtl = crtl->return_rtx;
6f086dfc 5102
ce5e43d0 5103 /* This should be set in assign_parms. */
0bccc606 5104 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
5105
5106 /* If this is a BLKmode structure being returned in registers,
5107 then use the mode computed in expand_return. Note that if
797a6ac1 5108 decl_rtl is memory, then its mode may have been changed,
38173d38 5109 but that crtl->return_rtx has not. */
3e4eac3f 5110 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 5111 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 5112
bef5d8b6
RS
5113 /* If a non-BLKmode return value should be padded at the least
5114 significant end of the register, shift it left by the appropriate
5115 amount. BLKmode results are handled using the group load/store
5116 machinery. */
5117 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
66de4d7c 5118 && REG_P (real_decl_rtl)
bef5d8b6
RS
5119 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5120 {
5121 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5122 REGNO (real_decl_rtl)),
5123 decl_rtl);
5124 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5125 }
3e4eac3f 5126 /* If a named return value dumped decl_return to memory, then
797a6ac1 5127 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 5128 extension. */
bef5d8b6 5129 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 5130 {
8df83eae 5131 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
cde0f3fd
PB
5132 promote_function_mode (TREE_TYPE (decl_result),
5133 GET_MODE (decl_rtl), &unsignedp,
5134 TREE_TYPE (current_function_decl), 1);
3e4eac3f
RH
5135
5136 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5137 }
aa570f54 5138 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
5139 {
5140 /* If expand_function_start has created a PARALLEL for decl_rtl,
5141 move the result to the real return registers. Otherwise, do
5142 a group load from decl_rtl for a named return. */
5143 if (GET_CODE (decl_rtl) == PARALLEL)
5144 emit_group_move (real_decl_rtl, decl_rtl);
5145 else
5146 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 5147 TREE_TYPE (decl_result),
084a1106
JDA
5148 int_size_in_bytes (TREE_TYPE (decl_result)));
5149 }
652b0932
RH
5150 /* In the case of complex integer modes smaller than a word, we'll
5151 need to generate some non-trivial bitfield insertions. Do that
5152 on a pseudo and not the hard register. */
5153 else if (GET_CODE (decl_rtl) == CONCAT
5154 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5155 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5156 {
5157 int old_generating_concat_p;
5158 rtx tmp;
5159
5160 old_generating_concat_p = generating_concat_p;
5161 generating_concat_p = 0;
5162 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5163 generating_concat_p = old_generating_concat_p;
5164
5165 emit_move_insn (tmp, decl_rtl);
5166 emit_move_insn (real_decl_rtl, tmp);
5167 }
3e4eac3f
RH
5168 else
5169 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 5170 }
6f086dfc
RS
5171 }
5172
5173 /* If returning a structure, arrange to return the address of the value
5174 in a place where debuggers expect to find it.
5175
5176 If returning a structure PCC style,
5177 the caller also depends on this value.
e3b5732b
JH
5178 And cfun->returns_pcc_struct is not necessarily set. */
5179 if (cfun->returns_struct
5180 || cfun->returns_pcc_struct)
6f086dfc 5181 {
cc77ae10 5182 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 5183 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
5184 rtx outgoing;
5185
5186 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5187 type = TREE_TYPE (type);
5188 else
5189 value_address = XEXP (value_address, 0);
5190
1d636cc6
RG
5191 outgoing = targetm.calls.function_value (build_pointer_type (type),
5192 current_function_decl, true);
6f086dfc
RS
5193
5194 /* Mark this as a function return value so integrate will delete the
5195 assignment and USE below when inlining this function. */
5196 REG_FUNCTION_VALUE_P (outgoing) = 1;
5197
d1608933 5198 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
5199 value_address = convert_memory_address (GET_MODE (outgoing),
5200 value_address);
d1608933 5201
6f086dfc 5202 emit_move_insn (outgoing, value_address);
d1608933
RK
5203
5204 /* Show return register used to hold result (in this case the address
5205 of the result). */
38173d38 5206 crtl->return_rtx = outgoing;
6f086dfc
RS
5207 }
5208
79c7fda6
JJ
5209 /* Emit the actual code to clobber return register. Don't emit
5210 it if clobber_after is a barrier, then the previous basic block
5211 certainly doesn't fall thru into the exit block. */
5212 if (!BARRIER_P (clobber_after))
5213 {
5214 rtx seq;
797a6ac1 5215
79c7fda6
JJ
5216 start_sequence ();
5217 clobber_return_register ();
5218 seq = get_insns ();
5219 end_sequence ();
932f0847 5220
79c7fda6
JJ
5221 emit_insn_after (seq, clobber_after);
5222 }
932f0847 5223
609c3937 5224 /* Output the label for the naked return from the function. */
4c33221c
UW
5225 if (naked_return_label)
5226 emit_label (naked_return_label);
6e3077c6 5227
25108646
AH
5228 /* @@@ This is a kludge. We want to ensure that instructions that
5229 may trap are not moved into the epilogue by scheduling, because
56d17681 5230 we don't always emit unwind information for the epilogue. */
f0a0390e 5231 if (cfun->can_throw_non_call_exceptions
677f3fa8 5232 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
56d17681 5233 emit_insn (gen_blockage ());
25108646 5234
7d69de61 5235 /* If stack protection is enabled for this function, check the guard. */
cb91fab0 5236 if (crtl->stack_protect_guard)
7d69de61
RH
5237 stack_protect_epilogue ();
5238
40184445
BS
5239 /* If we had calls to alloca, and this machine needs
5240 an accurate stack pointer to exit the function,
5241 insert some code to save and restore the stack pointer. */
5242 if (! EXIT_IGNORE_STACK
e3b5732b 5243 && cfun->calls_alloca)
40184445 5244 {
9eac0f2a 5245 rtx tem = 0, seq;
40184445 5246
9eac0f2a
RH
5247 start_sequence ();
5248 emit_stack_save (SAVE_FUNCTION, &tem);
5249 seq = get_insns ();
5250 end_sequence ();
5251 emit_insn_before (seq, parm_birth_insn);
5252
5253 emit_stack_restore (SAVE_FUNCTION, tem);
40184445
BS
5254 }
5255
c13fde05
RH
5256 /* ??? This should no longer be necessary since stupid is no longer with
5257 us, but there are some parts of the compiler (eg reload_combine, and
5258 sh mach_dep_reorg) that still try and compute their own lifetime info
5259 instead of using the general framework. */
5260 use_return_register ();
6f086dfc 5261}
278ed218
RH
5262
5263rtx
bd60bab2 5264get_arg_pointer_save_area (void)
278ed218 5265{
bd60bab2 5266 rtx ret = arg_pointer_save_area;
278ed218
RH
5267
5268 if (! ret)
5269 {
bd60bab2
JH
5270 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5271 arg_pointer_save_area = ret;
964be02f
RH
5272 }
5273
e3b5732b 5274 if (! crtl->arg_pointer_save_area_init)
964be02f
RH
5275 {
5276 rtx seq;
278ed218 5277
797a6ac1 5278 /* Save the arg pointer at the beginning of the function. The
964be02f 5279 generated stack slot may not be a valid memory address, so we
278ed218
RH
5280 have to check it and fix it if necessary. */
5281 start_sequence ();
1a8cb155 5282 emit_move_insn (validize_mem (copy_rtx (ret)),
2e3f842f 5283 crtl->args.internal_arg_pointer);
2f937369 5284 seq = get_insns ();
278ed218
RH
5285 end_sequence ();
5286
964be02f 5287 push_topmost_sequence ();
1cb2fc7b 5288 emit_insn_after (seq, entry_of_function ());
964be02f 5289 pop_topmost_sequence ();
c1d9a70a
ILT
5290
5291 crtl->arg_pointer_save_area_init = true;
278ed218
RH
5292 }
5293
5294 return ret;
5295}
bdac5f58 5296\f
cd9c1ca8
RH
5297/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5298 for the first time. */
bdac5f58 5299
0a1c58a2 5300static void
dc01c3d1 5301record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
bdac5f58 5302{
dc01c3d1 5303 rtx_insn *tmp;
cd9c1ca8 5304 htab_t hash = *hashp;
0a1c58a2 5305
cd9c1ca8
RH
5306 if (hash == NULL)
5307 *hashp = hash
5308 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5309
5310 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5311 {
5312 void **slot = htab_find_slot (hash, tmp, INSERT);
5313 gcc_assert (*slot == NULL);
5314 *slot = tmp;
5315 }
5316}
5317
cd400280
RH
5318/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5319 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5320 insn, then record COPY as well. */
cd9c1ca8
RH
5321
5322void
cd400280 5323maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
cd9c1ca8 5324{
cd400280 5325 htab_t hash;
cd9c1ca8
RH
5326 void **slot;
5327
cd400280
RH
5328 hash = epilogue_insn_hash;
5329 if (!hash || !htab_find (hash, insn))
5330 {
5331 hash = prologue_insn_hash;
5332 if (!hash || !htab_find (hash, insn))
5333 return;
5334 }
cd9c1ca8 5335
cd400280 5336 slot = htab_find_slot (hash, copy, INSERT);
cd9c1ca8
RH
5337 gcc_assert (*slot == NULL);
5338 *slot = copy;
bdac5f58
TW
5339}
5340
cd9c1ca8
RH
5341/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5342 we can be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 5343
cd9c1ca8
RH
5344static bool
5345contains (const_rtx insn, htab_t hash)
bdac5f58 5346{
cd9c1ca8
RH
5347 if (hash == NULL)
5348 return false;
bdac5f58 5349
cd9c1ca8 5350 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
bdac5f58 5351 {
e0944870 5352 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
cd9c1ca8 5353 int i;
e0944870
DM
5354 for (i = seq->len () - 1; i >= 0; i--)
5355 if (htab_find (hash, seq->element (i)))
cd9c1ca8
RH
5356 return true;
5357 return false;
bdac5f58 5358 }
cd9c1ca8
RH
5359
5360 return htab_find (hash, insn) != NULL;
bdac5f58 5361}
5c7675e9
RH
5362
5363int
4f588890 5364prologue_epilogue_contains (const_rtx insn)
5c7675e9 5365{
cd9c1ca8 5366 if (contains (insn, prologue_insn_hash))
5c7675e9 5367 return 1;
cd9c1ca8 5368 if (contains (insn, epilogue_insn_hash))
5c7675e9
RH
5369 return 1;
5370 return 0;
5371}
bdac5f58 5372
170d8157 5373#ifdef HAVE_return
4c029f40
TV
5374/* Insert use of return register before the end of BB. */
5375
5376static void
5377emit_use_return_register_into_block (basic_block bb)
5378{
1e1b18c1 5379 rtx seq, insn;
4c029f40
TV
5380 start_sequence ();
5381 use_return_register ();
5382 seq = get_insns ();
5383 end_sequence ();
1e1b18c1
EB
5384 insn = BB_END (bb);
5385#ifdef HAVE_cc0
5386 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5387 insn = prev_cc0_setter (insn);
5388#endif
5389 emit_insn_before (seq, insn);
4c029f40
TV
5390}
5391
484db665
BS
5392
5393/* Create a return pattern, either simple_return or return, depending on
5394 simple_p. */
5395
5396static rtx
5397gen_return_pattern (bool simple_p)
5398{
5399#ifdef HAVE_simple_return
5400 return simple_p ? gen_simple_return () : gen_return ();
5401#else
5402 gcc_assert (!simple_p);
5403 return gen_return ();
5404#endif
5405}
5406
5407/* Insert an appropriate return pattern at the end of block BB. This
5408 also means updating block_for_insn appropriately. SIMPLE_P is
5409 the same as in gen_return_pattern and passed to it. */
69732dcb 5410
f30e25a3 5411void
484db665 5412emit_return_into_block (bool simple_p, basic_block bb)
69732dcb 5413{
484db665
BS
5414 rtx jump, pat;
5415 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5416 pat = PATTERN (jump);
26898771
BS
5417 if (GET_CODE (pat) == PARALLEL)
5418 pat = XVECEXP (pat, 0, 0);
5419 gcc_assert (ANY_RETURN_P (pat));
5420 JUMP_LABEL (jump) = pat;
69732dcb 5421}
484db665 5422#endif
69732dcb 5423
387748de
AM
5424/* Set JUMP_LABEL for a return insn. */
5425
5426void
5427set_return_jump_label (rtx returnjump)
5428{
5429 rtx pat = PATTERN (returnjump);
5430 if (GET_CODE (pat) == PARALLEL)
5431 pat = XVECEXP (pat, 0, 0);
5432 if (ANY_RETURN_P (pat))
5433 JUMP_LABEL (returnjump) = pat;
5434 else
5435 JUMP_LABEL (returnjump) = ret_rtx;
5436}
5437
ffe14686
AM
5438#if defined (HAVE_return) || defined (HAVE_simple_return)
5439/* Return true if there are any active insns between HEAD and TAIL. */
f30e25a3 5440bool
ffd80b43 5441active_insn_between (rtx_insn *head, rtx_insn *tail)
39d52ae5 5442{
ffe14686
AM
5443 while (tail)
5444 {
5445 if (active_insn_p (tail))
5446 return true;
5447 if (tail == head)
5448 return false;
5449 tail = PREV_INSN (tail);
5450 }
5451 return false;
5452}
5453
5454/* LAST_BB is a block that exits, and is empty of active instructions.
5455 Examine its predecessors for jumps that can be converted to
5456 (conditional) returns. */
f30e25a3 5457vec<edge>
ffe14686 5458convert_jumps_to_returns (basic_block last_bb, bool simple_p,
9771b263 5459 vec<edge> unconverted ATTRIBUTE_UNUSED)
ffe14686
AM
5460{
5461 int i;
5462 basic_block bb;
39d52ae5 5463 rtx label;
ffe14686
AM
5464 edge_iterator ei;
5465 edge e;
ef062b13 5466 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
39d52ae5 5467
ffe14686 5468 FOR_EACH_EDGE (e, ei, last_bb->preds)
fefa31b5 5469 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
9771b263 5470 src_bbs.quick_push (e->src);
ffe14686
AM
5471
5472 label = BB_HEAD (last_bb);
5473
9771b263 5474 FOR_EACH_VEC_ELT (src_bbs, i, bb)
39d52ae5 5475 {
68a1a6c0 5476 rtx_insn *jump = BB_END (bb);
ffe14686
AM
5477
5478 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5479 continue;
5480
5481 e = find_edge (bb, last_bb);
5482
5483 /* If we have an unconditional jump, we can replace that
5484 with a simple return instruction. */
5485 if (simplejump_p (jump))
5486 {
5487 /* The use of the return register might be present in the exit
5488 fallthru block. Either:
5489 - removing the use is safe, and we should remove the use in
5490 the exit fallthru block, or
5491 - removing the use is not safe, and we should add it here.
5492 For now, we conservatively choose the latter. Either of the
5493 2 helps in crossjumping. */
5494 emit_use_return_register_into_block (bb);
5495
5496 emit_return_into_block (simple_p, bb);
5497 delete_insn (jump);
5498 }
5499
5500 /* If we have a conditional jump branching to the last
5501 block, we can try to replace that with a conditional
5502 return instruction. */
5503 else if (condjump_p (jump))
5504 {
5505 rtx dest;
5506
5507 if (simple_p)
5508 dest = simple_return_rtx;
5509 else
5510 dest = ret_rtx;
5511 if (!redirect_jump (jump, dest, 0))
5512 {
5513#ifdef HAVE_simple_return
5514 if (simple_p)
5515 {
5516 if (dump_file)
5517 fprintf (dump_file,
5518 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5519 unconverted.safe_push (e);
ffe14686
AM
5520 }
5521#endif
5522 continue;
5523 }
5524
5525 /* See comment in simplejump_p case above. */
5526 emit_use_return_register_into_block (bb);
5527
5528 /* If this block has only one successor, it both jumps
5529 and falls through to the fallthru block, so we can't
5530 delete the edge. */
5531 if (single_succ_p (bb))
5532 continue;
5533 }
5534 else
5535 {
5536#ifdef HAVE_simple_return
5537 if (simple_p)
5538 {
5539 if (dump_file)
5540 fprintf (dump_file,
5541 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5542 unconverted.safe_push (e);
ffe14686
AM
5543 }
5544#endif
5545 continue;
5546 }
5547
5548 /* Fix up the CFG for the successful change we just made. */
fefa31b5 5549 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
d3b623c7 5550 e->flags &= ~EDGE_CROSSING;
39d52ae5 5551 }
9771b263 5552 src_bbs.release ();
ffe14686 5553 return unconverted;
39d52ae5
BS
5554}
5555
ffe14686 5556/* Emit a return insn for the exit fallthru block. */
f30e25a3 5557basic_block
ffe14686
AM
5558emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5559{
5560 basic_block last_bb = exit_fallthru_edge->src;
5561
5562 if (JUMP_P (BB_END (last_bb)))
5563 {
5564 last_bb = split_edge (exit_fallthru_edge);
5565 exit_fallthru_edge = single_succ_edge (last_bb);
5566 }
5567 emit_barrier_after (BB_END (last_bb));
5568 emit_return_into_block (simple_p, last_bb);
5569 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5570 return last_bb;
5571}
5572#endif
5573
5574
9faa82d8 5575/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58 5576 this into place with notes indicating where the prologue ends and where
484db665
BS
5577 the epilogue begins. Update the basic block information when possible.
5578
5579 Notes on epilogue placement:
5580 There are several kinds of edges to the exit block:
5581 * a single fallthru edge from LAST_BB
5582 * possibly, edges from blocks containing sibcalls
5583 * possibly, fake edges from infinite loops
5584
5585 The epilogue is always emitted on the fallthru edge from the last basic
5586 block in the function, LAST_BB, into the exit block.
5587
5588 If LAST_BB is empty except for a label, it is the target of every
5589 other basic block in the function that ends in a return. If a
5590 target has a return or simple_return pattern (possibly with
5591 conditional variants), these basic blocks can be changed so that a
5592 return insn is emitted into them, and their target is adjusted to
5593 the real exit block.
5594
5595 Notes on shrink wrapping: We implement a fairly conservative
5596 version of shrink-wrapping rather than the textbook one. We only
5597 generate a single prologue and a single epilogue. This is
5598 sufficient to catch a number of interesting cases involving early
5599 exits.
5600
5601 First, we identify the blocks that require the prologue to occur before
5602 them. These are the ones that modify a call-saved register, or reference
5603 any of the stack or frame pointer registers. To simplify things, we then
5604 mark everything reachable from these blocks as also requiring a prologue.
5605 This takes care of loops automatically, and avoids the need to examine
5606 whether MEMs reference the frame, since it is sufficient to check for
5607 occurrences of the stack or frame pointer.
5608
5609 We then compute the set of blocks for which the need for a prologue
5610 is anticipatable (borrowing terminology from the shrink-wrapping
5611 description in Muchnick's book). These are the blocks which either
5612 require a prologue themselves, or those that have only successors
5613 where the prologue is anticipatable. The prologue needs to be
5614 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5615 is not. For the moment, we ensure that only one such edge exists.
5616
5617 The epilogue is placed as described above, but we make a
5618 distinction between inserting return and simple_return patterns
5619 when modifying other blocks that end in a return. Blocks that end
5620 in a sibcall omit the sibcall_epilogue if the block is not in
5621 ANTIC. */
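/* Illustrative sketch only, not from the original sources: in a function
   such as

     int
     f (int *p)
     {
       if (p == NULL)
         return -1;
       return g (*p) + 1;
     }

   the early return touches no call-saved register and no stack or frame
   pointer slot, while the path containing the call to g requires the frame
   to be set up.  Only that path needs the prologue, so the single
   prologue/epilogue pair is emitted around it and the early return runs
   without a frame.  (g is a hypothetical external function used only for
   this example.)  */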
bdac5f58 5622
6fb5fa3c
DB
5623static void
5624thread_prologue_and_epilogue_insns (void)
bdac5f58 5625{
7458026b 5626 bool inserted;
484db665 5627#ifdef HAVE_simple_return
6e1aa848 5628 vec<edge> unconverted_simple_returns = vNULL;
ffe14686 5629 bitmap_head bb_flags;
484db665 5630#endif
9c8348cf 5631 rtx_insn *returnjump;
9c8348cf 5632 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
dc01c3d1 5633 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
484db665 5634 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
628f6a4e 5635 edge_iterator ei;
484db665
BS
5636
5637 df_analyze ();
e881bb1b 5638
fefa31b5 5639 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
7458026b
ILT
5640
5641 inserted = false;
9c8348cf
DM
5642 epilogue_end = NULL;
5643 returnjump = NULL;
7458026b
ILT
5644
5645 /* Can't deal with multiple successors of the entry block at the
5646 moment. Function should always have at least one entry
5647 point. */
fefa31b5
DM
5648 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5649 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
484db665
BS
5650 orig_entry_edge = entry_edge;
5651
dc01c3d1 5652 split_prologue_seq = NULL;
7458026b
ILT
5653 if (flag_split_stack
5654 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5655 == NULL))
5656 {
5657#ifndef HAVE_split_stack_prologue
5658 gcc_unreachable ();
5659#else
5660 gcc_assert (HAVE_split_stack_prologue);
5661
5662 start_sequence ();
5663 emit_insn (gen_split_stack_prologue ());
484db665 5664 split_prologue_seq = get_insns ();
7458026b
ILT
5665 end_sequence ();
5666
484db665 5667 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5368224f 5668 set_insn_locations (split_prologue_seq, prologue_location);
7458026b
ILT
5669#endif
5670 }
5671
dc01c3d1 5672 prologue_seq = NULL;
bdac5f58
TW
5673#ifdef HAVE_prologue
5674 if (HAVE_prologue)
5675 {
e881bb1b 5676 start_sequence ();
dc01c3d1 5677 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
e881bb1b 5678 emit_insn (seq);
bdac5f58 5679
b8698a0f 5680 /* Insert an explicit USE for the frame pointer
6fb5fa3c 5681 if the profiling is on and the frame pointer is required. */
e3b5732b 5682 if (crtl->profile && frame_pointer_needed)
c41c1387 5683 emit_use (hard_frame_pointer_rtx);
6fb5fa3c 5684
bdac5f58 5685 /* Retain a map of the prologue insns. */
cd9c1ca8 5686 record_insns (seq, NULL, &prologue_insn_hash);
56d17681 5687 emit_note (NOTE_INSN_PROLOGUE_END);
b8698a0f 5688
56d17681
UB
5689 /* Ensure that instructions are not moved into the prologue when
5690 profiling is on. The call to the profiling routine can be
5691 emitted within the live range of a call-clobbered register. */
3c5273a9 5692 if (!targetm.profile_before_prologue () && crtl->profile)
56d17681 5693 emit_insn (gen_blockage ());
9185a8d5 5694
484db665 5695 prologue_seq = get_insns ();
e881bb1b 5696 end_sequence ();
5368224f 5697 set_insn_locations (prologue_seq, prologue_location);
484db665
BS
5698 }
5699#endif
e881bb1b 5700
ffe14686 5701#ifdef HAVE_simple_return
484db665
BS
5702 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5703
484db665
BS
5704 /* Try to perform a kind of shrink-wrapping, making sure the
5705 prologue/epilogue is emitted only around those parts of the
5706 function that require it. */
5707
f30e25a3 5708 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
bdac5f58 5709#endif
bdac5f58 5710
484db665
BS
5711 if (split_prologue_seq != NULL_RTX)
5712 {
f4b31a33 5713 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
484db665
BS
5714 inserted = true;
5715 }
5716 if (prologue_seq != NULL_RTX)
5717 {
5718 insert_insn_on_edge (prologue_seq, entry_edge);
5719 inserted = true;
5720 }
5721
19d3c25c
RH
5722 /* If the exit block has no non-fake predecessors, we don't need
5723 an epilogue. */
fefa31b5 5724 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
19d3c25c
RH
5725 if ((e->flags & EDGE_FAKE) == 0)
5726 break;
5727 if (e == NULL)
5728 goto epilogue_done;
5729
fefa31b5 5730 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
484db665 5731
fefa31b5 5732 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
ffe14686 5733
ffe14686
AM
5734#ifdef HAVE_simple_return
5735 if (entry_edge != orig_entry_edge)
f30e25a3
ZC
5736 exit_fallthru_edge
5737 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5738 &unconverted_simple_returns,
5739 &returnjump);
484db665 5740#endif
ffe14686
AM
5741#ifdef HAVE_return
5742 if (HAVE_return)
5743 {
5744 if (exit_fallthru_edge == NULL)
5745 goto epilogue_done;
69732dcb 5746
ffe14686
AM
5747 if (optimize)
5748 {
5749 basic_block last_bb = exit_fallthru_edge->src;
484db665 5750
ffe14686
AM
5751 if (LABEL_P (BB_HEAD (last_bb))
5752 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6e1aa848 5753 convert_jumps_to_returns (last_bb, false, vNULL);
ffe14686 5754
1ff2fd21
AM
5755 if (EDGE_COUNT (last_bb->preds) != 0
5756 && single_succ_p (last_bb))
484db665 5757 {
ffe14686
AM
5758 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5759 epilogue_end = returnjump = BB_END (last_bb);
484db665 5760#ifdef HAVE_simple_return
ffe14686
AM
5761 /* Emitting the return may add a basic block.
5762 Fix bb_flags for the added block. */
5763 if (last_bb != exit_fallthru_edge->src)
5764 bitmap_set_bit (&bb_flags, last_bb->index);
484db665 5765#endif
ffe14686 5766 goto epilogue_done;
69732dcb 5767 }
2dd8bc01 5768 }
69732dcb
RH
5769 }
5770#endif
cd9c1ca8
RH
5771
5772 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5773 this marker for the splits of EH_RETURN patterns, and nothing else
5774 uses the flag in the meantime. */
5775 epilogue_completed = 1;
5776
5777#ifdef HAVE_eh_return
5778 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5779 some targets, these get split to a special version of the epilogue
5780 code. In order to be able to properly annotate these with unwind
5781 info, try to split them now. If we get a valid split, drop an
5782 EPILOGUE_BEG note and mark the insns as epilogue insns. */
fefa31b5 5783 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
cd9c1ca8 5784 {
691fe203 5785 rtx_insn *prev, *last, *trial;
cd9c1ca8
RH
5786
5787 if (e->flags & EDGE_FALLTHRU)
5788 continue;
5789 last = BB_END (e->src);
5790 if (!eh_returnjump_p (last))
5791 continue;
5792
5793 prev = PREV_INSN (last);
5794 trial = try_split (PATTERN (last), last, 1);
5795 if (trial == last)
5796 continue;
5797
5798 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5799 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5800 }
5801#endif
5802
484db665
BS
5803 /* If nothing falls through into the exit block, we don't need an
5804 epilogue. */
623a66fa 5805
484db665 5806 if (exit_fallthru_edge == NULL)
623a66fa
R
5807 goto epilogue_done;
5808
bdac5f58
TW
5809#ifdef HAVE_epilogue
5810 if (HAVE_epilogue)
5811 {
19d3c25c 5812 start_sequence ();
2e040219 5813 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
dc01c3d1 5814 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
55c623b5
UW
5815 if (seq)
5816 emit_jump_insn (seq);
bdac5f58 5817
19d3c25c 5818 /* Retain a map of the epilogue insns. */
cd9c1ca8 5819 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 5820 set_insn_locations (seq, epilogue_location);
bdac5f58 5821
2f937369 5822 seq = get_insns ();
484db665 5823 returnjump = get_last_insn ();
718fe406 5824 end_sequence ();
e881bb1b 5825
484db665 5826 insert_insn_on_edge (seq, exit_fallthru_edge);
7458026b 5827 inserted = true;
dc0ff1c8
BS
5828
5829 if (JUMP_P (returnjump))
387748de 5830 set_return_jump_label (returnjump);
bdac5f58 5831 }
623a66fa 5832 else
bdac5f58 5833#endif
623a66fa
R
5834 {
5835 basic_block cur_bb;
5836
484db665 5837 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
623a66fa
R
5838 goto epilogue_done;
5839 /* We have a fall-through edge to the exit block, the source is not
5840 at the end of the function, and there will be an assembler epilogue
5841 at the end of the function.
5842 We can't use force_nonfallthru here, because that would try to
484db665 5843 use return. Inserting a jump 'by hand' is extremely messy, so
623a66fa 5844 we take advantage of cfg_layout_finalize using
484db665 5845 fixup_fallthru_exit_predecessor. */
35b6b437 5846 cfg_layout_initialize (0);
11cd3bed 5847 FOR_EACH_BB_FN (cur_bb, cfun)
24bd1a0b
DB
5848 if (cur_bb->index >= NUM_FIXED_BLOCKS
5849 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 5850 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
5851 cfg_layout_finalize ();
5852 }
cf103ca4 5853
19d3c25c 5854epilogue_done:
484db665 5855
a8ba47cb 5856 default_rtl_profile ();
e881bb1b 5857
ca1117cc 5858 if (inserted)
30a873c3 5859 {
cf103ca4
EB
5860 sbitmap blocks;
5861
30a873c3
ZD
5862 commit_edge_insertions ();
5863
cf103ca4 5864 /* Look for basic blocks within the prologue insns. */
8b1c6fd7 5865 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 5866 bitmap_clear (blocks);
d7c028c0
LC
5867 bitmap_set_bit (blocks, entry_edge->dest->index);
5868 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
cf103ca4
EB
5869 find_many_sub_basic_blocks (blocks);
5870 sbitmap_free (blocks);
5871
30a873c3
ZD
5872 /* The epilogue insns we inserted may cause the exit edge to no longer
5873 be fallthru. */
fefa31b5 5874 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
30a873c3
ZD
5875 {
5876 if (((e->flags & EDGE_FALLTHRU) != 0)
5877 && returnjump_p (BB_END (e->src)))
5878 e->flags &= ~EDGE_FALLTHRU;
5879 }
5880 }
0a1c58a2 5881
484db665 5882#ifdef HAVE_simple_return
f30e25a3
ZC
5883 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
5884 unconverted_simple_returns);
484db665
BS
5885#endif
5886
0a1c58a2
JL
5887#ifdef HAVE_sibcall_epilogue
5888 /* Emit sibling epilogues before any sibling call sites. */
fefa31b5
DM
5889 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
5890 ei_safe_edge (ei));
5891 )
0a1c58a2
JL
5892 {
5893 basic_block bb = e->src;
691fe203 5894 rtx_insn *insn = BB_END (bb);
484db665 5895 rtx ep_seq;
0a1c58a2 5896
4b4bf941 5897 if (!CALL_P (insn)
484db665 5898 || ! SIBLING_CALL_P (insn)
ffe14686 5899#ifdef HAVE_simple_return
484db665 5900 || (entry_edge != orig_entry_edge
ffe14686
AM
5901 && !bitmap_bit_p (&bb_flags, bb->index))
5902#endif
5903 )
628f6a4e
BE
5904 {
5905 ei_next (&ei);
5906 continue;
5907 }
0a1c58a2 5908
484db665
BS
5909 ep_seq = gen_sibcall_epilogue ();
5910 if (ep_seq)
5911 {
5912 start_sequence ();
5913 emit_note (NOTE_INSN_EPILOGUE_BEG);
5914 emit_insn (ep_seq);
dc01c3d1 5915 rtx_insn *seq = get_insns ();
484db665 5916 end_sequence ();
0a1c58a2 5917
484db665
BS
5918 /* Retain a map of the epilogue insns. Used in life analysis to
5919 avoid getting rid of sibcall epilogue insns. Do this before we
5920 actually emit the sequence. */
5921 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 5922 set_insn_locations (seq, epilogue_location);
2f937369 5923
484db665
BS
5924 emit_insn_before (seq, insn);
5925 }
628f6a4e 5926 ei_next (&ei);
0a1c58a2
JL
5927 }
5928#endif
ca1117cc 5929
86c82654
RH
5930#ifdef HAVE_epilogue
5931 if (epilogue_end)
5932 {
9c8348cf 5933 rtx_insn *insn, *next;
86c82654
RH
5934
5935 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 5936 There is no need, however, to be quite so anal about the existence
071a42f9 5937 of such a note. Also possibly move
84c1fa24
UW
5938 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5939 info generation. */
718fe406 5940 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
5941 {
5942 next = NEXT_INSN (insn);
b8698a0f 5943 if (NOTE_P (insn)
a38e7aa5 5944 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
86c82654
RH
5945 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5946 }
5947 }
5948#endif
6fb5fa3c 5949
ffe14686 5950#ifdef HAVE_simple_return
484db665 5951 bitmap_clear (&bb_flags);
ffe14686 5952#endif
484db665 5953
6fb5fa3c
DB
5954 /* Threading the prologue and epilogue changes the artificial refs
5955 in the entry and exit blocks. */
5956 epilogue_completed = 1;
5957 df_update_entry_exit_and_calls ();
bdac5f58
TW
5958}
5959
cd9c1ca8
RH
5960/* Reposition the prologue-end and epilogue-begin notes after
5961 instruction scheduling. */
bdac5f58
TW
5962
5963void
6fb5fa3c 5964reposition_prologue_and_epilogue_notes (void)
bdac5f58 5965{
cd9c1ca8
RH
5966#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5967 || defined (HAVE_sibcall_epilogue)
cd9c1ca8
RH
5968 /* Since the hash table is created on demand, the fact that it is
5969 non-null is a signal that it is non-empty. */
5970 if (prologue_insn_hash != NULL)
bdac5f58 5971 {
cd9c1ca8 5972 size_t len = htab_elements (prologue_insn_hash);
691fe203 5973 rtx_insn *insn, *last = NULL, *note = NULL;
bdac5f58 5974
cd9c1ca8
RH
5975 /* Scan from the beginning until we reach the last prologue insn. */
5976 /* ??? While we do have the CFG intact, there are two problems:
5977 (1) The prologue can contain loops (typically probing the stack),
5978 which means that the end of the prologue isn't in the first bb.
5979 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6fb5fa3c 5980 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
bdac5f58 5981 {
4b4bf941 5982 if (NOTE_P (insn))
9392c110 5983 {
a38e7aa5 5984 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
0a1c58a2
JL
5985 note = insn;
5986 }
cd9c1ca8 5987 else if (contains (insn, prologue_insn_hash))
0a1c58a2 5988 {
9f53e965
RH
5989 last = insn;
5990 if (--len == 0)
5991 break;
5992 }
5993 }
797a6ac1 5994
9f53e965
RH
5995 if (last)
5996 {
cd9c1ca8 5997 if (note == NULL)
9f53e965 5998 {
cd9c1ca8
RH
5999 /* Scan forward looking for the PROLOGUE_END note. It should
6000 be right at the beginning of the block, possibly with other
6001 insn notes that got moved there. */
6002 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6003 {
6004 if (NOTE_P (note)
6005 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6006 break;
6007 }
9f53e965 6008 }
c93b03c2 6009
9f53e965 6010 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 6011 if (LABEL_P (last))
9f53e965
RH
6012 last = NEXT_INSN (last);
6013 reorder_insns (note, note, last);
bdac5f58 6014 }
0a1c58a2
JL
6015 }
6016
cd9c1ca8 6017 if (epilogue_insn_hash != NULL)
0a1c58a2 6018 {
cd9c1ca8
RH
6019 edge_iterator ei;
6020 edge e;
bdac5f58 6021
fefa31b5 6022 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
bdac5f58 6023 {
691fe203 6024 rtx_insn *insn, *first = NULL, *note = NULL;
997704f1 6025 basic_block bb = e->src;
c93b03c2 6026
997704f1 6027 /* Scan from the beginning until we reach the first epilogue insn. */
cd9c1ca8 6028 FOR_BB_INSNS (bb, insn)
9f53e965 6029 {
cd9c1ca8
RH
6030 if (NOTE_P (insn))
6031 {
6032 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6033 {
6034 note = insn;
997704f1 6035 if (first != NULL)
cd9c1ca8
RH
6036 break;
6037 }
6038 }
997704f1 6039 else if (first == NULL && contains (insn, epilogue_insn_hash))
cd9c1ca8 6040 {
997704f1 6041 first = insn;
cd9c1ca8
RH
6042 if (note != NULL)
6043 break;
6044 }
9392c110 6045 }
997704f1
RH
6046
6047 if (note)
6048 {
6049 /* If the function has a single basic block, and no real
b8698a0f 6050 epilogue insns (e.g. sibcall with no cleanup), the
997704f1
RH
6051 epilogue note can get scheduled before the prologue
6052 note. If we have frame related prologue insns, having
6053 them scanned during the epilogue will result in a crash.
6054 In this case re-order the epilogue note to just before
6055 the last insn in the block. */
6056 if (first == NULL)
6057 first = BB_END (bb);
6058
6059 if (PREV_INSN (first) != note)
6060 reorder_insns (note, note, PREV_INSN (first));
6061 }
bdac5f58
TW
6062 }
6063 }
6064#endif /* HAVE_prologue or HAVE_epilogue */
6065}
87ff9c8e 6066
df92c640
SB
6067/* Returns the name of function declared by FNDECL. */
6068const char *
6069fndecl_name (tree fndecl)
6070{
6071 if (fndecl == NULL)
6072 return "(nofn)";
6073 return lang_hooks.decl_printable_name (fndecl, 2);
6074}
6075
532aafad
SB
6076/* Returns the name of function FN. */
6077const char *
6078function_name (struct function *fn)
6079{
df92c640
SB
6080 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6081 return fndecl_name (fndecl);
532aafad
SB
6082}
6083
faed5cc3
SB
6084/* Returns the name of the current function. */
6085const char *
6086current_function_name (void)
6087{
532aafad 6088 return function_name (cfun);
faed5cc3 6089}
ef330312
PB
6090\f
6091
c2924966 6092static unsigned int
ef330312
PB
6093rest_of_handle_check_leaf_regs (void)
6094{
6095#ifdef LEAF_REGISTERS
416ff32e 6096 crtl->uses_only_leaf_regs
ef330312
PB
6097 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6098#endif
c2924966 6099 return 0;
ef330312
PB
6100}
6101
8d8d1a28 6102/* Insert a TYPE into the used types hash table of CFUN. */
b646ba3f 6103
8d8d1a28
AH
6104static void
6105used_types_insert_helper (tree type, struct function *func)
33c9159e 6106{
8d8d1a28 6107 if (type != NULL && func != NULL)
33c9159e 6108 {
33c9159e 6109 if (func->used_types_hash == NULL)
b086d530
TS
6110 func->used_types_hash = hash_set<tree>::create_ggc (37);
6111
6112 func->used_types_hash->add (type);
33c9159e
AH
6113 }
6114}
6115
8d8d1a28
AH
6116/* Given a type, insert it into the used hash table in cfun. */
6117void
6118used_types_insert (tree t)
6119{
6120 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
095c7b3c
JJ
6121 if (TYPE_NAME (t))
6122 break;
6123 else
6124 t = TREE_TYPE (t);
29ce73cb
PB
6125 if (TREE_CODE (t) == ERROR_MARK)
6126 return;
095c7b3c
JJ
6127 if (TYPE_NAME (t) == NULL_TREE
6128 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6129 t = TYPE_MAIN_VARIANT (t);
8d8d1a28 6130 if (debug_info_level > DINFO_LEVEL_NONE)
b646ba3f
DS
6131 {
6132 if (cfun)
6133 used_types_insert_helper (t, cfun);
6134 else
9771b263
DN
6135 {
6136 /* So this might be a type referenced by a global variable.
6137 Record that type so that we can later decide to emit its
6138 debug information. */
6139 vec_safe_push (types_used_by_cur_var_decl, t);
6140 }
b646ba3f
DS
6141 }
6142}
6143
6144/* Helper to Hash a struct types_used_by_vars_entry. */
6145
6146static hashval_t
6147hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6148{
6149 gcc_assert (entry && entry->var_decl && entry->type);
6150
6151 return iterative_hash_object (entry->type,
6152 iterative_hash_object (entry->var_decl, 0));
6153}
6154
6155/* Hash function of the types_used_by_vars_entry hash table. */
6156
6157hashval_t
2a22f99c 6158used_type_hasher::hash (types_used_by_vars_entry *entry)
b646ba3f 6159{
b646ba3f
DS
6160 return hash_types_used_by_vars_entry (entry);
6161}
6162
6163/* Equality function of the types_used_by_vars_entry hash table. */
6164
2a22f99c
TS
6165bool
6166used_type_hasher::equal (types_used_by_vars_entry *e1,
6167 types_used_by_vars_entry *e2)
b646ba3f 6168{
b646ba3f
DS
6169 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6170}
6171
6172/* Inserts an entry into the types_used_by_vars_hash hash table. */
6173
6174void
6175types_used_by_var_decl_insert (tree type, tree var_decl)
6176{
6177 if (type != NULL && var_decl != NULL)
6178 {
2a22f99c 6179 types_used_by_vars_entry **slot;
b646ba3f
DS
6180 struct types_used_by_vars_entry e;
6181 e.var_decl = var_decl;
6182 e.type = type;
6183 if (types_used_by_vars_hash == NULL)
2a22f99c
TS
6184 types_used_by_vars_hash
6185 = hash_table<used_type_hasher>::create_ggc (37);
6186
6187 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
b646ba3f
DS
6188 if (*slot == NULL)
6189 {
6190 struct types_used_by_vars_entry *entry;
766090c2 6191 entry = ggc_alloc<types_used_by_vars_entry> ();
b646ba3f
DS
6192 entry->type = type;
6193 entry->var_decl = var_decl;
6194 *slot = entry;
6195 }
6196 }
8d8d1a28
AH
6197}
6198
27a4cd48
DM
6199namespace {
6200
6201const pass_data pass_data_leaf_regs =
6202{
6203 RTL_PASS, /* type */
6204 "*leaf_regs", /* name */
6205 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
6206 TV_NONE, /* tv_id */
6207 0, /* properties_required */
6208 0, /* properties_provided */
6209 0, /* properties_destroyed */
6210 0, /* todo_flags_start */
6211 0, /* todo_flags_finish */
ef330312
PB
6212};
6213
27a4cd48
DM
6214class pass_leaf_regs : public rtl_opt_pass
6215{
6216public:
c3284718
RS
6217 pass_leaf_regs (gcc::context *ctxt)
6218 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
27a4cd48
DM
6219 {}
6220
6221 /* opt_pass methods: */
be55bfe6
TS
6222 virtual unsigned int execute (function *)
6223 {
6224 return rest_of_handle_check_leaf_regs ();
6225 }
27a4cd48
DM
6226
6227}; // class pass_leaf_regs
6228
6229} // anon namespace
6230
6231rtl_opt_pass *
6232make_pass_leaf_regs (gcc::context *ctxt)
6233{
6234 return new pass_leaf_regs (ctxt);
6235}
6236
6fb5fa3c
DB
6237static unsigned int
6238rest_of_handle_thread_prologue_and_epilogue (void)
6239{
6240 if (optimize)
6241 cleanup_cfg (CLEANUP_EXPENSIVE);
d3c12306 6242
6fb5fa3c
DB
6243 /* On some machines, the prologue and epilogue code, or parts thereof,
6244 can be represented as RTL. Doing so lets us schedule insns between
6245 it and the rest of the code and also allows delayed branch
6246 scheduling to operate in the epilogue. */
6fb5fa3c 6247 thread_prologue_and_epilogue_insns ();
d3c12306 6248
bdc6e1ae
SB
6249 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6250 see PR57320. */
6251 cleanup_cfg (0);
6252
d3c12306 6253 /* The stack usage info is finalized during prologue expansion. */
a11e0df4 6254 if (flag_stack_usage_info)
d3c12306
EB
6255 output_stack_usage ();
6256
6fb5fa3c
DB
6257 return 0;
6258}
6259
27a4cd48
DM
6260namespace {
6261
6262const pass_data pass_data_thread_prologue_and_epilogue =
6263{
6264 RTL_PASS, /* type */
6265 "pro_and_epilogue", /* name */
6266 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
6267 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6268 0, /* properties_required */
6269 0, /* properties_provided */
6270 0, /* properties_destroyed */
3bea341f
RB
6271 0, /* todo_flags_start */
6272 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6fb5fa3c 6273};
27a4cd48
DM
6274
6275class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6276{
6277public:
c3284718
RS
6278 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6279 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
27a4cd48
DM
6280 {}
6281
6282 /* opt_pass methods: */
be55bfe6
TS
6283 virtual unsigned int execute (function *)
6284 {
6285 return rest_of_handle_thread_prologue_and_epilogue ();
6286 }
27a4cd48
DM
6287
6288}; // class pass_thread_prologue_and_epilogue
6289
6290} // anon namespace
6291
6292rtl_opt_pass *
6293make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6294{
6295 return new pass_thread_prologue_and_epilogue (ctxt);
6296}
d8d72314
PB
6297\f
6298
6299/* This mini-pass fixes fall-out from SSA in asm statements that have
b8698a0f 6300 in-out constraints. Say you start with
d8d72314
PB
6301
6302 orig = inout;
6303 asm ("": "+mr" (inout));
6304 use (orig);
6305
6306 which is transformed very early to use explicit output and match operands:
6307
6308 orig = inout;
6309 asm ("": "=mr" (inout) : "0" (inout));
6310 use (orig);
6311
6312 Or, after SSA and copyprop,
6313
6314 asm ("": "=mr" (inout_2) : "0" (inout_1));
6315 use (inout_1);
6316
6317 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6318 they represent two separate values, so they will get different pseudo
6319 registers during expansion. Then, since the two operands need to match
6320 per the constraints, but use different pseudo registers, reload can
6321 only register a reload for these operands. But reloads can only be
6322 satisfied by hardregs, not by memory, so we need a register for this
6323 reload, just because we are presented with non-matching operands.
6324 So, even though we allow memory for this operand, no memory can be
6325 used for it, just because the two operands don't match. This can
6326 cause reload failures on register-starved targets.
6327
6328 So it's a symptom of reload not being able to use memory for reloads
 6329	 or, alternatively, it's also a symptom of both operands not coming into
6330 reload as matching (in which case the pseudo could go to memory just
6331 fine, as the alternative allows it, and no reload would be necessary).
6332 We fix the latter problem here, by transforming
6333
6334 asm ("": "=mr" (inout_2) : "0" (inout_1));
6335
6336 back to
6337
6338 inout_2 = inout_1;
6339 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
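/* Illustrative source-level example (a hypothetical translation unit, not
   shown elsewhere in this file) that reaches the problematic form above:

     int
     foo (int inout)
     {
       int orig = inout;
       asm ("" : "+mr" (inout));
       return orig + inout;
     }

   The "+mr" operand is split into an "=mr" output plus a matching "0"
   input, and once copy propagation keeps orig live the two asm operands
   land in different pseudos; that is exactly the case rewritten below.  */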
6340
6341static void
691fe203 6342match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
d8d72314
PB
6343{
6344 int i;
6345 bool changed = false;
6346 rtx op = SET_SRC (p_sets[0]);
6347 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6348 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
1b4572a8 6349 bool *output_matched = XALLOCAVEC (bool, noutputs);
d8d72314 6350
d7b8033f 6351 memset (output_matched, 0, noutputs * sizeof (bool));
d8d72314
PB
6352 for (i = 0; i < ninputs; i++)
6353 {
691fe203
DM
6354 rtx input, output;
6355 rtx_insn *insns;
d8d72314
PB
6356 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6357 char *end;
53220215 6358 int match, j;
d8d72314 6359
70f16287
JJ
6360 if (*constraint == '%')
6361 constraint++;
6362
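      /* A matching constraint names its output operand by decimal number
	 (e.g. "0").  If the constraint does not start with a digit,
	 strtoul consumes nothing, end == constraint, and this input is
	 skipped.  */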
d8d72314
PB
6363 match = strtoul (constraint, &end, 10);
6364 if (end == constraint)
6365 continue;
6366
6367 gcc_assert (match < noutputs);
6368 output = SET_DEST (p_sets[match]);
6369 input = RTVEC_ELT (inputs, i);
53220215
MM
6370 /* Only do the transformation for pseudos. */
6371 if (! REG_P (output)
6372 || rtx_equal_p (output, input)
d8d72314
PB
6373 || (GET_MODE (input) != VOIDmode
6374 && GET_MODE (input) != GET_MODE (output)))
6375 continue;
6376
53220215
MM
6377 /* We can't do anything if the output is also used as input,
6378 as we're going to overwrite it. */
6379 for (j = 0; j < ninputs; j++)
6380 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6381 break;
6382 if (j != ninputs)
6383 continue;
6384
d7b8033f
JJ
6385 /* Avoid changing the same input several times. For
6386 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6387 only change in once (to out1), rather than changing it
6388 first to out1 and afterwards to out2. */
6389 if (i > 0)
6390 {
6391 for (j = 0; j < noutputs; j++)
6392 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6393 break;
6394 if (j != noutputs)
6395 continue;
6396 }
6397 output_matched[match] = true;
6398
d8d72314 6399 start_sequence ();
53220215 6400 emit_move_insn (output, input);
d8d72314
PB
6401 insns = get_insns ();
6402 end_sequence ();
d8d72314 6403 emit_insn_before (insns, insn);
53220215
MM
6404
6405 /* Now replace all mentions of the input with output. We can't
fa10beec 6406 just replace the occurrence in inputs[i], as the register might
53220215
MM
6407 also be used in some other input (or even in an address of an
6408 output), which would mean possibly increasing the number of
6409 inputs by one (namely 'output' in addition), which might pose
 6410	 too complicated a problem for reload to solve. E.g. this situation:
6411
6412 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6413
84fbffb2 6414 Here 'input' is used in two occurrences as input (once for the
53220215 6415 input operand, once for the address in the second output operand).
fa10beec 6416	 If we replaced only the occurrence of the input operand (to
53220215
MM
 6417	 make the matching), we would be left with this:
6418
6419 output = input
6420 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6421
6422 Now we suddenly have two different input values (containing the same
6423 value, but different pseudos) where we formerly had only one.
6424 With more complicated asms this might lead to reload failures
 6425	 which wouldn't have happened without this pass. So, iterate over
84fbffb2 6426 all operands and replace all occurrences of the register used. */
53220215 6427 for (j = 0; j < noutputs; j++)
1596d61e 6428 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
53220215
MM
6429 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6430 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6431 input, output);
6432 for (j = 0; j < ninputs; j++)
6433 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6434 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6435 input, output);
6436
d8d72314
PB
6437 changed = true;
6438 }
6439
6440 if (changed)
6441 df_insn_rescan (insn);
6442}
6443
be55bfe6
TS
6444namespace {
6445
6446const pass_data pass_data_match_asm_constraints =
6447{
6448 RTL_PASS, /* type */
6449 "asmcons", /* name */
6450 OPTGROUP_NONE, /* optinfo_flags */
be55bfe6
TS
6451 TV_NONE, /* tv_id */
6452 0, /* properties_required */
6453 0, /* properties_provided */
6454 0, /* properties_destroyed */
6455 0, /* todo_flags_start */
6456 0, /* todo_flags_finish */
6457};
6458
6459class pass_match_asm_constraints : public rtl_opt_pass
6460{
6461public:
6462 pass_match_asm_constraints (gcc::context *ctxt)
6463 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6464 {}
6465
6466 /* opt_pass methods: */
6467 virtual unsigned int execute (function *);
6468
6469}; // class pass_match_asm_constraints
6470
6471unsigned
6472pass_match_asm_constraints::execute (function *fun)
d8d72314
PB
6473{
6474 basic_block bb;
691fe203
DM
6475 rtx_insn *insn;
6476 rtx pat, *p_sets;
d8d72314
PB
6477 int noutputs;
6478
e3b5732b 6479 if (!crtl->has_asm_statement)
d8d72314
PB
6480 return 0;
6481
6482 df_set_flags (DF_DEFER_INSN_RESCAN);
be55bfe6 6483 FOR_EACH_BB_FN (bb, fun)
d8d72314
PB
6484 {
6485 FOR_BB_INSNS (bb, insn)
6486 {
6487 if (!INSN_P (insn))
6488 continue;
6489
6490 pat = PATTERN (insn);
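	  /* An asm with several outputs (and possibly clobbers) is wrapped
	     in a PARALLEL whose leading elements are the output SETs; a
	     single-output asm is a bare SET.  p_sets points at the first
	     SET in either case.  */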
6491 if (GET_CODE (pat) == PARALLEL)
6492 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6493 else if (GET_CODE (pat) == SET)
6494 p_sets = &PATTERN (insn), noutputs = 1;
6495 else
6496 continue;
6497
6498 if (GET_CODE (*p_sets) == SET
6499 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6500 match_asm_constraints_1 (insn, p_sets, noutputs);
6501 }
6502 }
6503
6504 return TODO_df_finish;
6505}
6506
27a4cd48
DM
6507} // anon namespace
6508
6509rtl_opt_pass *
6510make_pass_match_asm_constraints (gcc::context *ctxt)
6511{
6512 return new pass_match_asm_constraints (ctxt);
6513}
6514
faed5cc3 6515
e2500fed 6516#include "gt-function.h"