/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"
#include "rtl-error.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "stringpool.h"
#include "flags.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "recog.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "params.h"
#include "bb-reorder.h"
#include "shrink-wrap.h"
#include "toplev.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

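/* For example, with ALIGN == 8, FLOOR_ROUND (13, 8) is 8,
   FLOOR_ROUND (-13, 8) is -16, and CEIL_ROUND (13, 8) is 16; masking
   instead of division keeps the results correct for negative values.  */
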
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;


hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);

/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
             /* Leave room for the fixed part of the frame.  */
             - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
                "total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}

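/* As a concrete illustration of the check above: on a typical 32-bit
   target (Pmode of 32 bits, UNITS_PER_WORD of 4), the frame is rejected
   once it exceeds 2^31 - 256 bytes, the 256 bytes being the
   "64 * UNITS_PER_WORD" head room reserved for the fixed part of the
   frame.  */
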
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
                     HOST_WIDE_INT size, unsigned int alignment,
                     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
                      (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
                     (unsigned HOST_WIDE_INT) alignment)
         + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
           && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}

/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
                      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || size == 0
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (slot_offset > space->start)
                add_frame_space (space->start, slot_offset);
              if (slot_offset + size < space->start + space->length)
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > frame_offset)
            add_frame_space (frame_offset, slot_offset);
          if (slot_offset + size < old_frame_offset)
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (slot_offset > old_frame_offset)
            add_frame_space (old_frame_offset, slot_offset);
          if (slot_offset + size < frame_offset)
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with the KIND parameter defaulting to
   ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
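
/* Illustrative only: a back end that needs a scratch stack slot for a
   DImode value would typically write something like

       rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   passing 0 for ALIGN so the slot is aligned according to the mode, as
   described above assign_stack_local_1.  */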

/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}

/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}

/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}

/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}

/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}

/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          {
            if (INTVAL (XEXP (x, 1)) >= p->base_offset
                && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
              return p;
          }
    }

  return NULL;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
                            tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align && p->size >= size
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0 || best_p->size > p->size
                  || (best_p->size == p->size && best_p->align > p->align)))
            {
              if (p->align == align && p->size == size)
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? CEIL_ROUND (size,
                                                     (int) align
                                                     / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}

/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (size == 0)
        size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      else if (size == -1)
        size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
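
/* Illustrative only: expansion code that needs an addressable temporary
   for a tree type TYPE (for instance, an aggregate return value) would
   call something like

       rtx mem = assign_temp (type, 1, 1);

   with MEMORY_REQUIRED set so a stack slot is produced even for types
   whose mode would otherwise fit in a register.  */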

/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless at
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (p->base_offset + p->full_size == q->base_offset)
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (q->base_offset + q->full_size == p->base_offset)
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}

/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}

/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ??? This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
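
/* Illustrative only: a target that needs the value its link register had
   on entry (say, to implement __builtin_return_address) would call
   something like

       rtx lr = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   where LR_REGNUM stands for that target's link-register number; the
   copy from the hard register is emitted later by
   emit_initial_value_sets.  */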
1293
1294/* See if get_hard_reg_initial_val has been used to create a pseudo
1295 for the initial value of hard register REGNO in mode MODE. Return
1296 the associated pseudo if so, otherwise return NULL. */
1297
1298rtx
ef4bddc2 1299has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
6399c0ab
SB
1300{
1301 struct initial_value_struct *ivs;
1302 int i;
1303
1304 ivs = crtl->hard_reg_initial_vals;
1305 if (ivs != 0)
1306 for (i = 0; i < ivs->num_entries; i++)
1307 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1308 && REGNO (ivs->entries[i].hard_reg) == regno)
1309 return ivs->entries[i].pseudo;
1310
1311 return NULL_RTX;
1312}
1313
1314unsigned int
1315emit_initial_value_sets (void)
1316{
1317 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1318 int i;
691fe203 1319 rtx_insn *seq;
6399c0ab
SB
1320
1321 if (ivs == 0)
1322 return 0;
1323
1324 start_sequence ();
1325 for (i = 0; i < ivs->num_entries; i++)
1326 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1327 seq = get_insns ();
1328 end_sequence ();
1329
1330 emit_insn_at_entry (seq);
1331 return 0;
1332}
1333
1334/* Return the hardreg-pseudoreg initial values pair entry I and
1335 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1336bool
1337initial_value_entry (int i, rtx *hreg, rtx *preg)
1338{
1339 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1340 if (!ivs || i >= ivs->num_entries)
1341 return false;
1342
1343 *hreg = ivs->entries[i].hard_reg;
1344 *preg = ivs->entries[i].pseudo;
1345 return true;
1346}
1347\f
8fff4fc1
RH
1348/* These routines are responsible for converting virtual register references
1349 to the actual hard register references once RTL generation is complete.
718fe406 1350
8fff4fc1
RH
1351 The following four variables are used for communication between the
1352 routines. They contain the offsets of the virtual registers from their
1353 respective hard registers. */
fe9b4957 1354
8fff4fc1
RH
1355static int in_arg_offset;
1356static int var_offset;
1357static int dynamic_offset;
1358static int out_arg_offset;
1359static int cfa_offset;
8a5275eb 1360
8fff4fc1
RH
1361/* In most machines, the stack pointer register is equivalent to the bottom
1362 of the stack. */
718fe406 1363
8fff4fc1
RH
1364#ifndef STACK_POINTER_OFFSET
1365#define STACK_POINTER_OFFSET 0
1366#endif
8c36698e 1367
ddbb449f
AM
1368#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1369#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1370#endif
1371
8fff4fc1
RH
1372/* If not defined, pick an appropriate default for the offset of dynamically
1373 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
ddbb449f 1374 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
fe9b4957 1375
8fff4fc1 1376#ifndef STACK_DYNAMIC_OFFSET
8a5275eb 1377
8fff4fc1
RH
1378/* The bottom of the stack points to the actual arguments. If
1379 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1380 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1381 stack space for register parameters is not pushed by the caller, but
1382 rather part of the fixed stack areas and hence not included in
38173d38 1383 `crtl->outgoing_args_size'. Nevertheless, we must allow
8fff4fc1 1384 for it when allocating stack dynamic objects. */
8a5275eb 1385
ddbb449f 1386#ifdef INCOMING_REG_PARM_STACK_SPACE
8fff4fc1
RH
1387#define STACK_DYNAMIC_OFFSET(FNDECL) \
1388((ACCUMULATE_OUTGOING_ARGS \
38173d38 1389 ? (crtl->outgoing_args_size \
81464b2c 1390 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
ddbb449f 1391 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
ac294f0b 1392 : 0) + (STACK_POINTER_OFFSET))
8fff4fc1
RH
1393#else
1394#define STACK_DYNAMIC_OFFSET(FNDECL) \
38173d38 1395((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
8fff4fc1
RH
1396 + (STACK_POINTER_OFFSET))
1397#endif
1398#endif
4fa48eae 1399
659e47fb 1400\f
bbf9b913
RH
1401/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1402 is a virtual register, return the equivalent hard register and set the
1403 offset indirectly through the pointer. Otherwise, return 0. */
6f086dfc 1404
bbf9b913
RH
1405static rtx
1406instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
6f086dfc 1407{
82d6e6fc 1408 rtx new_rtx;
bbf9b913 1409 HOST_WIDE_INT offset;
6f086dfc 1410
bbf9b913 1411 if (x == virtual_incoming_args_rtx)
2e3f842f 1412 {
d015f7cc 1413 if (stack_realign_drap)
2e3f842f 1414 {
d015f7cc
L
1415 /* Replace virtual_incoming_args_rtx with internal arg
1416 pointer if DRAP is used to realign stack. */
82d6e6fc 1417 new_rtx = crtl->args.internal_arg_pointer;
2e3f842f
L
1418 offset = 0;
1419 }
1420 else
82d6e6fc 1421 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
2e3f842f 1422 }
bbf9b913 1423 else if (x == virtual_stack_vars_rtx)
82d6e6fc 1424 new_rtx = frame_pointer_rtx, offset = var_offset;
bbf9b913 1425 else if (x == virtual_stack_dynamic_rtx)
82d6e6fc 1426 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
bbf9b913 1427 else if (x == virtual_outgoing_args_rtx)
82d6e6fc 1428 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
bbf9b913 1429 else if (x == virtual_cfa_rtx)
f6672e8e
RH
1430 {
1431#ifdef FRAME_POINTER_CFA_OFFSET
82d6e6fc 1432 new_rtx = frame_pointer_rtx;
f6672e8e 1433#else
82d6e6fc 1434 new_rtx = arg_pointer_rtx;
f6672e8e
RH
1435#endif
1436 offset = cfa_offset;
1437 }
32990d5b
JJ
1438 else if (x == virtual_preferred_stack_boundary_rtx)
1439 {
1440 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1441 offset = 0;
1442 }
bbf9b913
RH
1443 else
1444 return NULL_RTX;
6f086dfc 1445
bbf9b913 1446 *poffset = offset;
82d6e6fc 1447 return new_rtx;
6f086dfc
RS
1448}
1449
b8704801
RS
1450/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1451 registers present inside of *LOC. The expression is simplified,
1452 as much as possible, but is not to be considered "valid" in any sense
1453 implied by the target. Return true if any change is made. */
6f086dfc 1454
b8704801
RS
1455static bool
1456instantiate_virtual_regs_in_rtx (rtx *loc)
6f086dfc 1457{
b8704801
RS
1458 if (!*loc)
1459 return false;
1460 bool changed = false;
1461 subrtx_ptr_iterator::array_type array;
1462 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
6f086dfc 1463 {
b8704801
RS
1464 rtx *loc = *iter;
1465 if (rtx x = *loc)
bbf9b913 1466 {
b8704801
RS
1467 rtx new_rtx;
1468 HOST_WIDE_INT offset;
1469 switch (GET_CODE (x))
1470 {
1471 case REG:
1472 new_rtx = instantiate_new_reg (x, &offset);
1473 if (new_rtx)
1474 {
1475 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1476 changed = true;
1477 }
1478 iter.skip_subrtxes ();
1479 break;
bbf9b913 1480
b8704801
RS
1481 case PLUS:
1482 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1483 if (new_rtx)
1484 {
1485 XEXP (x, 0) = new_rtx;
1486 *loc = plus_constant (GET_MODE (x), x, offset, true);
1487 changed = true;
1488 iter.skip_subrtxes ();
1489 break;
1490 }
e5e809f4 1491
b8704801
RS
1492 /* FIXME -- from old code */
1493 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1494 we can commute the PLUS and SUBREG because pointers into the
1495 frame are well-behaved. */
1496 break;
ce717ce4 1497
b8704801
RS
1498 default:
1499 break;
1500 }
1501 }
6f086dfc 1502 }
b8704801 1503 return changed;
6f086dfc
RS
1504}
1505
bbf9b913
RH
1506/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1507 matches the predicate for insn CODE operand OPERAND. */
6f086dfc 1508
bbf9b913
RH
1509static int
1510safe_insn_predicate (int code, int operand, rtx x)
6f086dfc 1511{
2ef6ce06 1512 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
bbf9b913 1513}
5a73491b 1514
bbf9b913
RH
1515/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1516 registers present inside of insn. The result will be a valid insn. */
5a73491b
RK
1517
1518static void
691fe203 1519instantiate_virtual_regs_in_insn (rtx_insn *insn)
5a73491b 1520{
bbf9b913
RH
1521 HOST_WIDE_INT offset;
1522 int insn_code, i;
9325973e 1523 bool any_change = false;
691fe203
DM
1524 rtx set, new_rtx, x;
1525 rtx_insn *seq;
32e66afd 1526
bbf9b913
RH
1527 /* There are some special cases to be handled first. */
1528 set = single_set (insn);
1529 if (set)
32e66afd 1530 {
bbf9b913
RH
1531 /* We're allowed to assign to a virtual register. This is interpreted
1532 to mean that the underlying register gets assigned the inverse
1533 transformation. This is used, for example, in the handling of
1534 non-local gotos. */
82d6e6fc
KG
1535 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1536 if (new_rtx)
bbf9b913
RH
1537 {
1538 start_sequence ();
32e66afd 1539
b8704801 1540 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
82d6e6fc 1541 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
69a59f0f 1542 gen_int_mode (-offset, GET_MODE (new_rtx)));
82d6e6fc
KG
1543 x = force_operand (x, new_rtx);
1544 if (x != new_rtx)
1545 emit_move_insn (new_rtx, x);
5a73491b 1546
bbf9b913
RH
1547 seq = get_insns ();
1548 end_sequence ();
5a73491b 1549
bbf9b913
RH
1550 emit_insn_before (seq, insn);
1551 delete_insn (insn);
1552 return;
1553 }
5a73491b 1554
bbf9b913
RH
1555 /* Handle a straight copy from a virtual register by generating a
1556 new add insn. The difference between this and falling through
1557 to the generic case is avoiding a new pseudo and eliminating a
1558 move insn in the initial rtl stream. */
82d6e6fc
KG
1559 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1560 if (new_rtx && offset != 0
bbf9b913
RH
1561 && REG_P (SET_DEST (set))
1562 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1563 {
1564 start_sequence ();
5a73491b 1565
2f1cd2eb
RS
1566 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1567 gen_int_mode (offset,
1568 GET_MODE (SET_DEST (set))),
1569 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1570 if (x != SET_DEST (set))
1571 emit_move_insn (SET_DEST (set), x);
770ae6cc 1572
bbf9b913
RH
1573 seq = get_insns ();
1574 end_sequence ();
87ce34d6 1575
bbf9b913
RH
1576 emit_insn_before (seq, insn);
1577 delete_insn (insn);
87ce34d6 1578 return;
bbf9b913 1579 }
5a73491b 1580
bbf9b913 1581 extract_insn (insn);
9325973e 1582 insn_code = INSN_CODE (insn);
5a73491b 1583
bbf9b913
RH
1584 /* Handle a plus involving a virtual register by determining if the
1585 operands remain valid if they're modified in place. */
1586 if (GET_CODE (SET_SRC (set)) == PLUS
1587 && recog_data.n_operands >= 3
1588 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1589 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
481683e1 1590 && CONST_INT_P (recog_data.operand[2])
82d6e6fc 1591 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
bbf9b913
RH
1592 {
1593 offset += INTVAL (recog_data.operand[2]);
5a73491b 1594
bbf9b913 1595 /* If the sum is zero, then replace with a plain move. */
9325973e
RH
1596 if (offset == 0
1597 && REG_P (SET_DEST (set))
1598 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
bbf9b913
RH
1599 {
1600 start_sequence ();
82d6e6fc 1601 emit_move_insn (SET_DEST (set), new_rtx);
bbf9b913
RH
1602 seq = get_insns ();
1603 end_sequence ();
d1405722 1604
bbf9b913
RH
1605 emit_insn_before (seq, insn);
1606 delete_insn (insn);
1607 return;
1608 }
d1405722 1609
bbf9b913 1610 x = gen_int_mode (offset, recog_data.operand_mode[2]);
bbf9b913
RH
1611
1612 /* Using validate_change and apply_change_group here leaves
1613 recog_data in an invalid state. Since we know exactly what
1614 we want to check, do those two by hand. */
82d6e6fc 1615 if (safe_insn_predicate (insn_code, 1, new_rtx)
bbf9b913
RH
1616 && safe_insn_predicate (insn_code, 2, x))
1617 {
82d6e6fc 1618 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
bbf9b913
RH
1619 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1620 any_change = true;
9325973e
RH
1621
1622 /* Fall through into the regular operand fixup loop in
1623 order to take care of operands other than 1 and 2. */
bbf9b913
RH
1624 }
1625 }
1626 }
d1405722 1627 else
9325973e
RH
1628 {
1629 extract_insn (insn);
1630 insn_code = INSN_CODE (insn);
1631 }
5dc96d60 1632
bbf9b913
RH
1633 /* In the general case, we expect virtual registers to appear only in
1634 operands, and then only as either bare registers or inside memories. */
1635 for (i = 0; i < recog_data.n_operands; ++i)
1636 {
1637 x = recog_data.operand[i];
1638 switch (GET_CODE (x))
1639 {
1640 case MEM:
1641 {
1642 rtx addr = XEXP (x, 0);
bbf9b913 1643
b8704801 1644 if (!instantiate_virtual_regs_in_rtx (&addr))
bbf9b913
RH
1645 continue;
1646
1647 start_sequence ();
23b33725 1648 x = replace_equiv_address (x, addr, true);
a5bfb13a
MM
1649 /* It may happen that the address with the virtual reg
1650 was valid (e.g. based on the virtual stack reg, which might
1651 be acceptable to the predicates with all offsets), whereas
 1652	     the address no longer is, for instance when the address
 1653	     still has an offset but the base reg is no longer the
 1654	     virtual stack reg.  Below we would do a force_reg on the whole operand,
1655 but this insn might actually only accept memory. Hence,
1656 before doing that last resort, try to reload the address into
1657 a register, so this operand stays a MEM. */
1658 if (!safe_insn_predicate (insn_code, i, x))
1659 {
1660 addr = force_reg (GET_MODE (addr), addr);
23b33725 1661 x = replace_equiv_address (x, addr, true);
a5bfb13a 1662 }
bbf9b913
RH
1663 seq = get_insns ();
1664 end_sequence ();
1665 if (seq)
1666 emit_insn_before (seq, insn);
1667 }
1668 break;
1669
1670 case REG:
82d6e6fc
KG
1671 new_rtx = instantiate_new_reg (x, &offset);
1672 if (new_rtx == NULL)
bbf9b913
RH
1673 continue;
1674 if (offset == 0)
82d6e6fc 1675 x = new_rtx;
bbf9b913
RH
1676 else
1677 {
1678 start_sequence ();
6f086dfc 1679
bbf9b913
RH
1680 /* Careful, special mode predicates may have stuff in
1681 insn_data[insn_code].operand[i].mode that isn't useful
1682 to us for computing a new value. */
1683 /* ??? Recognize address_operand and/or "p" constraints
 1684	     to see if (plus new offset) is a valid address before we put
1685 this through expand_simple_binop. */
82d6e6fc 1686 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
2f1cd2eb
RS
1687 gen_int_mode (offset, GET_MODE (x)),
1688 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1689 seq = get_insns ();
1690 end_sequence ();
1691 emit_insn_before (seq, insn);
1692 }
1693 break;
6f086dfc 1694
bbf9b913 1695 case SUBREG:
82d6e6fc
KG
1696 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1697 if (new_rtx == NULL)
bbf9b913
RH
1698 continue;
1699 if (offset != 0)
1700 {
1701 start_sequence ();
2f1cd2eb
RS
1702 new_rtx = expand_simple_binop
1703 (GET_MODE (new_rtx), PLUS, new_rtx,
1704 gen_int_mode (offset, GET_MODE (new_rtx)),
1705 NULL_RTX, 1, OPTAB_LIB_WIDEN);
bbf9b913
RH
1706 seq = get_insns ();
1707 end_sequence ();
1708 emit_insn_before (seq, insn);
1709 }
82d6e6fc
KG
1710 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1711 GET_MODE (new_rtx), SUBREG_BYTE (x));
7314c7dd 1712 gcc_assert (x);
bbf9b913 1713 break;
6f086dfc 1714
bbf9b913
RH
1715 default:
1716 continue;
1717 }
6f086dfc 1718
bbf9b913
RH
1719 /* At this point, X contains the new value for the operand.
1720 Validate the new value vs the insn predicate. Note that
1721 asm insns will have insn_code -1 here. */
1722 if (!safe_insn_predicate (insn_code, i, x))
6ba1bd36
JM
1723 {
1724 start_sequence ();
f7ce0951
SE
1725 if (REG_P (x))
1726 {
1727 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1728 x = copy_to_reg (x);
1729 }
1730 else
1731 x = force_reg (insn_data[insn_code].operand[i].mode, x);
6ba1bd36
JM
1732 seq = get_insns ();
1733 end_sequence ();
1734 if (seq)
1735 emit_insn_before (seq, insn);
1736 }
6f086dfc 1737
bbf9b913
RH
1738 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1739 any_change = true;
1740 }
6f086dfc 1741
bbf9b913
RH
1742 if (any_change)
1743 {
1744 /* Propagate operand changes into the duplicates. */
1745 for (i = 0; i < recog_data.n_dups; ++i)
1746 *recog_data.dup_loc[i]
3e916873 1747 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
5dc96d60 1748
bbf9b913
RH
1749 /* Force re-recognition of the instruction for validation. */
1750 INSN_CODE (insn) = -1;
1751 }
6f086dfc 1752
bbf9b913 1753 if (asm_noperands (PATTERN (insn)) >= 0)
6f086dfc 1754 {
bbf9b913 1755 if (!check_asm_operands (PATTERN (insn)))
6f086dfc 1756 {
bbf9b913 1757 error_for_asm (insn, "impossible constraint in %<asm%>");
5a860835
JJ
1758 /* For asm goto, instead of fixing up all the edges
1759 just clear the template and clear input operands
1760 (asm goto doesn't have any output operands). */
1761 if (JUMP_P (insn))
1762 {
1763 rtx asm_op = extract_asm_operands (PATTERN (insn));
1764 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1765 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1766 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1767 }
1768 else
1769 delete_insn (insn);
bbf9b913
RH
1770 }
1771 }
1772 else
1773 {
1774 if (recog_memoized (insn) < 0)
1775 fatal_insn_not_found (insn);
1776 }
1777}
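/* Illustrative sketch of the common operand case handled by the loop above,
   assuming var_offset is the frame offset chosen for virtual-stack-vars:
   a memory operand such as

     (mem:SI (plus:SI (reg virtual-stack-vars) (const_int 8)))

   has its address rewritten in place to

     (mem:SI (plus:SI (reg frame-pointer) (const_int var_offset + 8)))

   and is then re-checked against the insn predicate, falling back to
   loading the address (or the whole operand) into a register when the
   rewritten form is no longer accepted.  */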
14a774a9 1778
bbf9b913
RH
1779/* Subroutine of instantiate_decls. Given RTL representing a decl,
1780 do any instantiation required. */
14a774a9 1781
e41b2a33
PB
1782void
1783instantiate_decl_rtl (rtx x)
bbf9b913
RH
1784{
1785 rtx addr;
6f086dfc 1786
bbf9b913
RH
1787 if (x == 0)
1788 return;
6f086dfc 1789
bbf9b913
RH
1790 /* If this is a CONCAT, recurse for the pieces. */
1791 if (GET_CODE (x) == CONCAT)
1792 {
e41b2a33
PB
1793 instantiate_decl_rtl (XEXP (x, 0));
1794 instantiate_decl_rtl (XEXP (x, 1));
bbf9b913
RH
1795 return;
1796 }
6f086dfc 1797
bbf9b913
RH
1798 /* If this is not a MEM, no need to do anything. Similarly if the
1799 address is a constant or a register that is not a virtual register. */
1800 if (!MEM_P (x))
1801 return;
6f086dfc 1802
bbf9b913
RH
1803 addr = XEXP (x, 0);
1804 if (CONSTANT_P (addr)
1805 || (REG_P (addr)
1806 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1807 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1808 return;
6f086dfc 1809
b8704801 1810 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
bbf9b913 1811}
6f086dfc 1812
434eba35
JJ
1813/* Helper for instantiate_decls called via walk_tree: Process all decls
1814 in the given DECL_VALUE_EXPR. */
1815
1816static tree
1817instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1818{
1819 tree t = *tp;
726a989a 1820 if (! EXPR_P (t))
434eba35
JJ
1821 {
1822 *walk_subtrees = 0;
37d6a488
AO
1823 if (DECL_P (t))
1824 {
1825 if (DECL_RTL_SET_P (t))
1826 instantiate_decl_rtl (DECL_RTL (t));
1827 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1828 && DECL_INCOMING_RTL (t))
1829 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1830 if ((TREE_CODE (t) == VAR_DECL
1831 || TREE_CODE (t) == RESULT_DECL)
1832 && DECL_HAS_VALUE_EXPR_P (t))
1833 {
1834 tree v = DECL_VALUE_EXPR (t);
1835 walk_tree (&v, instantiate_expr, NULL, NULL);
1836 }
1837 }
434eba35
JJ
1838 }
1839 return NULL;
1840}
1841
bbf9b913
RH
1842/* Subroutine of instantiate_decls: Process all decls in the given
1843 BLOCK node and all its subblocks. */
6f086dfc 1844
bbf9b913
RH
1845static void
1846instantiate_decls_1 (tree let)
1847{
1848 tree t;
6f086dfc 1849
910ad8de 1850 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
434eba35
JJ
1851 {
1852 if (DECL_RTL_SET_P (t))
e41b2a33 1853 instantiate_decl_rtl (DECL_RTL (t));
434eba35
JJ
1854 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1855 {
1856 tree v = DECL_VALUE_EXPR (t);
1857 walk_tree (&v, instantiate_expr, NULL, NULL);
1858 }
1859 }
6f086dfc 1860
bbf9b913 1861 /* Process all subblocks. */
87caf699 1862 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
bbf9b913
RH
1863 instantiate_decls_1 (t);
1864}
6f086dfc 1865
bbf9b913
RH
1866/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1867 all virtual registers in their DECL_RTL's. */
6f086dfc 1868
bbf9b913
RH
1869static void
1870instantiate_decls (tree fndecl)
1871{
c021f10b
NF
1872 tree decl;
1873 unsigned ix;
6f086dfc 1874
bbf9b913 1875 /* Process all parameters of the function. */
910ad8de 1876 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
bbf9b913 1877 {
e41b2a33
PB
1878 instantiate_decl_rtl (DECL_RTL (decl));
1879 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
434eba35
JJ
1880 if (DECL_HAS_VALUE_EXPR_P (decl))
1881 {
1882 tree v = DECL_VALUE_EXPR (decl);
1883 walk_tree (&v, instantiate_expr, NULL, NULL);
1884 }
bbf9b913 1885 }
4fd796bb 1886
37d6a488
AO
1887 if ((decl = DECL_RESULT (fndecl))
1888 && TREE_CODE (decl) == RESULT_DECL)
1889 {
1890 if (DECL_RTL_SET_P (decl))
1891 instantiate_decl_rtl (DECL_RTL (decl));
1892 if (DECL_HAS_VALUE_EXPR_P (decl))
1893 {
1894 tree v = DECL_VALUE_EXPR (decl);
1895 walk_tree (&v, instantiate_expr, NULL, NULL);
1896 }
1897 }
1898
3fd48b12
EB
1899 /* Process the saved static chain if it exists. */
1900 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1901 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1902 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1903
bbf9b913
RH
1904 /* Now process all variables defined in the function or its subblocks. */
1905 instantiate_decls_1 (DECL_INITIAL (fndecl));
802e9f8e 1906
c021f10b
NF
1907 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1908 if (DECL_RTL_SET_P (decl))
1909 instantiate_decl_rtl (DECL_RTL (decl));
9771b263 1910 vec_free (cfun->local_decls);
bbf9b913 1911}
6f086dfc 1912
bbf9b913
RH
1913/* Pass through the INSNS of function FNDECL and convert virtual register
1914 references to hard register references. */
6f086dfc 1915
c2924966 1916static unsigned int
bbf9b913
RH
1917instantiate_virtual_regs (void)
1918{
691fe203 1919 rtx_insn *insn;
6f086dfc 1920
bbf9b913
RH
1921 /* Compute the offsets to use for this function. */
1922 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1923 var_offset = STARTING_FRAME_OFFSET;
1924 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1925 out_arg_offset = STACK_POINTER_OFFSET;
f6672e8e
RH
1926#ifdef FRAME_POINTER_CFA_OFFSET
1927 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1928#else
bbf9b913 1929 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
f6672e8e 1930#endif
e9a25f70 1931
bbf9b913
RH
1932 /* Initialize recognition, indicating that volatile is OK. */
1933 init_recog ();
6f086dfc 1934
bbf9b913
RH
1935 /* Scan through all the insns, instantiating every virtual register still
1936 present. */
45dbce1b
NF
1937 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1938 if (INSN_P (insn))
1939 {
1940 /* These patterns in the instruction stream can never be recognized.
1941 Fortunately, they shouldn't contain virtual registers either. */
39718607 1942 if (GET_CODE (PATTERN (insn)) == USE
45dbce1b 1943 || GET_CODE (PATTERN (insn)) == CLOBBER
45dbce1b
NF
1944 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1945 continue;
1946 else if (DEBUG_INSN_P (insn))
b8704801 1947 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
45dbce1b
NF
1948 else
1949 instantiate_virtual_regs_in_insn (insn);
ba4807a0 1950
4654c0cf 1951 if (insn->deleted ())
45dbce1b 1952 continue;
7114321e 1953
b8704801 1954 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
ba4807a0 1955
45dbce1b
NF
1956 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1957 if (CALL_P (insn))
b8704801 1958 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
45dbce1b 1959 }
6f086dfc 1960
bbf9b913
RH
1961 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1962 instantiate_decls (current_function_decl);
1963
e41b2a33
PB
1964 targetm.instantiate_decls ();
1965
bbf9b913
RH
1966 /* Indicate that, from now on, assign_stack_local should use
1967 frame_pointer_rtx. */
1968 virtuals_instantiated = 1;
d3c12306 1969
c2924966 1970 return 0;
6f086dfc 1971}
ef330312 1972
27a4cd48
DM
1973namespace {
1974
1975const pass_data pass_data_instantiate_virtual_regs =
1976{
1977 RTL_PASS, /* type */
1978 "vregs", /* name */
1979 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
1980 TV_NONE, /* tv_id */
1981 0, /* properties_required */
1982 0, /* properties_provided */
1983 0, /* properties_destroyed */
1984 0, /* todo_flags_start */
1985 0, /* todo_flags_finish */
ef330312
PB
1986};
1987
27a4cd48
DM
1988class pass_instantiate_virtual_regs : public rtl_opt_pass
1989{
1990public:
c3284718
RS
1991 pass_instantiate_virtual_regs (gcc::context *ctxt)
1992 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
27a4cd48
DM
1993 {}
1994
1995 /* opt_pass methods: */
be55bfe6
TS
1996 virtual unsigned int execute (function *)
1997 {
1998 return instantiate_virtual_regs ();
1999 }
27a4cd48
DM
2000
2001}; // class pass_instantiate_virtual_regs
2002
2003} // anon namespace
2004
2005rtl_opt_pass *
2006make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2007{
2008 return new pass_instantiate_virtual_regs (ctxt);
2009}
2010
6f086dfc 2011\f
d181c154
RS
2012/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2013 This means a type for which function calls must pass an address to the
2014 function or get an address back from the function.
2015 EXP may be a type node or an expression (whose type is tested). */
6f086dfc
RS
2016
2017int
586de218 2018aggregate_value_p (const_tree exp, const_tree fntype)
6f086dfc 2019{
d47d0a8d 2020 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
9d790a4f
RS
2021 int i, regno, nregs;
2022 rtx reg;
2f939d94 2023
61f71b34
DD
2024 if (fntype)
2025 switch (TREE_CODE (fntype))
2026 {
2027 case CALL_EXPR:
d47d0a8d
EB
2028 {
2029 tree fndecl = get_callee_fndecl (fntype);
1304953e
JJ
2030 if (fndecl)
2031 fntype = TREE_TYPE (fndecl);
2032 else if (CALL_EXPR_FN (fntype))
2033 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2034 else
2035 /* For internal functions, assume nothing needs to be
2036 returned in memory. */
2037 return 0;
d47d0a8d 2038 }
61f71b34
DD
2039 break;
2040 case FUNCTION_DECL:
d47d0a8d 2041 fntype = TREE_TYPE (fntype);
61f71b34
DD
2042 break;
2043 case FUNCTION_TYPE:
2044 case METHOD_TYPE:
2045 break;
2046 case IDENTIFIER_NODE:
d47d0a8d 2047 fntype = NULL_TREE;
61f71b34
DD
2048 break;
2049 default:
d47d0a8d 2050 /* We don't expect other tree types here. */
0bccc606 2051 gcc_unreachable ();
61f71b34
DD
2052 }
2053
d47d0a8d 2054 if (VOID_TYPE_P (type))
d7bf8ada 2055 return 0;
500c353d 2056
ebf0bf7f
JJ
2057 /* If a record should be passed the same as its first (and only) member
2058 don't pass it as an aggregate. */
2059 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2060 return aggregate_value_p (first_field (type), fntype);
2061
cc77ae10
JM
2062 /* If the front end has decided that this needs to be passed by
2063 reference, do so. */
2064 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2065 && DECL_BY_REFERENCE (exp))
2066 return 1;
500c353d 2067
d47d0a8d
EB
2068 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2069 if (fntype && TREE_ADDRESSABLE (fntype))
500c353d 2070 return 1;
b8698a0f 2071
956d6950 2072 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
49a2e5b2
DE
2073 and thus can't be returned in registers. */
2074 if (TREE_ADDRESSABLE (type))
2075 return 1;
d47d0a8d 2076
05e3bdb9 2077 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
6f086dfc 2078 return 1;
d47d0a8d
EB
2079
2080 if (targetm.calls.return_in_memory (type, fntype))
2081 return 1;
2082
9d790a4f
RS
2083 /* Make sure we have suitable call-clobbered regs to return
2084 the value in; if not, we must return it in memory. */
1d636cc6 2085 reg = hard_function_value (type, 0, fntype, 0);
e71f7aa5
JW
2086
2087 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2088 it is OK. */
f8cfc6aa 2089 if (!REG_P (reg))
e71f7aa5
JW
2090 return 0;
2091
9d790a4f 2092 regno = REGNO (reg);
66fd46b6 2093 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
9d790a4f
RS
2094 for (i = 0; i < nregs; i++)
2095 if (! call_used_regs[regno + i])
2096 return 1;
d47d0a8d 2097
6f086dfc
RS
2098 return 0;
2099}
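/* Illustrative sketch, under common psABI assumptions: on a typical 64-bit
   target a small struct such as

     struct small { int a, b; };

   fits in call-clobbered return registers and is not an aggregate value
   here (result 0), whereas a TREE_ADDRESSABLE type (e.g. a C++ class with
   a nontrivial copy constructor) or a type rejected by the target's
   return_in_memory hook yields 1, so the caller must pass a return slot
   address.  The outcome is entirely target- and ABI-dependent.  */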
2100\f
8fff4fc1
RH
2101/* Return true if we should assign DECL a pseudo register; false if it
2102 should live on the local stack. */
2103
2104bool
fa233e34 2105use_register_for_decl (const_tree decl)
8fff4fc1 2106{
c3284718 2107 if (!targetm.calls.allocate_stack_slots_for_args ())
007e61c2 2108 return true;
b8698a0f 2109
8fff4fc1
RH
2110 /* Honor volatile. */
2111 if (TREE_SIDE_EFFECTS (decl))
2112 return false;
2113
2114 /* Honor addressability. */
2115 if (TREE_ADDRESSABLE (decl))
2116 return false;
2117
d5e254e1
IE
 2118	  /* Decl is implicitly addressable by bound stores and loads
2119 if it is an aggregate holding bounds. */
2120 if (chkp_function_instrumented_p (current_function_decl)
2121 && TREE_TYPE (decl)
2122 && !BOUNDED_P (decl)
2123 && chkp_type_has_pointer (TREE_TYPE (decl)))
2124 return false;
2125
8fff4fc1
RH
2126 /* Only register-like things go in registers. */
2127 if (DECL_MODE (decl) == BLKmode)
2128 return false;
2129
2130 /* If -ffloat-store specified, don't put explicit float variables
2131 into registers. */
2132 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2133 propagates values across these stores, and it probably shouldn't. */
2134 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2135 return false;
2136
78e0d62b
RH
2137 /* If we're not interested in tracking debugging information for
2138 this decl, then we can certainly put it in a register. */
2139 if (DECL_IGNORED_P (decl))
8fff4fc1
RH
2140 return true;
2141
d130d647
JJ
2142 if (optimize)
2143 return true;
2144
2145 if (!DECL_REGISTER (decl))
2146 return false;
2147
2148 switch (TREE_CODE (TREE_TYPE (decl)))
2149 {
2150 case RECORD_TYPE:
2151 case UNION_TYPE:
2152 case QUAL_UNION_TYPE:
2153 /* When not optimizing, disregard register keyword for variables with
2154 types containing methods, otherwise the methods won't be callable
2155 from the debugger. */
5ce039df 2156 if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
d130d647
JJ
2157 return false;
2158 break;
2159 default:
2160 break;
2161 }
2162
2163 return true;
8fff4fc1
RH
2164}
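/* Illustrative sketch of the policy above:

     volatile int status;           -- TREE_SIDE_EFFECTS, kept on the stack
     int local; int *p = &local;    -- LOCAL becomes TREE_ADDRESSABLE, stack
     int counter;                   -- address never taken, may live in a pseudo

   At -O0 a plain variable is normally left on the stack unless it is
   DECL_IGNORED_P or was declared with the register keyword (and its type
   does not contain methods).  */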
2165
6071dc7f
RH
2166/* Structures to communicate between the subroutines of assign_parms.
2167 The first holds data persistent across all parameters, the second
2168 is cleared out for each parameter. */
6f086dfc 2169
6071dc7f 2170struct assign_parm_data_all
6f086dfc 2171{
d5cc9181
JR
2172 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2173 should become a job of the target or otherwise encapsulated. */
2174 CUMULATIVE_ARGS args_so_far_v;
2175 cumulative_args_t args_so_far;
6f086dfc 2176 struct args_size stack_args_size;
6071dc7f
RH
2177 tree function_result_decl;
2178 tree orig_fnargs;
7a688d52
DM
2179 rtx_insn *first_conversion_insn;
2180 rtx_insn *last_conversion_insn;
6071dc7f
RH
2181 HOST_WIDE_INT pretend_args_size;
2182 HOST_WIDE_INT extra_pretend_bytes;
2183 int reg_parm_stack_space;
2184};
6f086dfc 2185
6071dc7f
RH
2186struct assign_parm_data_one
2187{
2188 tree nominal_type;
2189 tree passed_type;
2190 rtx entry_parm;
2191 rtx stack_parm;
ef4bddc2
RS
2192 machine_mode nominal_mode;
2193 machine_mode passed_mode;
2194 machine_mode promoted_mode;
6071dc7f
RH
2195 struct locate_and_pad_arg_data locate;
2196 int partial;
2197 BOOL_BITFIELD named_arg : 1;
6071dc7f
RH
2198 BOOL_BITFIELD passed_pointer : 1;
2199 BOOL_BITFIELD on_stack : 1;
2200 BOOL_BITFIELD loaded_in_reg : 1;
2201};
ebb904cb 2202
d5e254e1
IE
2203struct bounds_parm_data
2204{
2205 assign_parm_data_one parm_data;
2206 tree bounds_parm;
2207 tree ptr_parm;
2208 rtx ptr_entry;
2209 int bound_no;
2210};
2211
6071dc7f 2212/* A subroutine of assign_parms. Initialize ALL. */
6f086dfc 2213
6071dc7f
RH
2214static void
2215assign_parms_initialize_all (struct assign_parm_data_all *all)
2216{
fc2f1f53 2217 tree fntype ATTRIBUTE_UNUSED;
6f086dfc 2218
6071dc7f
RH
2219 memset (all, 0, sizeof (*all));
2220
2221 fntype = TREE_TYPE (current_function_decl);
2222
2223#ifdef INIT_CUMULATIVE_INCOMING_ARGS
d5cc9181 2224 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
6071dc7f 2225#else
d5cc9181 2226 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
6071dc7f
RH
2227 current_function_decl, -1);
2228#endif
d5cc9181 2229 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
6071dc7f 2230
ddbb449f
AM
2231#ifdef INCOMING_REG_PARM_STACK_SPACE
2232 all->reg_parm_stack_space
2233 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
6071dc7f
RH
2234#endif
2235}
6f086dfc 2236
6071dc7f
RH
2237/* If ARGS contains entries with complex types, split each such entry
2238   into two entries of the component type, updating ARGS in place
2239   with the substituted parameters.  */
2240
3b3f318a 2241static void
9771b263 2242split_complex_args (vec<tree> *args)
6071dc7f 2243{
3b3f318a 2244 unsigned i;
6071dc7f
RH
2245 tree p;
2246
9771b263 2247 FOR_EACH_VEC_ELT (*args, i, p)
6071dc7f
RH
2248 {
2249 tree type = TREE_TYPE (p);
2250 if (TREE_CODE (type) == COMPLEX_TYPE
2251 && targetm.calls.split_complex_arg (type))
2252 {
2253 tree decl;
2254 tree subtype = TREE_TYPE (type);
6ccd356e 2255 bool addressable = TREE_ADDRESSABLE (p);
6071dc7f
RH
2256
2257 /* Rewrite the PARM_DECL's type with its component. */
3b3f318a 2258 p = copy_node (p);
6071dc7f
RH
2259 TREE_TYPE (p) = subtype;
2260 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2261 DECL_MODE (p) = VOIDmode;
2262 DECL_SIZE (p) = NULL;
2263 DECL_SIZE_UNIT (p) = NULL;
6ccd356e
AM
2264 /* If this arg must go in memory, put it in a pseudo here.
2265 We can't allow it to go in memory as per normal parms,
2266 because the usual place might not have the imag part
2267 adjacent to the real part. */
2268 DECL_ARTIFICIAL (p) = addressable;
2269 DECL_IGNORED_P (p) = addressable;
2270 TREE_ADDRESSABLE (p) = 0;
6071dc7f 2271 layout_decl (p, 0);
9771b263 2272 (*args)[i] = p;
6071dc7f
RH
2273
2274 /* Build a second synthetic decl. */
c2255bc4
AH
2275 decl = build_decl (EXPR_LOCATION (p),
2276 PARM_DECL, NULL_TREE, subtype);
6071dc7f 2277 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
6ccd356e
AM
2278 DECL_ARTIFICIAL (decl) = addressable;
2279 DECL_IGNORED_P (decl) = addressable;
6071dc7f 2280 layout_decl (decl, 0);
9771b263 2281 args->safe_insert (++i, decl);
6071dc7f
RH
2282 }
2283 }
6071dc7f
RH
2284}
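/* Illustrative sketch, assuming a target whose split_complex_arg hook
   returns true for _Complex double: for a function declared as

     double cabs2 (_Complex double z);

   the single PARM_DECL for Z is rewritten above into two PARM_DECLs of
   type double, one for the real part followed by one for the imaginary
   part, so the rest of assign_parms sees two ordinary scalar arguments.  */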
2285
2286/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2287 the hidden struct return argument, and (abi willing) complex args.
2288 Return the new parameter list. */
2289
9771b263 2290static vec<tree>
6071dc7f
RH
2291assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2292{
2293 tree fndecl = current_function_decl;
2294 tree fntype = TREE_TYPE (fndecl);
6e1aa848 2295 vec<tree> fnargs = vNULL;
3b3f318a
RG
2296 tree arg;
2297
910ad8de 2298 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
9771b263 2299 fnargs.safe_push (arg);
3b3f318a
RG
2300
2301 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
6f086dfc
RS
2302
2303 /* If struct value address is treated as the first argument, make it so. */
61f71b34 2304 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
e3b5732b 2305 && ! cfun->returns_pcc_struct
61f71b34 2306 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
6f086dfc 2307 {
f9f29478 2308 tree type = build_pointer_type (TREE_TYPE (fntype));
6071dc7f 2309 tree decl;
6f086dfc 2310
c2255bc4 2311 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
8dcfef8f 2312 PARM_DECL, get_identifier (".result_ptr"), type);
6071dc7f
RH
2313 DECL_ARG_TYPE (decl) = type;
2314 DECL_ARTIFICIAL (decl) = 1;
8dcfef8f
AO
2315 DECL_NAMELESS (decl) = 1;
2316 TREE_CONSTANT (decl) = 1;
6f086dfc 2317
910ad8de 2318 DECL_CHAIN (decl) = all->orig_fnargs;
3b3f318a 2319 all->orig_fnargs = decl;
9771b263 2320 fnargs.safe_insert (0, decl);
3b3f318a 2321
6071dc7f 2322 all->function_result_decl = decl;
d5e254e1
IE
2323
 2324      /* If the function is instrumented, then the bounds of the
 2325	  passed structure address are the second argument.  */
2326 if (chkp_function_instrumented_p (fndecl))
2327 {
2328 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2329 PARM_DECL, get_identifier (".result_bnd"),
2330 pointer_bounds_type_node);
2331 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2332 DECL_ARTIFICIAL (decl) = 1;
2333 DECL_NAMELESS (decl) = 1;
2334 TREE_CONSTANT (decl) = 1;
2335
2336 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2337 DECL_CHAIN (all->orig_fnargs) = decl;
2338 fnargs.safe_insert (1, decl);
2339 }
6f086dfc 2340 }
718fe406 2341
42ba5130
RH
2342 /* If the target wants to split complex arguments into scalars, do so. */
2343 if (targetm.calls.split_complex_arg)
3b3f318a 2344 split_complex_args (&fnargs);
ded9bf77 2345
6071dc7f
RH
2346 return fnargs;
2347}
e7949876 2348
6071dc7f
RH
2349/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2350 data for the parameter. Incorporate ABI specifics such as pass-by-
2351 reference and type promotion. */
6f086dfc 2352
6071dc7f
RH
2353static void
2354assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2355 struct assign_parm_data_one *data)
2356{
2357 tree nominal_type, passed_type;
ef4bddc2 2358 machine_mode nominal_mode, passed_mode, promoted_mode;
cde0f3fd 2359 int unsignedp;
6f086dfc 2360
6071dc7f
RH
2361 memset (data, 0, sizeof (*data));
2362
fa10beec 2363 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
e3b5732b 2364 if (!cfun->stdarg)
fa10beec 2365 data->named_arg = 1; /* No variadic parms. */
910ad8de 2366 else if (DECL_CHAIN (parm))
fa10beec 2367 data->named_arg = 1; /* Not the last non-variadic parm. */
d5cc9181 2368 else if (targetm.calls.strict_argument_naming (all->args_so_far))
fa10beec 2369 data->named_arg = 1; /* Only variadic ones are unnamed. */
6071dc7f 2370 else
fa10beec 2371 data->named_arg = 0; /* Treat as variadic. */
6071dc7f
RH
2372
2373 nominal_type = TREE_TYPE (parm);
2374 passed_type = DECL_ARG_TYPE (parm);
2375
2376 /* Look out for errors propagating this far. Also, if the parameter's
2377 type is void then its value doesn't matter. */
2378 if (TREE_TYPE (parm) == error_mark_node
2379 /* This can happen after weird syntax errors
2380 or if an enum type is defined among the parms. */
2381 || TREE_CODE (parm) != PARM_DECL
2382 || passed_type == NULL
2383 || VOID_TYPE_P (nominal_type))
2384 {
2385 nominal_type = passed_type = void_type_node;
2386 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2387 goto egress;
2388 }
108b7d3d 2389
6071dc7f
RH
2390 /* Find mode of arg as it is passed, and mode of arg as it should be
2391 during execution of this function. */
2392 passed_mode = TYPE_MODE (passed_type);
2393 nominal_mode = TYPE_MODE (nominal_type);
2394
ebf0bf7f
JJ
2395 /* If the parm is to be passed as a transparent union or record, use the
2396 type of the first field for the tests below. We have already verified
2397 that the modes are the same. */
2398 if ((TREE_CODE (passed_type) == UNION_TYPE
2399 || TREE_CODE (passed_type) == RECORD_TYPE)
2400 && TYPE_TRANSPARENT_AGGR (passed_type))
2401 passed_type = TREE_TYPE (first_field (passed_type));
6071dc7f 2402
0976078c 2403 /* See if this arg was passed by invisible reference. */
d5cc9181 2404 if (pass_by_reference (&all->args_so_far_v, passed_mode,
0976078c 2405 passed_type, data->named_arg))
6071dc7f
RH
2406 {
2407 passed_type = nominal_type = build_pointer_type (passed_type);
2408 data->passed_pointer = true;
fd91cfe3 2409 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
6071dc7f 2410 }
6f086dfc 2411
6071dc7f 2412 /* Find mode as it is passed by the ABI. */
cde0f3fd
PB
2413 unsignedp = TYPE_UNSIGNED (passed_type);
2414 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2415 TREE_TYPE (current_function_decl), 0);
6f086dfc 2416
6071dc7f
RH
2417 egress:
2418 data->nominal_type = nominal_type;
2419 data->passed_type = passed_type;
2420 data->nominal_mode = nominal_mode;
2421 data->passed_mode = passed_mode;
2422 data->promoted_mode = promoted_mode;
2423}
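/* Two illustrative cases, assuming common target conventions: for a
   "short" parameter on a target whose promote_function_mode widens small
   integers, passed_mode is HImode while promoted_mode becomes SImode; for
   a large structure on a target whose pass_by_reference hook returns true,
   passed_type and nominal_type are both rewritten to the pointer type and
   data->passed_pointer is set.  */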
16bae307 2424
6071dc7f 2425/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
6f086dfc 2426
6071dc7f
RH
2427static void
2428assign_parms_setup_varargs (struct assign_parm_data_all *all,
2429 struct assign_parm_data_one *data, bool no_rtl)
2430{
2431 int varargs_pretend_bytes = 0;
2432
d5cc9181 2433 targetm.calls.setup_incoming_varargs (all->args_so_far,
6071dc7f
RH
2434 data->promoted_mode,
2435 data->passed_type,
2436 &varargs_pretend_bytes, no_rtl);
2437
2438 /* If the back-end has requested extra stack space, record how much is
2439 needed. Do not change pretend_args_size otherwise since it may be
2440 nonzero from an earlier partial argument. */
2441 if (varargs_pretend_bytes > 0)
2442 all->pretend_args_size = varargs_pretend_bytes;
2443}
a53e14c0 2444
6071dc7f
RH
2445/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2446 the incoming location of the current parameter. */
2447
2448static void
2449assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2450 struct assign_parm_data_one *data)
2451{
2452 HOST_WIDE_INT pretend_bytes = 0;
2453 rtx entry_parm;
2454 bool in_regs;
2455
2456 if (data->promoted_mode == VOIDmode)
2457 {
2458 data->entry_parm = data->stack_parm = const0_rtx;
2459 return;
2460 }
a53e14c0 2461
d5cc9181 2462 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2463 data->promoted_mode,
2464 data->passed_type,
2465 data->named_arg);
6f086dfc 2466
6071dc7f
RH
2467 if (entry_parm == 0)
2468 data->promoted_mode = data->passed_mode;
6f086dfc 2469
6071dc7f
RH
2470 /* Determine parm's home in the stack, in case it arrives in the stack
2471 or we should pretend it did. Compute the stack position and rtx where
2472 the argument arrives and its size.
6f086dfc 2473
6071dc7f
RH
2474 There is one complexity here: If this was a parameter that would
2475 have been passed in registers, but wasn't only because it is
2476 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2477 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2478 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2479 as it was the previous time. */
d5e254e1 2480 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
6f086dfc 2481#ifdef STACK_PARMS_IN_REG_PARM_AREA
6071dc7f 2482 in_regs = true;
e7949876 2483#endif
6071dc7f
RH
2484 if (!in_regs && !data->named_arg)
2485 {
d5cc9181 2486 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
e7949876 2487 {
6071dc7f 2488 rtx tem;
d5cc9181 2489 tem = targetm.calls.function_incoming_arg (all->args_so_far,
3c07301f
NF
2490 data->promoted_mode,
2491 data->passed_type, true);
6071dc7f 2492 in_regs = tem != NULL;
e7949876 2493 }
6071dc7f 2494 }
e7949876 2495
6071dc7f
RH
2496 /* If this parameter was passed both in registers and in the stack, use
2497 the copy on the stack. */
fe984136
RH
2498 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2499 data->passed_type))
6071dc7f 2500 entry_parm = 0;
e7949876 2501
6071dc7f
RH
2502 if (entry_parm)
2503 {
2504 int partial;
2505
d5cc9181 2506 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
78a52f11
RH
2507 data->promoted_mode,
2508 data->passed_type,
2509 data->named_arg);
6071dc7f
RH
2510 data->partial = partial;
2511
2512 /* The caller might already have allocated stack space for the
2513 register parameters. */
2514 if (partial != 0 && all->reg_parm_stack_space == 0)
975f3818 2515 {
6071dc7f
RH
2516 /* Part of this argument is passed in registers and part
2517 is passed on the stack. Ask the prologue code to extend
2518 the stack part so that we can recreate the full value.
2519
2520 PRETEND_BYTES is the size of the registers we need to store.
2521 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2522 stack space that the prologue should allocate.
2523
2524 Internally, gcc assumes that the argument pointer is aligned
2525 to STACK_BOUNDARY bits. This is used both for alignment
2526 optimizations (see init_emit) and to locate arguments that are
2527 aligned to more than PARM_BOUNDARY bits. We must preserve this
2528 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2529 a stack boundary. */
2530
2531 /* We assume at most one partial arg, and it must be the first
2532 argument on the stack. */
0bccc606 2533 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
6071dc7f 2534
78a52f11 2535 pretend_bytes = partial;
6071dc7f
RH
2536 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2537
2538 /* We want to align relative to the actual stack pointer, so
2539 don't include this in the stack size until later. */
2540 all->extra_pretend_bytes = all->pretend_args_size;
975f3818 2541 }
6071dc7f 2542 }
e7949876 2543
6071dc7f 2544 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2e4ceca5 2545 all->reg_parm_stack_space,
6071dc7f
RH
2546 entry_parm ? data->partial : 0, current_function_decl,
2547 &all->stack_args_size, &data->locate);
6f086dfc 2548
e94a448f
L
2549 /* Update parm_stack_boundary if this parameter is passed in the
2550 stack. */
2551 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2552 crtl->parm_stack_boundary = data->locate.boundary;
2553
6071dc7f
RH
2554 /* Adjust offsets to include the pretend args. */
2555 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2556 data->locate.slot_offset.constant += pretend_bytes;
2557 data->locate.offset.constant += pretend_bytes;
ebca59c3 2558
6071dc7f
RH
2559 data->entry_parm = entry_parm;
2560}
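/* Worked example of the rounding above, assuming STACK_BYTES is 8: if a
   partial argument has 12 bytes' worth of registers to be spilled by the
   prologue, pretend_bytes is 12 and all->pretend_args_size becomes
   CEIL_ROUND (12, 8) = 16, keeping the incoming argument pointer aligned
   to STACK_BOUNDARY.  */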
6f086dfc 2561
6071dc7f
RH
2562/* A subroutine of assign_parms. If there is actually space on the stack
2563 for this parm, count it in stack_args_size and return true. */
6f086dfc 2564
6071dc7f
RH
2565static bool
2566assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2567 struct assign_parm_data_one *data)
2568{
d5e254e1
IE
2569 /* Bounds are never passed on the stack to keep compatibility
 2570     with non-instrumented code.  */
2571 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2572 return false;
2e6ae27f 2573 /* Trivially true if we've no incoming register. */
d5e254e1 2574 else if (data->entry_parm == NULL)
6071dc7f
RH
2575 ;
2576 /* Also true if we're partially in registers and partially not,
2577 since we've arranged to drop the entire argument on the stack. */
2578 else if (data->partial != 0)
2579 ;
2580 /* Also true if the target says that it's passed in both registers
2581 and on the stack. */
2582 else if (GET_CODE (data->entry_parm) == PARALLEL
2583 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2584 ;
2585 /* Also true if the target says that there's stack allocated for
2586 all register parameters. */
2587 else if (all->reg_parm_stack_space > 0)
2588 ;
2589 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2590 else
2591 return false;
6f086dfc 2592
6071dc7f
RH
2593 all->stack_args_size.constant += data->locate.size.constant;
2594 if (data->locate.size.var)
2595 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
718fe406 2596
6071dc7f
RH
2597 return true;
2598}
0d1416c6 2599
6071dc7f
RH
2600/* A subroutine of assign_parms. Given that this parameter is allocated
2601 stack space by the ABI, find it. */
6f086dfc 2602
6071dc7f
RH
2603static void
2604assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2605{
2606 rtx offset_rtx, stack_parm;
2607 unsigned int align, boundary;
6f086dfc 2608
6071dc7f
RH
2609 /* If we're passing this arg using a reg, make its stack home the
2610 aligned stack slot. */
2611 if (data->entry_parm)
2612 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2613 else
2614 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2615
38173d38 2616 stack_parm = crtl->args.internal_arg_pointer;
6071dc7f
RH
2617 if (offset_rtx != const0_rtx)
2618 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2619 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2620
08ab0acf 2621 if (!data->passed_pointer)
997f78fb 2622 {
08ab0acf
JJ
2623 set_mem_attributes (stack_parm, parm, 1);
2624 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2625 while promoted mode's size is needed. */
2626 if (data->promoted_mode != BLKmode
2627 && data->promoted_mode != DECL_MODE (parm))
997f78fb 2628 {
f5541398 2629 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
527210c4 2630 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
08ab0acf
JJ
2631 {
2632 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2633 data->promoted_mode);
2634 if (offset)
527210c4 2635 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
08ab0acf 2636 }
997f78fb
JJ
2637 }
2638 }
6071dc7f 2639
bfc45551
AM
2640 boundary = data->locate.boundary;
2641 align = BITS_PER_UNIT;
6071dc7f
RH
2642
2643 /* If we're padding upward, we know that the alignment of the slot
c2ed6cf8 2644 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
6071dc7f
RH
2645 intentionally forcing upward padding. Otherwise we have to come
2646 up with a guess at the alignment based on OFFSET_RTX. */
bfc45551 2647 if (data->locate.where_pad != downward || data->entry_parm)
6071dc7f 2648 align = boundary;
481683e1 2649 else if (CONST_INT_P (offset_rtx))
6071dc7f
RH
2650 {
2651 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2652 align = align & -align;
2653 }
bfc45551 2654 set_mem_align (stack_parm, align);
6071dc7f
RH
2655
2656 if (data->entry_parm)
2657 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2658
2659 data->stack_parm = stack_parm;
2660}
2661
2662/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2663 always valid and contiguous. */
2664
2665static void
2666assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2667{
2668 rtx entry_parm = data->entry_parm;
2669 rtx stack_parm = data->stack_parm;
2670
2671 /* If this parm was passed part in regs and part in memory, pretend it
2672 arrived entirely in memory by pushing the register-part onto the stack.
2673 In the special case of a DImode or DFmode that is split, we could put
2674 it together in a pseudoreg directly, but for now that's not worth
2675 bothering with. */
2676 if (data->partial != 0)
2677 {
2678 /* Handle calls that pass values in multiple non-contiguous
2679 locations. The Irix 6 ABI has examples of this. */
2680 if (GET_CODE (entry_parm) == PARALLEL)
1a8cb155 2681 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
b8698a0f 2682 data->passed_type,
6071dc7f 2683 int_size_in_bytes (data->passed_type));
6f086dfc 2684 else
78a52f11
RH
2685 {
2686 gcc_assert (data->partial % UNITS_PER_WORD == 0);
1a8cb155
RS
2687 move_block_from_reg (REGNO (entry_parm),
2688 validize_mem (copy_rtx (stack_parm)),
78a52f11
RH
2689 data->partial / UNITS_PER_WORD);
2690 }
6f086dfc 2691
6071dc7f
RH
2692 entry_parm = stack_parm;
2693 }
6f086dfc 2694
6071dc7f
RH
2695 /* If we didn't decide this parm came in a register, by default it came
2696 on the stack. */
2697 else if (entry_parm == NULL)
2698 entry_parm = stack_parm;
2699
2700 /* When an argument is passed in multiple locations, we can't make use
2701 of this information, but we can save some copying if the whole argument
2702 is passed in a single register. */
2703 else if (GET_CODE (entry_parm) == PARALLEL
2704 && data->nominal_mode != BLKmode
2705 && data->passed_mode != BLKmode)
2706 {
2707 size_t i, len = XVECLEN (entry_parm, 0);
2708
2709 for (i = 0; i < len; i++)
2710 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2711 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2712 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2713 == data->passed_mode)
2714 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2715 {
2716 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2717 break;
2718 }
2719 }
e68a6ce1 2720
6071dc7f
RH
2721 data->entry_parm = entry_parm;
2722}
6f086dfc 2723
4d2a9850
DJ
2724/* A subroutine of assign_parms. Reconstitute any values which were
2725 passed in multiple registers and would fit in a single register. */
2726
2727static void
2728assign_parm_remove_parallels (struct assign_parm_data_one *data)
2729{
2730 rtx entry_parm = data->entry_parm;
2731
2732 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2733 This can be done with register operations rather than on the
2734 stack, even if we will store the reconstituted parameter on the
2735 stack later. */
85776d60 2736 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
4d2a9850
DJ
2737 {
2738 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
bbd46fd5 2739 emit_group_store (parmreg, entry_parm, data->passed_type,
4d2a9850
DJ
2740 GET_MODE_SIZE (GET_MODE (entry_parm)));
2741 entry_parm = parmreg;
2742 }
2743
2744 data->entry_parm = entry_parm;
2745}
2746
6071dc7f
RH
2747/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2748 always valid and properly aligned. */
6f086dfc 2749
6071dc7f 2750static void
0f9f9784 2751assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
6071dc7f
RH
2752{
2753 rtx stack_parm = data->stack_parm;
2754
2755 /* If we can't trust the parm stack slot to be aligned enough for its
2756 ultimate type, don't use that slot after entry. We'll make another
2757 stack slot, if we need one. */
0f9f9784
AO
2758 if (stack_parm
2759 && ((STRICT_ALIGNMENT
2760 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2761 || (data->nominal_type
2762 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2763 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
6071dc7f
RH
2764 stack_parm = NULL;
2765
2766 /* If parm was passed in memory, and we need to convert it on entry,
2767 don't store it back in that same slot. */
2768 else if (data->entry_parm == stack_parm
2769 && data->nominal_mode != BLKmode
2770 && data->nominal_mode != data->passed_mode)
2771 stack_parm = NULL;
2772
7d69de61
RH
2773 /* If stack protection is in effect for this function, don't leave any
2774 pointers in their passed stack slots. */
cb91fab0 2775 else if (crtl->stack_protect_guard
7d69de61
RH
2776 && (flag_stack_protect == 2
2777 || data->passed_pointer
2778 || POINTER_TYPE_P (data->nominal_type)))
2779 stack_parm = NULL;
2780
6071dc7f
RH
2781 data->stack_parm = stack_parm;
2782}
a0506b54 2783
6071dc7f
RH
2784/* A subroutine of assign_parms. Return true if the current parameter
2785 should be stored as a BLKmode in the current frame. */
2786
2787static bool
2788assign_parm_setup_block_p (struct assign_parm_data_one *data)
2789{
2790 if (data->nominal_mode == BLKmode)
2791 return true;
85776d60
DJ
2792 if (GET_MODE (data->entry_parm) == BLKmode)
2793 return true;
531547e9 2794
6e985040 2795#ifdef BLOCK_REG_PADDING
ae8c9754
RS
2796 /* Only assign_parm_setup_block knows how to deal with register arguments
2797 that are padded at the least significant end. */
2798 if (REG_P (data->entry_parm)
2799 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2800 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2801 == (BYTES_BIG_ENDIAN ? upward : downward)))
6071dc7f 2802 return true;
6e985040 2803#endif
6071dc7f
RH
2804
2805 return false;
2806}
2807
b8698a0f 2808/* A subroutine of assign_parms. Arrange for the parameter to be
6071dc7f
RH
2809 present and valid in DATA->STACK_RTL. */
2810
2811static void
27e29549
RH
2812assign_parm_setup_block (struct assign_parm_data_all *all,
2813 tree parm, struct assign_parm_data_one *data)
6071dc7f
RH
2814{
2815 rtx entry_parm = data->entry_parm;
2816 rtx stack_parm = data->stack_parm;
bfc45551
AM
2817 HOST_WIDE_INT size;
2818 HOST_WIDE_INT size_stored;
6071dc7f 2819
27e29549
RH
2820 if (GET_CODE (entry_parm) == PARALLEL)
2821 entry_parm = emit_group_move_into_temps (entry_parm);
2822
bfc45551
AM
2823 size = int_size_in_bytes (data->passed_type);
2824 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2825 if (stack_parm == 0)
2826 {
a561d88b 2827 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
0f9f9784
AO
2828 stack_parm = assign_stack_local (BLKmode, size_stored,
2829 DECL_ALIGN (parm));
bfc45551
AM
2830 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2831 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2832 set_mem_attributes (stack_parm, parm, 1);
2833 }
2834
6071dc7f
RH
2835 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2836 calls that pass values in multiple non-contiguous locations. */
2837 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2838 {
6071dc7f
RH
2839 rtx mem;
2840
2841 /* Note that we will be storing an integral number of words.
2842 So we have to be careful to ensure that we allocate an
bfc45551 2843 integral number of words. We do this above when we call
6071dc7f
RH
2844 assign_stack_local if space was not allocated in the argument
2845 list. If it was, this will not work if PARM_BOUNDARY is not
2846 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2847 if it becomes a problem. Exception is when BLKmode arrives
2848 with arguments not conforming to word_mode. */
2849
bfc45551
AM
2850 if (data->stack_parm == 0)
2851 ;
6071dc7f
RH
2852 else if (GET_CODE (entry_parm) == PARALLEL)
2853 ;
0bccc606
NS
2854 else
2855 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
6f086dfc 2856
1a8cb155 2857 mem = validize_mem (copy_rtx (stack_parm));
c6b97fac 2858
6071dc7f
RH
2859 /* Handle values in multiple non-contiguous locations. */
2860 if (GET_CODE (entry_parm) == PARALLEL)
27e29549 2861 {
bb27eeda
SE
2862 push_to_sequence2 (all->first_conversion_insn,
2863 all->last_conversion_insn);
27e29549 2864 emit_group_store (mem, entry_parm, data->passed_type, size);
bb27eeda
SE
2865 all->first_conversion_insn = get_insns ();
2866 all->last_conversion_insn = get_last_insn ();
27e29549
RH
2867 end_sequence ();
2868 }
c6b97fac 2869
6071dc7f
RH
2870 else if (size == 0)
2871 ;
5c07bd7a 2872
6071dc7f
RH
2873 /* If SIZE is that of a mode no bigger than a word, just use
2874 that mode's store operation. */
2875 else if (size <= UNITS_PER_WORD)
2876 {
ef4bddc2 2877 machine_mode mode
6071dc7f 2878 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
c6b97fac 2879
6071dc7f 2880 if (mode != BLKmode
6e985040 2881#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2882 && (size == UNITS_PER_WORD
2883 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2884 != (BYTES_BIG_ENDIAN ? upward : downward)))
6e985040 2885#endif
6071dc7f
RH
2886 )
2887 {
208996c7
RS
2888 rtx reg;
2889
2890 /* We are really truncating a word_mode value containing
2891 SIZE bytes into a value of mode MODE. If such an
2892 operation requires no actual instructions, we can refer
2893 to the value directly in mode MODE, otherwise we must
2894 start with the register in word_mode and explicitly
2895 convert it. */
2896 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2897 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2898 else
2899 {
2900 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2901 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2902 }
6071dc7f
RH
2903 emit_move_insn (change_address (mem, mode, 0), reg);
2904 }
c6b97fac 2905
6071dc7f
RH
2906 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2907 machine must be aligned to the left before storing
2908 to memory. Note that the previous test doesn't
2909 handle all cases (e.g. SIZE == 3). */
2910 else if (size != UNITS_PER_WORD
6e985040 2911#ifdef BLOCK_REG_PADDING
6071dc7f
RH
2912 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2913 == downward)
6e985040 2914#else
6071dc7f 2915 && BYTES_BIG_ENDIAN
6e985040 2916#endif
6071dc7f
RH
2917 )
2918 {
2919 rtx tem, x;
2920 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
65c844e2 2921 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
6071dc7f 2922
eb6c3df1 2923 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
6071dc7f
RH
2924 tem = change_address (mem, word_mode, 0);
2925 emit_move_insn (tem, x);
6f086dfc 2926 }
6071dc7f 2927 else
27e29549 2928 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f 2929 size_stored / UNITS_PER_WORD);
6f086dfc 2930 }
6071dc7f 2931 else
27e29549 2932 move_block_from_reg (REGNO (entry_parm), mem,
6071dc7f
RH
2933 size_stored / UNITS_PER_WORD);
2934 }
bfc45551
AM
2935 else if (data->stack_parm == 0)
2936 {
bb27eeda 2937 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
2938 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2939 BLOCK_OP_NORMAL);
bb27eeda
SE
2940 all->first_conversion_insn = get_insns ();
2941 all->last_conversion_insn = get_last_insn ();
bfc45551
AM
2942 end_sequence ();
2943 }
6071dc7f 2944
bfc45551 2945 data->stack_parm = stack_parm;
6071dc7f
RH
2946 SET_DECL_RTL (parm, stack_parm);
2947}
2948
2949/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2950 parameter. Get it there. Perform all ABI specified conversions. */
2951
2952static void
2953assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2954 struct assign_parm_data_one *data)
2955{
71008de4
BS
2956 rtx parmreg, validated_mem;
2957 rtx equiv_stack_parm;
ef4bddc2 2958 machine_mode promoted_nominal_mode;
6071dc7f
RH
2959 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2960 bool did_conversion = false;
71008de4 2961 bool need_conversion, moved;
6071dc7f
RH
2962
2963 /* Store the parm in a pseudoregister during the function, but we may
666e3ceb
PB
2964 need to do it in a wider mode. Using 2 here makes the result
2965 consistent with promote_decl_mode and thus expand_expr_real_1. */
6071dc7f 2966 promoted_nominal_mode
cde0f3fd 2967 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
666e3ceb 2968 TREE_TYPE (current_function_decl), 2);
6071dc7f 2969
0f9f9784 2970 parmreg = gen_reg_rtx (promoted_nominal_mode);
6071dc7f 2971
0f9f9784
AO
2972 if (!DECL_ARTIFICIAL (parm))
2973 mark_user_reg (parmreg);
6071dc7f
RH
2974
2975 /* If this was an item that we received a pointer to,
2976 set DECL_RTL appropriately. */
2977 if (data->passed_pointer)
2978 {
2979 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2980 set_mem_attributes (x, parm, 1);
2981 SET_DECL_RTL (parm, x);
2982 }
2983 else
389fdba0 2984 SET_DECL_RTL (parm, parmreg);
6071dc7f 2985
4d2a9850
DJ
2986 assign_parm_remove_parallels (data);
2987
666e3ceb
PB
2988 /* Copy the value into the register, thus bridging between
2989 assign_parm_find_data_types and expand_expr_real_1. */
6071dc7f 2990
71008de4 2991 equiv_stack_parm = data->stack_parm;
1a8cb155 2992 validated_mem = validize_mem (copy_rtx (data->entry_parm));
71008de4
BS
2993
2994 need_conversion = (data->nominal_mode != data->passed_mode
2995 || promoted_nominal_mode != data->promoted_mode);
2996 moved = false;
2997
dbb94435
BS
2998 if (need_conversion
2999 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3000 && data->nominal_mode == data->passed_mode
3001 && data->nominal_mode == GET_MODE (data->entry_parm))
71008de4 3002 {
6071dc7f
RH
3003 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3004 mode, by the caller. We now have to convert it to
3005 NOMINAL_MODE, if different. However, PARMREG may be in
3006 a different mode than NOMINAL_MODE if it is being stored
3007 promoted.
3008
3009 If ENTRY_PARM is a hard register, it might be in a register
3010 not valid for operating in its mode (e.g., an odd-numbered
3011 register for a DFmode). In that case, moves are the only
3012 thing valid, so we can't do a convert from there. This
 3013	 occurs when the calling sequence allows such misaligned
3014 usages.
3015
3016 In addition, the conversion may involve a call, which could
3017 clobber parameters which haven't been copied to pseudo
71008de4
BS
3018 registers yet.
3019
3020 First, we try to emit an insn which performs the necessary
3021 conversion. We verify that this insn does not clobber any
3022 hard registers. */
3023
3024 enum insn_code icode;
3025 rtx op0, op1;
3026
3027 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3028 unsignedp);
3029
3030 op0 = parmreg;
3031 op1 = validated_mem;
3032 if (icode != CODE_FOR_nothing
2ef6ce06
RS
3033 && insn_operand_matches (icode, 0, op0)
3034 && insn_operand_matches (icode, 1, op1))
71008de4
BS
3035 {
3036 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
b32d5189
DM
3037 rtx_insn *insn, *insns;
3038 rtx t = op1;
71008de4
BS
3039 HARD_REG_SET hardregs;
3040
3041 start_sequence ();
f9fef349
JJ
3042 /* If op1 is a hard register that is likely spilled, first
3043 force it into a pseudo, otherwise combiner might extend
3044 its lifetime too much. */
3045 if (GET_CODE (t) == SUBREG)
3046 t = SUBREG_REG (t);
3047 if (REG_P (t)
3048 && HARD_REGISTER_P (t)
3049 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3050 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3051 {
3052 t = gen_reg_rtx (GET_MODE (op1));
3053 emit_move_insn (t, op1);
3054 }
3055 else
3056 t = op1;
e67d1102
RS
3057 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3058 data->passed_mode, unsignedp);
a11899b2 3059 emit_insn (pat);
71008de4
BS
3060 insns = get_insns ();
3061
3062 moved = true;
3063 CLEAR_HARD_REG_SET (hardregs);
3064 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3065 {
3066 if (INSN_P (insn))
3067 note_stores (PATTERN (insn), record_hard_reg_sets,
3068 &hardregs);
3069 if (!hard_reg_set_empty_p (hardregs))
3070 moved = false;
3071 }
3072
3073 end_sequence ();
3074
3075 if (moved)
3076 {
3077 emit_insn (insns);
dbb94435
BS
3078 if (equiv_stack_parm != NULL_RTX)
3079 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3080 equiv_stack_parm);
71008de4
BS
3081 }
3082 }
3083 }
3084
3085 if (moved)
3086 /* Nothing to do. */
3087 ;
3088 else if (need_conversion)
3089 {
3090 /* We did not have an insn to convert directly, or the sequence
3091 generated appeared unsafe. We must first copy the parm to a
3092 pseudo reg, and save the conversion until after all
6071dc7f
RH
3093 parameters have been moved. */
3094
71008de4 3095 int save_tree_used;
6071dc7f
RH
3096 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3097
71008de4 3098 emit_move_insn (tempreg, validated_mem);
6071dc7f 3099
bb27eeda 3100 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
6071dc7f
RH
3101 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3102
3103 if (GET_CODE (tempreg) == SUBREG
3104 && GET_MODE (tempreg) == data->nominal_mode
3105 && REG_P (SUBREG_REG (tempreg))
3106 && data->nominal_mode == data->passed_mode
3107 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3108 && GET_MODE_SIZE (GET_MODE (tempreg))
3109 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
6f086dfc 3110 {
6071dc7f
RH
3111 /* The argument is already sign/zero extended, so note it
3112 into the subreg. */
3113 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
362d42dc 3114 SUBREG_PROMOTED_SET (tempreg, unsignedp);
6071dc7f 3115 }
00d8a4c1 3116
6071dc7f
RH
3117 /* TREE_USED gets set erroneously during expand_assignment. */
3118 save_tree_used = TREE_USED (parm);
79f5e442 3119 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
6071dc7f 3120 TREE_USED (parm) = save_tree_used;
bb27eeda
SE
3121 all->first_conversion_insn = get_insns ();
3122 all->last_conversion_insn = get_last_insn ();
6071dc7f 3123 end_sequence ();
00d8a4c1 3124
6071dc7f
RH
3125 did_conversion = true;
3126 }
3127 else
71008de4 3128 emit_move_insn (parmreg, validated_mem);
6071dc7f
RH
3129
3130 /* If we were passed a pointer but the actual value can safely live
f7e088e7 3131 in a register, retrieve it and use it directly. */
0f9f9784 3132 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
6071dc7f
RH
3133 {
3134 /* We can't use nominal_mode, because it will have been set to
3135 Pmode above. We must use the actual mode of the parm. */
0f9f9784 3136 if (use_register_for_decl (parm))
f7e088e7
EB
3137 {
3138 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3139 mark_user_reg (parmreg);
3140 }
3141 else
3142 {
3143 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3144 TYPE_MODE (TREE_TYPE (parm)),
3145 TYPE_ALIGN (TREE_TYPE (parm)));
3146 parmreg
3147 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3148 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3149 align);
3150 set_mem_attributes (parmreg, parm, 1);
3151 }
cd5b3469 3152
6071dc7f
RH
3153 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3154 {
3155 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3156 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3157
bb27eeda
SE
3158 push_to_sequence2 (all->first_conversion_insn,
3159 all->last_conversion_insn);
6071dc7f
RH
3160 emit_move_insn (tempreg, DECL_RTL (parm));
3161 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3162 emit_move_insn (parmreg, tempreg);
bb27eeda
SE
3163 all->first_conversion_insn = get_insns ();
3164 all->last_conversion_insn = get_last_insn ();
6071dc7f 3165 end_sequence ();
6f086dfc 3166
6071dc7f
RH
3167 did_conversion = true;
3168 }
3169 else
3170 emit_move_insn (parmreg, DECL_RTL (parm));
6f086dfc 3171
6071dc7f 3172 SET_DECL_RTL (parm, parmreg);
797a6ac1 3173
6071dc7f
RH
3174 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3175 now the parm. */
0f9f9784 3176 data->stack_parm = NULL;
6071dc7f 3177 }
ddef6bc7 3178
6071dc7f
RH
3179 /* Mark the register as eliminable if we did no conversion and it was
3180 copied from memory at a fixed offset, and the arg pointer was not
3181 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3182 offset formed an invalid address, such memory-equivalences as we
3183 make here would screw up life analysis for it. */
3184 if (data->nominal_mode == data->passed_mode
3185 && !did_conversion
0f9f9784
AO
3186 && data->stack_parm != 0
3187 && MEM_P (data->stack_parm)
6071dc7f
RH
3188 && data->locate.offset.var == 0
3189 && reg_mentioned_p (virtual_incoming_args_rtx,
0f9f9784 3190 XEXP (data->stack_parm, 0)))
6071dc7f 3191 {
691fe203
DM
3192 rtx_insn *linsn = get_last_insn ();
3193 rtx_insn *sinsn;
3194 rtx set;
a03caf76 3195
6071dc7f
RH
3196 /* Mark complex types separately. */
3197 if (GET_CODE (parmreg) == CONCAT)
3198 {
ef4bddc2 3199 machine_mode submode
6071dc7f 3200 = GET_MODE_INNER (GET_MODE (parmreg));
1466e387
RH
3201 int regnor = REGNO (XEXP (parmreg, 0));
3202 int regnoi = REGNO (XEXP (parmreg, 1));
0f9f9784
AO
3203 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3204 rtx stacki = adjust_address_nv (data->stack_parm, submode,
1466e387 3205 GET_MODE_SIZE (submode));
6071dc7f
RH
3206
3207 /* Scan backwards for the set of the real and
3208 imaginary parts. */
3209 for (sinsn = linsn; sinsn != 0;
3210 sinsn = prev_nonnote_insn (sinsn))
3211 {
3212 set = single_set (sinsn);
3213 if (set == 0)
3214 continue;
3215
3216 if (SET_DEST (set) == regno_reg_rtx [regnoi])
a31830a7 3217 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
6071dc7f 3218 else if (SET_DEST (set) == regno_reg_rtx [regnor])
a31830a7 3219 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
a03caf76 3220 }
6071dc7f 3221 }
7543f918
JR
3222 else
3223 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
6071dc7f
RH
3224 }
3225
3226 /* For pointer data type, suggest pointer register. */
3227 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3228 mark_reg_pointer (parmreg,
3229 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3230}
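/* An illustrative sketch (not part of the original function.c) of the
   "generate, verify, then emit" idiom used just above when widening the
   incoming value: the candidate extension is built inside a sequence,
   scanned for stores to hard registers, and only emitted when that scan
   finds none.  seq_clobbers_hard_reg_p is a hypothetical stand-in for
   the note_stores/record_hard_reg_sets loop above.

     start_sequence ();
     emit_insn (gen_extend_insn (op0, op1, to_mode, from_mode, unsignedp));
     rtx_insn *seq = get_insns ();
     end_sequence ();
     if (!seq_clobbers_hard_reg_p (seq))
       emit_insn (seq);
     else
       {
	 ... fall back to the deferred-conversion path ...
       }  */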
3231
3232/* A subroutine of assign_parms. Allocate stack space to hold the current
3233 parameter. Get it there. Perform all ABI specified conversions. */
3234
3235static void
3236assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3237 struct assign_parm_data_one *data)
3238{
3239 /* Value must be stored in the stack slot STACK_PARM during function
3240 execution. */
bfc45551 3241 bool to_conversion = false;
6071dc7f 3242
4d2a9850
DJ
3243 assign_parm_remove_parallels (data);
3244
6071dc7f
RH
3245 if (data->promoted_mode != data->nominal_mode)
3246 {
3247 /* Conversion is required. */
3248 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
6f086dfc 3249
1a8cb155 3250 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
6071dc7f 3251
bb27eeda 3252 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
bfc45551
AM
3253 to_conversion = true;
3254
6071dc7f
RH
3255 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3256 TYPE_UNSIGNED (TREE_TYPE (parm)));
3257
3258 if (data->stack_parm)
dd67163f
JJ
3259 {
3260 int offset = subreg_lowpart_offset (data->nominal_mode,
3261 GET_MODE (data->stack_parm));
3262 /* ??? This may need a big-endian conversion on sparc64. */
3263 data->stack_parm
3264 = adjust_address (data->stack_parm, data->nominal_mode, 0);
527210c4 3265 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
dd67163f 3266 set_mem_offset (data->stack_parm,
527210c4 3267 MEM_OFFSET (data->stack_parm) + offset);
dd67163f 3268 }
6071dc7f
RH
3269 }
3270
3271 if (data->entry_parm != data->stack_parm)
3272 {
bfc45551
AM
3273 rtx src, dest;
3274
6071dc7f
RH
3275 if (data->stack_parm == 0)
3276 {
3a695389
UW
3277 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3278 GET_MODE (data->entry_parm),
3279 TYPE_ALIGN (data->passed_type));
6071dc7f
RH
3280 data->stack_parm
3281 = assign_stack_local (GET_MODE (data->entry_parm),
3282 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3a695389 3283 align);
6071dc7f 3284 set_mem_attributes (data->stack_parm, parm, 1);
6f086dfc 3285 }
6071dc7f 3286
1a8cb155
RS
3287 dest = validize_mem (copy_rtx (data->stack_parm));
3288 src = validize_mem (copy_rtx (data->entry_parm));
bfc45551
AM
3289
3290 if (MEM_P (src))
6f086dfc 3291 {
bfc45551
AM
3292 /* Use a block move to handle potentially misaligned entry_parm. */
3293 if (!to_conversion)
bb27eeda
SE
3294 push_to_sequence2 (all->first_conversion_insn,
3295 all->last_conversion_insn);
bfc45551
AM
3296 to_conversion = true;
3297
3298 emit_block_move (dest, src,
3299 GEN_INT (int_size_in_bytes (data->passed_type)),
3300 BLOCK_OP_NORMAL);
6071dc7f
RH
3301 }
3302 else
bfc45551
AM
3303 emit_move_insn (dest, src);
3304 }
3305
3306 if (to_conversion)
3307 {
bb27eeda
SE
3308 all->first_conversion_insn = get_insns ();
3309 all->last_conversion_insn = get_last_insn ();
bfc45551 3310 end_sequence ();
6071dc7f 3311 }
6f086dfc 3312
6071dc7f
RH
3313 SET_DECL_RTL (parm, data->stack_parm);
3314}
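/* Both assign_parm_setup_reg and assign_parm_setup_stack queue their
   mode conversions on ALL->first_conversion_insn/last_conversion_insn
   instead of emitting them on the spot; assign_parms later flushes the
   whole batch with a single emit_insn (all.first_conversion_insn) once
   every parameter has been copied out of its hard register.  A minimal
   sketch of that accumulation pattern, using only calls already made in
   this file:

     push_to_sequence2 (all->first_conversion_insn,
			all->last_conversion_insn);
     emit_move_insn (dest, src);		(or convert_to_mode, ...)
     all->first_conversion_insn = get_insns ();
     all->last_conversion_insn = get_last_insn ();
     end_sequence ();  */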
3412b298 3315
6071dc7f
RH
3316/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3317 undo the frobbing that we did in assign_parms_augmented_arg_list. */
86f8eff3 3318
6071dc7f 3319static void
3b3f318a 3320assign_parms_unsplit_complex (struct assign_parm_data_all *all,
9771b263 3321 vec<tree> fnargs)
6071dc7f
RH
3322{
3323 tree parm;
6ccd356e 3324 tree orig_fnargs = all->orig_fnargs;
3b3f318a 3325 unsigned i = 0;
f4ef873c 3326
3b3f318a 3327 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
6071dc7f
RH
3328 {
3329 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3330 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3331 {
3332 rtx tmp, real, imag;
ef4bddc2 3333 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
6f086dfc 3334
9771b263
DN
3335 real = DECL_RTL (fnargs[i]);
3336 imag = DECL_RTL (fnargs[i + 1]);
6071dc7f 3337 if (inner != GET_MODE (real))
6f086dfc 3338 {
6071dc7f
RH
3339 real = gen_lowpart_SUBREG (inner, real);
3340 imag = gen_lowpart_SUBREG (inner, imag);
3341 }
6ccd356e
AM
3342
3343 if (TREE_ADDRESSABLE (parm))
3344 {
3345 rtx rmem, imem;
3346 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3a695389
UW
3347 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3348 DECL_MODE (parm),
3349 TYPE_ALIGN (TREE_TYPE (parm)));
6ccd356e
AM
3350
3351 /* split_complex_arg put the real and imag parts in
3352 pseudos. Move them to memory. */
3a695389 3353 tmp = assign_stack_local (DECL_MODE (parm), size, align);
6ccd356e
AM
3354 set_mem_attributes (tmp, parm, 1);
3355 rmem = adjust_address_nv (tmp, inner, 0);
3356 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
bb27eeda
SE
3357 push_to_sequence2 (all->first_conversion_insn,
3358 all->last_conversion_insn);
6ccd356e
AM
3359 emit_move_insn (rmem, real);
3360 emit_move_insn (imem, imag);
bb27eeda
SE
3361 all->first_conversion_insn = get_insns ();
3362 all->last_conversion_insn = get_last_insn ();
6ccd356e
AM
3363 end_sequence ();
3364 }
3365 else
3366 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
6071dc7f 3367 SET_DECL_RTL (parm, tmp);
7e41ffa2 3368
9771b263
DN
3369 real = DECL_INCOMING_RTL (fnargs[i]);
3370 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
6071dc7f
RH
3371 if (inner != GET_MODE (real))
3372 {
3373 real = gen_lowpart_SUBREG (inner, real);
3374 imag = gen_lowpart_SUBREG (inner, imag);
6f086dfc 3375 }
6071dc7f 3376 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
5141868d 3377 set_decl_incoming_rtl (parm, tmp, false);
3b3f318a 3378 i++;
6f086dfc 3379 }
6f086dfc 3380 }
6071dc7f
RH
3381}
3382
d5e254e1
IE
3383/* Load bounds of PARM from bounds table. */
3384static void
3385assign_parm_load_bounds (struct assign_parm_data_one *data,
3386 tree parm,
3387 rtx entry,
3388 unsigned bound_no)
3389{
3390 bitmap_iterator bi;
3391 unsigned i, offs = 0;
3392 int bnd_no = -1;
3393 rtx slot = NULL, ptr = NULL;
3394
3395 if (parm)
3396 {
3397 bitmap slots;
3398 bitmap_obstack_initialize (NULL);
3399 slots = BITMAP_ALLOC (NULL);
3400 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3401 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3402 {
3403 if (bound_no)
3404 bound_no--;
3405 else
3406 {
3407 bnd_no = i;
3408 break;
3409 }
3410 }
3411 BITMAP_FREE (slots);
3412 bitmap_obstack_release (NULL);
3413 }
3414
3415 /* We may have bounds not associated with any pointer. */
3416 if (bnd_no != -1)
3417 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3418
3419 /* Find associated pointer. */
3420 if (bnd_no == -1)
3421 {
3422		  /* If bounds are not associated with any pointer,
3423		     then they are passed in a register or special slot.  */
3424 gcc_assert (data->entry_parm);
3425 ptr = const0_rtx;
3426 }
3427 else if (MEM_P (entry))
3428 slot = adjust_address (entry, Pmode, offs);
3429 else if (REG_P (entry))
3430 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3431 else if (GET_CODE (entry) == PARALLEL)
3432 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3433 else
3434 gcc_unreachable ();
3435 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3436 data->entry_parm);
3437}
3438
3439/* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3440
3441static void
3442assign_bounds (vec<bounds_parm_data> &bndargs,
55429190
IE
3443 struct assign_parm_data_all &all,
3444 bool assign_regs, bool assign_special,
3445 bool assign_bt)
d5e254e1 3446{
55429190 3447 unsigned i, pass;
d5e254e1
IE
3448 bounds_parm_data *pbdata;
3449
3450 if (!bndargs.exists ())
3451 return;
3452
3453	  /* We make a few passes to store input bounds.  First we handle bounds
3454	     passed in registers.  After that we load bounds passed in special
3455	     slots.  Finally we load bounds from the Bounds Table.  */
3456 for (pass = 0; pass < 3; pass++)
3457 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3458 {
3459 /* Pass 0 => regs only. */
3460 if (pass == 0
55429190
IE
3461 && (!assign_regs
3462		    || (!pbdata->parm_data.entry_parm
3463 || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
d5e254e1
IE
3464 continue;
3465 /* Pass 1 => slots only. */
3466 else if (pass == 1
55429190
IE
3467 && (!assign_special
3468 || (!pbdata->parm_data.entry_parm
3469 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
d5e254e1
IE
3470 continue;
3471 /* Pass 2 => BT only. */
3472 else if (pass == 2
55429190
IE
3473 && (!assign_bt
3474 || pbdata->parm_data.entry_parm))
d5e254e1
IE
3475 continue;
3476
3477 if (!pbdata->parm_data.entry_parm
3478 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3479 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3480 pbdata->ptr_entry, pbdata->bound_no);
3481
3482 set_decl_incoming_rtl (pbdata->bounds_parm,
3483 pbdata->parm_data.entry_parm, false);
3484
3485 if (assign_parm_setup_block_p (&pbdata->parm_data))
3486 assign_parm_setup_block (&all, pbdata->bounds_parm,
3487 &pbdata->parm_data);
3488 else if (pbdata->parm_data.passed_pointer
3489 || use_register_for_decl (pbdata->bounds_parm))
3490 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3491 &pbdata->parm_data);
3492 else
3493 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3494 &pbdata->parm_data);
d5e254e1 3495 }
d5e254e1
IE
3496}
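/* assign_bounds is invoked with different flag combinations (ASSIGN_REGS,
   ASSIGN_SPECIAL, ASSIGN_BT) depending on how far assign_parms has got.
   The calls made later in this file are, in order: registers only, before
   the target sets up incoming vararg bounds; then special slots plus the
   Bounds Table afterwards; and finally all three once the whole argument
   list has been walked:

     assign_bounds (bndargs, all, true, false, false);
     assign_bounds (bndargs, all, false, true, true);
     assign_bounds (bndargs, all, true, true, true);  */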
3497
6071dc7f
RH
3498/* Assign RTL expressions to the function's parameters. This may involve
3499 copying them into registers and using those registers as the DECL_RTL. */
3500
6fe79279 3501static void
6071dc7f
RH
3502assign_parms (tree fndecl)
3503{
3504 struct assign_parm_data_all all;
3b3f318a 3505 tree parm;
9771b263 3506 vec<tree> fnargs;
d5e254e1
IE
3507 unsigned i, bound_no = 0;
3508 tree last_arg = NULL;
3509 rtx last_arg_entry = NULL;
3510 vec<bounds_parm_data> bndargs = vNULL;
3511 bounds_parm_data bdata;
6f086dfc 3512
38173d38 3513 crtl->args.internal_arg_pointer
150cdc9e 3514 = targetm.calls.internal_arg_pointer ();
6071dc7f
RH
3515
3516 assign_parms_initialize_all (&all);
3517 fnargs = assign_parms_augmented_arg_list (&all);
3518
9771b263 3519 FOR_EACH_VEC_ELT (fnargs, i, parm)
ded9bf77 3520 {
6071dc7f
RH
3521 struct assign_parm_data_one data;
3522
3523 /* Extract the type of PARM; adjust it according to ABI. */
3524 assign_parm_find_data_types (&all, parm, &data);
3525
3526 /* Early out for errors and void parameters. */
3527 if (data.passed_mode == VOIDmode)
ded9bf77 3528 {
6071dc7f
RH
3529 SET_DECL_RTL (parm, const0_rtx);
3530 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3531 continue;
3532 }
196c42cd 3533
2e3f842f
L
3534 /* Estimate stack alignment from parameter alignment. */
3535 if (SUPPORTS_STACK_ALIGNMENT)
3536 {
c2ed6cf8
NF
3537 unsigned int align
3538 = targetm.calls.function_arg_boundary (data.promoted_mode,
3539 data.passed_type);
ae58e548
JJ
3540 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3541 align);
2e3f842f 3542 if (TYPE_ALIGN (data.nominal_type) > align)
ae58e548
JJ
3543 align = MINIMUM_ALIGNMENT (data.nominal_type,
3544 TYPE_MODE (data.nominal_type),
3545 TYPE_ALIGN (data.nominal_type));
2e3f842f
L
3546 if (crtl->stack_alignment_estimated < align)
3547 {
3548 gcc_assert (!crtl->stack_realign_processed);
3549 crtl->stack_alignment_estimated = align;
3550 }
3551 }
b8698a0f 3552
6071dc7f
RH
3553 /* Find out where the parameter arrives in this function. */
3554 assign_parm_find_entry_rtl (&all, &data);
3555
3556 /* Find out where stack space for this parameter might be. */
3557 if (assign_parm_is_stack_parm (&all, &data))
3558 {
3559 assign_parm_find_stack_rtl (parm, &data);
3560 assign_parm_adjust_entry_rtl (&data);
ded9bf77 3561 }
d5e254e1
IE
3562 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3563 {
3564	  /* Remember where the last non-bounds arg was passed in case
3565	     we have to load the associated bounds for it from the Bounds
3566	     Table.  */
3567 last_arg = parm;
3568 last_arg_entry = data.entry_parm;
3569 bound_no = 0;
3570 }
6071dc7f 3571 /* Record permanently how this parm was passed. */
a82ff31f
JJ
3572 if (data.passed_pointer)
3573 {
3574 rtx incoming_rtl
3575 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3576 data.entry_parm);
3577 set_decl_incoming_rtl (parm, incoming_rtl, true);
3578 }
3579 else
3580 set_decl_incoming_rtl (parm, data.entry_parm, false);
6071dc7f 3581
0f9f9784 3582	  /* Bounds should be loaded in a particular order to
d5e254e1
IE
3583 have registers allocated correctly. Collect info about
3584 input bounds and load them later. */
3585 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3586 {
3587 /* Expect bounds in instrumented functions only. */
3588 gcc_assert (chkp_function_instrumented_p (fndecl));
3589
3590 bdata.parm_data = data;
3591 bdata.bounds_parm = parm;
3592 bdata.ptr_parm = last_arg;
3593 bdata.ptr_entry = last_arg_entry;
3594 bdata.bound_no = bound_no;
3595 bndargs.safe_push (bdata);
3596 }
3597 else
3598 {
0f9f9784
AO
3599 assign_parm_adjust_stack_rtl (&data);
3600
d5e254e1
IE
3601 if (assign_parm_setup_block_p (&data))
3602 assign_parm_setup_block (&all, parm, &data);
0f9f9784 3603 else if (data.passed_pointer || use_register_for_decl (parm))
d5e254e1
IE
3604 assign_parm_setup_reg (&all, parm, &data);
3605 else
3606 assign_parm_setup_stack (&all, parm, &data);
3607 }
3608
3609 if (cfun->stdarg && !DECL_CHAIN (parm))
3610 {
3611 int pretend_bytes = 0;
3612
3613 assign_parms_setup_varargs (&all, &data, false);
3614
3615 if (chkp_function_instrumented_p (fndecl))
3616 {
3617	      /* We expect this to be the last parm.  Otherwise it is wrong
3618 to assign bounds right now. */
3619 gcc_assert (i == (fnargs.length () - 1));
55429190 3620 assign_bounds (bndargs, all, true, false, false);
d5e254e1
IE
3621 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3622 data.promoted_mode,
3623 data.passed_type,
3624 &pretend_bytes,
3625 false);
55429190
IE
3626 assign_bounds (bndargs, all, false, true, true);
3627 bndargs.release ();
d5e254e1
IE
3628 }
3629 }
3630
6071dc7f 3631 /* Update info on where next arg arrives in registers. */
d5cc9181 3632 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3633 data.passed_type, data.named_arg);
6071dc7f 3634
d5e254e1
IE
3635 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3636 bound_no++;
ded9bf77
AH
3637 }
3638
55429190
IE
3639 assign_bounds (bndargs, all, true, true, true);
3640 bndargs.release ();
d5e254e1 3641
3b3f318a 3642 if (targetm.calls.split_complex_arg)
6ccd356e 3643 assign_parms_unsplit_complex (&all, fnargs);
6071dc7f 3644
9771b263 3645 fnargs.release ();
3b3f318a 3646
3412b298
JW
3647 /* Output all parameter conversion instructions (possibly including calls)
3648 now that all parameters have been copied out of hard registers. */
bb27eeda 3649 emit_insn (all.first_conversion_insn);
3412b298 3650
2e3f842f
L
3651 /* Estimate reload stack alignment from scalar return mode. */
3652 if (SUPPORTS_STACK_ALIGNMENT)
3653 {
3654 if (DECL_RESULT (fndecl))
3655 {
3656 tree type = TREE_TYPE (DECL_RESULT (fndecl));
ef4bddc2 3657 machine_mode mode = TYPE_MODE (type);
2e3f842f
L
3658
3659 if (mode != BLKmode
3660 && mode != VOIDmode
3661 && !AGGREGATE_TYPE_P (type))
3662 {
3663 unsigned int align = GET_MODE_ALIGNMENT (mode);
3664 if (crtl->stack_alignment_estimated < align)
3665 {
3666 gcc_assert (!crtl->stack_realign_processed);
3667 crtl->stack_alignment_estimated = align;
3668 }
3669 }
b8698a0f 3670 }
2e3f842f
L
3671 }
3672
b36a8cc2
OH
3673 /* If we are receiving a struct value address as the first argument, set up
3674 the RTL for the function result. As this might require code to convert
3675 the transmitted address to Pmode, we do this here to ensure that possible
3676 preliminary conversions of the address have been emitted already. */
6071dc7f 3677 if (all.function_result_decl)
b36a8cc2 3678 {
6071dc7f
RH
3679 tree result = DECL_RESULT (current_function_decl);
3680 rtx addr = DECL_RTL (all.function_result_decl);
b36a8cc2 3681 rtx x;
fa8db1f7 3682
cc77ae10 3683 if (DECL_BY_REFERENCE (result))
8dcfef8f
AO
3684 {
3685 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3686 x = addr;
3687 }
cc77ae10
JM
3688 else
3689 {
8dcfef8f
AO
3690 SET_DECL_VALUE_EXPR (result,
3691 build1 (INDIRECT_REF, TREE_TYPE (result),
3692 all.function_result_decl));
cc77ae10
JM
3693 addr = convert_memory_address (Pmode, addr);
3694 x = gen_rtx_MEM (DECL_MODE (result), addr);
3695 set_mem_attributes (x, result, 1);
3696 }
8dcfef8f
AO
3697
3698 DECL_HAS_VALUE_EXPR_P (result) = 1;
3699
b36a8cc2
OH
3700 SET_DECL_RTL (result, x);
3701 }
3702
53c428d0 3703 /* We have aligned all the args, so add space for the pretend args. */
38173d38 3704 crtl->args.pretend_args_size = all.pretend_args_size;
6071dc7f 3705 all.stack_args_size.constant += all.extra_pretend_bytes;
38173d38 3706 crtl->args.size = all.stack_args_size.constant;
6f086dfc
RS
3707
3708 /* Adjust function incoming argument size for alignment and
3709 minimum length. */
3710
2e4ceca5 3711 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
38173d38 3712 crtl->args.size = CEIL_ROUND (crtl->args.size,
53366450 3713 PARM_BOUNDARY / BITS_PER_UNIT);
4433e339 3714
6dad9361
TS
3715 if (ARGS_GROW_DOWNWARD)
3716 {
3717 crtl->args.arg_offset_rtx
3718 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3719 : expand_expr (size_diffop (all.stack_args_size.var,
3720 size_int (-all.stack_args_size.constant)),
3721 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3722 }
3723 else
3724 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
6f086dfc
RS
3725
3726 /* See how many bytes, if any, of its args a function should try to pop
3727 on return. */
3728
079e7538
NF
3729 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3730 TREE_TYPE (fndecl),
3731 crtl->args.size);
6f086dfc 3732
3b69d50e
RK
3733	  /* For a stdarg.h function, save info about
3734 regs and stack space used by the named args. */
6f086dfc 3735
d5cc9181 3736 crtl->args.info = all.args_so_far_v;
6f086dfc
RS
3737
3738 /* Set the rtx used for the function return value. Put this in its
3739 own variable so any optimizers that need this information don't have
3740 to include tree.h. Do this here so it gets done when an inlined
3741 function gets output. */
3742
38173d38 3743 crtl->return_rtx
19e7881c
MM
3744 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3745 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
ce5e43d0
JJ
3746
3747 /* If scalar return value was computed in a pseudo-reg, or was a named
3748 return value that got dumped to the stack, copy that to the hard
3749 return register. */
3750 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3751 {
3752 tree decl_result = DECL_RESULT (fndecl);
3753 rtx decl_rtl = DECL_RTL (decl_result);
3754
3755 if (REG_P (decl_rtl)
3756 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3757 : DECL_REGISTER (decl_result))
3758 {
3759 rtx real_decl_rtl;
3760
1d636cc6
RG
3761 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3762 fndecl, true);
d5e254e1
IE
3763 if (chkp_function_instrumented_p (fndecl))
3764 crtl->return_bnd
3765 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3766 fndecl, true);
ce5e43d0 3767 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
38173d38 3768 /* The delay slot scheduler assumes that crtl->return_rtx
ce5e43d0
JJ
3769 holds the hard register containing the return value, not a
3770 temporary pseudo. */
38173d38 3771 crtl->return_rtx = real_decl_rtl;
ce5e43d0
JJ
3772 }
3773 }
6f086dfc 3774}
4744afba
RH
3775
3776/* A subroutine of gimplify_parameters, invoked via walk_tree.
3777 For all seen types, gimplify their sizes. */
3778
3779static tree
3780gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3781{
3782 tree t = *tp;
3783
3784 *walk_subtrees = 0;
3785 if (TYPE_P (t))
3786 {
3787 if (POINTER_TYPE_P (t))
3788 *walk_subtrees = 1;
ad50bc8d
RH
3789 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3790 && !TYPE_SIZES_GIMPLIFIED (t))
4744afba 3791 {
726a989a 3792 gimplify_type_sizes (t, (gimple_seq *) data);
4744afba
RH
3793 *walk_subtrees = 1;
3794 }
3795 }
3796
3797 return NULL;
3798}
3799
3800/* Gimplify the parameter list for current_function_decl. This involves
3801 evaluating SAVE_EXPRs of variable sized parameters and generating code
726a989a
RB
3802	 to implement callee-copied reference parameters.  Returns a sequence of
3803 statements to add to the beginning of the function. */
4744afba 3804
726a989a 3805gimple_seq
4744afba
RH
3806gimplify_parameters (void)
3807{
3808 struct assign_parm_data_all all;
3b3f318a 3809 tree parm;
726a989a 3810 gimple_seq stmts = NULL;
9771b263 3811 vec<tree> fnargs;
3b3f318a 3812 unsigned i;
4744afba
RH
3813
3814 assign_parms_initialize_all (&all);
3815 fnargs = assign_parms_augmented_arg_list (&all);
3816
9771b263 3817 FOR_EACH_VEC_ELT (fnargs, i, parm)
4744afba
RH
3818 {
3819 struct assign_parm_data_one data;
3820
3821 /* Extract the type of PARM; adjust it according to ABI. */
3822 assign_parm_find_data_types (&all, parm, &data);
3823
3824 /* Early out for errors and void parameters. */
3825 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3826 continue;
3827
3828 /* Update info on where next arg arrives in registers. */
d5cc9181 3829 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3c07301f 3830 data.passed_type, data.named_arg);
4744afba
RH
3831
3832 /* ??? Once upon a time variable_size stuffed parameter list
3833 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3834 turned out to be less than manageable in the gimple world.
3835 Now we have to hunt them down ourselves. */
3836 walk_tree_without_duplicates (&data.passed_type,
3837 gimplify_parm_type, &stmts);
3838
b38f3813 3839 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
4744afba
RH
3840 {
3841 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3842 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3843 }
3844
3845 if (data.passed_pointer)
3846 {
3847 tree type = TREE_TYPE (data.passed_type);
d5cc9181 3848 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
4744afba
RH
3849 type, data.named_arg))
3850 {
3851 tree local, t;
3852
b38f3813 3853 /* For constant-sized objects, this is trivial; for
4744afba 3854 variable-sized objects, we have to play games. */
b38f3813
EB
3855 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3856 && !(flag_stack_check == GENERIC_STACK_CHECK
3857 && compare_tree_int (DECL_SIZE_UNIT (parm),
3858 STACK_CHECK_MAX_VAR_SIZE) > 0))
4744afba 3859 {
5dac1dae 3860 local = create_tmp_var (type, get_name (parm));
4744afba 3861 DECL_IGNORED_P (local) = 0;
04487a2f
JJ
3862 /* If PARM was addressable, move that flag over
3863 to the local copy, as its address will be taken,
37609bf0
RG
3864 not the PARMs. Keep the parms address taken
3865 as we'll query that flag during gimplification. */
04487a2f 3866 if (TREE_ADDRESSABLE (parm))
37609bf0 3867 TREE_ADDRESSABLE (local) = 1;
5dac1dae
JJ
3868 else if (TREE_CODE (type) == COMPLEX_TYPE
3869 || TREE_CODE (type) == VECTOR_TYPE)
3870 DECL_GIMPLE_REG_P (local) = 1;
4744afba
RH
3871 }
3872 else
3873 {
5039610b 3874 tree ptr_type, addr;
4744afba
RH
3875
3876 ptr_type = build_pointer_type (type);
c98b08ff 3877 addr = create_tmp_reg (ptr_type, get_name (parm));
4744afba
RH
3878 DECL_IGNORED_P (addr) = 0;
3879 local = build_fold_indirect_ref (addr);
3880
e79983f4 3881 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
c28f4b5c 3882 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
13e49da9
TV
3883 size_int (DECL_ALIGN (parm)));
3884
d3c12306 3885 /* The call has been built for a variable-sized object. */
63d2a353 3886 CALL_ALLOCA_FOR_VAR_P (t) = 1;
4744afba 3887 t = fold_convert (ptr_type, t);
726a989a 3888 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
4744afba
RH
3889 gimplify_and_add (t, &stmts);
3890 }
3891
726a989a 3892 gimplify_assign (local, parm, &stmts);
4744afba 3893
833b3afe
DB
3894 SET_DECL_VALUE_EXPR (parm, local);
3895 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4744afba
RH
3896 }
3897 }
3898 }
3899
9771b263 3900 fnargs.release ();
3b3f318a 3901
4744afba
RH
3902 return stmts;
3903}
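/* A hedged illustration (pseudo-GIMPLE, not emitted verbatim) of what the
   callee-copy path above produces for a variable-sized reference
   parameter PARM; addr.1 is a hypothetical temporary name:

     addr.1 = __builtin_alloca_with_align (DECL_SIZE_UNIT (PARM),
					   DECL_ALIGN (PARM));
     *addr.1 = PARM;

   PARM's DECL_VALUE_EXPR is then set to *addr.1, so uses of PARM in the
   function body refer to the local copy.  For constant-sized parameters
   the copy goes into an ordinary temporary rather than alloca'd
   storage.  */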
75dc3319 3904\f
6f086dfc
RS
3905/* Compute the size and offset from the start of the stacked arguments for a
3906 parm passed in mode PASSED_MODE and with type TYPE.
3907
3908 INITIAL_OFFSET_PTR points to the current offset into the stacked
3909 arguments.
3910
e7949876
AM
3911 The starting offset and size for this parm are returned in
3912 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3913	 nonzero, the offset is that of the stack slot, which is returned in
3914 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3915 padding required from the initial offset ptr to the stack slot.
6f086dfc 3916
cc2902df 3917 IN_REGS is nonzero if the argument will be passed in registers. It will
6f086dfc
RS
3918 never be set if REG_PARM_STACK_SPACE is not defined.
3919
2e4ceca5
UW
3920 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3921 for arguments which are passed in registers.
3922
6f086dfc
RS
3923 FNDECL is the function in which the argument was defined.
3924
3925 There are two types of rounding that are done. The first, controlled by
c2ed6cf8
NF
3926 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3927	 argument list to be aligned to the specified boundary (in bits).  This
3928 rounding affects the initial and starting offsets, but not the argument
3929 size.
6f086dfc
RS
3930
3931 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3932 optionally rounds the size of the parm to PARM_BOUNDARY. The
3933 initial offset is not affected by this rounding, while the size always
3934 is and the starting offset may be. */
3935
e7949876
AM
3936/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3937 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
6f086dfc 3938 callers pass in the total size of args so far as
e7949876 3939 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
6f086dfc 3940
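/* A hedged worked example of the two roundings described above, with
   hypothetical numbers not tied to any particular target (and ignoring
   PUSH_ROUNDING): assume arguments grow upward, TARGET_FUNCTION_ARG_BOUNDARY
   returns 128 bits (16 bytes) for this parm, PARM_BOUNDARY is 64 bits,
   the padding direction is upward, and *INITIAL_OFFSET_PTR is 12 bytes
   when a 4-byte int arrives.  The first rounding pads the offset up to
   16, so LOCATE->SLOT_OFFSET and LOCATE->OFFSET become 16 and
   ALIGNMENT_PAD records the 4 bytes of padding; the second rounding pads
   the 4-byte size up to the 8-byte PARM_BOUNDARY, so LOCATE->SIZE is 8
   while the offset itself is unchanged.  */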
6f086dfc 3941void
ef4bddc2 3942locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
2e4ceca5
UW
3943 int reg_parm_stack_space, int partial,
3944 tree fndecl ATTRIBUTE_UNUSED,
fa8db1f7
AJ
3945 struct args_size *initial_offset_ptr,
3946 struct locate_and_pad_arg_data *locate)
6f086dfc 3947{
e7949876
AM
3948 tree sizetree;
3949 enum direction where_pad;
123148b5 3950 unsigned int boundary, round_boundary;
e7949876 3951 int part_size_in_regs;
6f086dfc 3952
6f086dfc
RS
3953 /* If we have found a stack parm before we reach the end of the
3954 area reserved for registers, skip that area. */
3955 if (! in_regs)
3956 {
6f086dfc
RS
3957 if (reg_parm_stack_space > 0)
3958 {
3959 if (initial_offset_ptr->var)
3960 {
3961 initial_offset_ptr->var
3962 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
fed3cef0 3963 ssize_int (reg_parm_stack_space));
6f086dfc
RS
3964 initial_offset_ptr->constant = 0;
3965 }
3966 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3967 initial_offset_ptr->constant = reg_parm_stack_space;
3968 }
3969 }
6f086dfc 3970
78a52f11 3971 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
e7949876
AM
3972
3973 sizetree
3974 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3975 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
c2ed6cf8 3976 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
123148b5
BS
3977 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3978 type);
6e985040 3979 locate->where_pad = where_pad;
2e3f842f
L
3980
3981 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3982 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3983 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3984
bfc45551 3985 locate->boundary = boundary;
6f086dfc 3986
2e3f842f
L
3987 if (SUPPORTS_STACK_ALIGNMENT)
3988 {
3989 /* stack_alignment_estimated can't change after stack has been
3990 realigned. */
3991 if (crtl->stack_alignment_estimated < boundary)
3992 {
3993 if (!crtl->stack_realign_processed)
3994 crtl->stack_alignment_estimated = boundary;
3995 else
3996 {
3997 /* If stack is realigned and stack alignment value
3998 hasn't been finalized, it is OK not to increase
3999 stack_alignment_estimated. The bigger alignment
4000 requirement is recorded in stack_alignment_needed
4001 below. */
4002 gcc_assert (!crtl->stack_realign_finalized
4003 && crtl->stack_realign_needed);
4004 }
4005 }
4006 }
4007
c7e777b5
RH
4008 /* Remember if the outgoing parameter requires extra alignment on the
4009 calling function side. */
cb91fab0
JH
4010 if (crtl->stack_alignment_needed < boundary)
4011 crtl->stack_alignment_needed = boundary;
2e3f842f
L
4012 if (crtl->preferred_stack_boundary < boundary)
4013 crtl->preferred_stack_boundary = boundary;
c7e777b5 4014
6dad9361
TS
4015 if (ARGS_GROW_DOWNWARD)
4016 {
4017 locate->slot_offset.constant = -initial_offset_ptr->constant;
4018 if (initial_offset_ptr->var)
4019 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4020 initial_offset_ptr->var);
4021
4022 {
4023 tree s2 = sizetree;
4024 if (where_pad != none
4025 && (!tree_fits_uhwi_p (sizetree)
4026 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4027 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4028 SUB_PARM_SIZE (locate->slot_offset, s2);
4029 }
4030
4031 locate->slot_offset.constant += part_size_in_regs;
4032
4033 if (!in_regs || reg_parm_stack_space > 0)
4034 pad_to_arg_alignment (&locate->slot_offset, boundary,
4035 &locate->alignment_pad);
4036
4037 locate->size.constant = (-initial_offset_ptr->constant
4038 - locate->slot_offset.constant);
4039 if (initial_offset_ptr->var)
4040 locate->size.var = size_binop (MINUS_EXPR,
4041 size_binop (MINUS_EXPR,
4042 ssize_int (0),
4043 initial_offset_ptr->var),
4044 locate->slot_offset.var);
4045
4046 /* Pad_below needs the pre-rounded size to know how much to pad
4047 below. */
4048 locate->offset = locate->slot_offset;
4049 if (where_pad == downward)
4050 pad_below (&locate->offset, passed_mode, sizetree);
4051
4052 }
4053 else
4054 {
4055 if (!in_regs || reg_parm_stack_space > 0)
4056 pad_to_arg_alignment (initial_offset_ptr, boundary,
4057 &locate->alignment_pad);
4058 locate->slot_offset = *initial_offset_ptr;
6f086dfc
RS
4059
4060#ifdef PUSH_ROUNDING
6dad9361
TS
4061 if (passed_mode != BLKmode)
4062 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
6f086dfc
RS
4063#endif
4064
6dad9361
TS
4065 /* Pad_below needs the pre-rounded size to know how much to pad below
4066 so this must be done before rounding up. */
4067 locate->offset = locate->slot_offset;
4068 if (where_pad == downward)
4069 pad_below (&locate->offset, passed_mode, sizetree);
d4b0a7a0 4070
6dad9361
TS
4071 if (where_pad != none
4072 && (!tree_fits_uhwi_p (sizetree)
4073 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4074 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
6f086dfc 4075
6dad9361 4076 ADD_PARM_SIZE (locate->size, sizetree);
e7949876 4077
6dad9361
TS
4078 locate->size.constant -= part_size_in_regs;
4079 }
099590dc
MM
4080
4081#ifdef FUNCTION_ARG_OFFSET
4082 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4083#endif
6f086dfc
RS
4084}
4085
e16c591a
RS
4086/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4087 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4088
6f086dfc 4089static void
fa8db1f7
AJ
4090pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4091 struct args_size *alignment_pad)
6f086dfc 4092{
a544cfd2
KG
4093 tree save_var = NULL_TREE;
4094 HOST_WIDE_INT save_constant = 0;
a751cd5b 4095 int boundary_in_bytes = boundary / BITS_PER_UNIT;
a594a19c
GK
4096 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4097
4098#ifdef SPARC_STACK_BOUNDARY_HACK
2358ff91
EB
4099 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4100 the real alignment of %sp. However, when it does this, the
4101 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
a594a19c
GK
4102 if (SPARC_STACK_BOUNDARY_HACK)
4103 sp_offset = 0;
4104#endif
4fc026cd 4105
6f6b8f81 4106 if (boundary > PARM_BOUNDARY)
4fc026cd
CM
4107 {
4108 save_var = offset_ptr->var;
4109 save_constant = offset_ptr->constant;
4110 }
4111
4112 alignment_pad->var = NULL_TREE;
4113 alignment_pad->constant = 0;
4fc026cd 4114
6f086dfc
RS
4115 if (boundary > BITS_PER_UNIT)
4116 {
4117 if (offset_ptr->var)
4118 {
a594a19c
GK
4119 tree sp_offset_tree = ssize_int (sp_offset);
4120 tree offset = size_binop (PLUS_EXPR,
4121 ARGS_SIZE_TREE (*offset_ptr),
4122 sp_offset_tree);
6dad9361
TS
4123 tree rounded;
4124 if (ARGS_GROW_DOWNWARD)
4125 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4126 else
4127 rounded = round_up (offset, boundary / BITS_PER_UNIT);
a594a19c
GK
4128
4129 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
e7949876
AM
4130 /* ARGS_SIZE_TREE includes constant term. */
4131 offset_ptr->constant = 0;
6f6b8f81 4132 if (boundary > PARM_BOUNDARY)
dd3f0101 4133 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
fed3cef0 4134 save_var);
6f086dfc
RS
4135 }
4136 else
718fe406 4137 {
a594a19c 4138 offset_ptr->constant = -sp_offset +
6b241bd1
MT
4139 (ARGS_GROW_DOWNWARD
4140 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4141 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
6dad9361 4142
6f6b8f81 4143 if (boundary > PARM_BOUNDARY)
718fe406
KH
4144 alignment_pad->constant = offset_ptr->constant - save_constant;
4145 }
6f086dfc
RS
4146 }
4147}
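/* A numeric illustration of the constant branch above, again with
   hypothetical values: with BOUNDARY == 128 bits (boundary_in_bytes == 16),
   STACK_POINTER_OFFSET == 8 and offset_ptr->constant == 20, the
   upward-growing case computes -8 + CEIL_ROUND (20 + 8, 16) == 24, and
   because 128 exceeds PARM_BOUNDARY the 4 bytes of padding are recorded
   in alignment_pad.  */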
4148
4149static void
ef4bddc2 4150pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
6f086dfc
RS
4151{
4152 if (passed_mode != BLKmode)
4153 {
4154 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4155 offset_ptr->constant
4156 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4157 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4158 - GET_MODE_SIZE (passed_mode));
4159 }
4160 else
4161 {
4162 if (TREE_CODE (sizetree) != INTEGER_CST
4163 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4164 {
4165 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4166 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4167 /* Add it in. */
4168 ADD_PARM_SIZE (*offset_ptr, s2);
4169 SUB_PARM_SIZE (*offset_ptr, sizetree);
4170 }
4171 }
4172}
6f086dfc 4173\f
6f086dfc 4174
6fb5fa3c
DB
4175/* True if register REGNO was alive at a place where `setjmp' was
4176 called and was set more than once or is an argument. Such regs may
4177 be clobbered by `longjmp'. */
4178
4179static bool
4180regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4181{
4182 /* There appear to be cases where some local vars never reach the
4183 backend but have bogus regnos. */
4184 if (regno >= max_reg_num ())
4185 return false;
4186
4187 return ((REG_N_SETS (regno) > 1
fefa31b5
DM
4188 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4189 regno))
6fb5fa3c
DB
4190 && REGNO_REG_SET_P (setjmp_crosses, regno));
4191}
4192
4193/* Walk the tree of blocks describing the binding levels within a
4194	   function and warn about variables that might be killed by setjmp or
4195	   vfork.  This is done after flow analysis and before register
4196	   allocation, since register allocation will turn the pseudo-regs into hard
4197 regs. */
4198
4199static void
4200setjmp_vars_warning (bitmap setjmp_crosses, tree block)
6f086dfc 4201{
b3694847 4202 tree decl, sub;
6de9cd9a 4203
910ad8de 4204 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
6f086dfc 4205 {
6de9cd9a 4206 if (TREE_CODE (decl) == VAR_DECL
bc41842b 4207 && DECL_RTL_SET_P (decl)
f8cfc6aa 4208 && REG_P (DECL_RTL (decl))
6fb5fa3c 4209 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4210 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
2b001724 4211 " %<longjmp%> or %<vfork%>", decl);
6f086dfc 4212 }
6de9cd9a 4213
87caf699 4214 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
6fb5fa3c 4215 setjmp_vars_warning (setjmp_crosses, sub);
6f086dfc
RS
4216}
4217
6de9cd9a 4218/* Do the appropriate part of setjmp_vars_warning
6f086dfc
RS
4219 but for arguments instead of local variables. */
4220
6fb5fa3c
DB
4221static void
4222setjmp_args_warning (bitmap setjmp_crosses)
6f086dfc 4223{
b3694847 4224 tree decl;
6f086dfc 4225 for (decl = DECL_ARGUMENTS (current_function_decl);
910ad8de 4226 decl; decl = DECL_CHAIN (decl))
6f086dfc 4227 if (DECL_RTL (decl) != 0
f8cfc6aa 4228 && REG_P (DECL_RTL (decl))
6fb5fa3c 4229 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
b8698a0f 4230 warning (OPT_Wclobbered,
2b001724 4231 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
dee15844 4232 decl);
6f086dfc
RS
4233}
4234
6fb5fa3c
DB
4235/* Generate warning messages for variables live across setjmp. */
4236
b8698a0f 4237void
6fb5fa3c
DB
4238generate_setjmp_warnings (void)
4239{
4240 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4241
0cae8d31 4242 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
6fb5fa3c
DB
4243 || bitmap_empty_p (setjmp_crosses))
4244 return;
4245
4246 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4247 setjmp_args_warning (setjmp_crosses);
4248}
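/* A hedged example (user code, not part of GCC) of the situation these
   warnings describe; whether -Wclobbered actually fires depends on
   register allocation, but code of this shape is the target.  Here g is
   a hypothetical function that may call longjmp (env, 1):

     #include <setjmp.h>
     jmp_buf env;
     int f (void)
     {
       int var = 1;	   (lives in a pseudo that crosses the setjmp)
       if (setjmp (env) == 0)
	 {
	   var = 2;	   (set more than once, see REG_N_SETS above)
	   g ();
	 }
       return var;	   (might be clobbered by longjmp)
     }  */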
4249
6f086dfc 4250\f
3373692b 4251/* Reverse the order of elements in the fragment chain T of blocks,
1e3c1d95
JJ
4252 and return the new head of the chain (old last element).
4253 In addition to that clear BLOCK_SAME_RANGE flags when needed
4254 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4255 its super fragment origin. */
3373692b
JJ
4256
4257static tree
4258block_fragments_nreverse (tree t)
4259{
1e3c1d95
JJ
4260 tree prev = 0, block, next, prev_super = 0;
4261 tree super = BLOCK_SUPERCONTEXT (t);
4262 if (BLOCK_FRAGMENT_ORIGIN (super))
4263 super = BLOCK_FRAGMENT_ORIGIN (super);
3373692b
JJ
4264 for (block = t; block; block = next)
4265 {
4266 next = BLOCK_FRAGMENT_CHAIN (block);
4267 BLOCK_FRAGMENT_CHAIN (block) = prev;
1e3c1d95
JJ
4268 if ((prev && !BLOCK_SAME_RANGE (prev))
4269 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4270 != prev_super))
4271 BLOCK_SAME_RANGE (block) = 0;
4272 prev_super = BLOCK_SUPERCONTEXT (block);
4273 BLOCK_SUPERCONTEXT (block) = super;
3373692b
JJ
4274 prev = block;
4275 }
1e3c1d95
JJ
4276 t = BLOCK_FRAGMENT_ORIGIN (t);
4277 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4278 != prev_super)
4279 BLOCK_SAME_RANGE (t) = 0;
4280 BLOCK_SUPERCONTEXT (t) = super;
3373692b
JJ
4281 return prev;
4282}
4283
4284/* Reverse the order of elements in the chain T of blocks,
4285 and return the new head of the chain (old last element).
4286 Also do the same on subblocks and reverse the order of elements
4287 in BLOCK_FRAGMENT_CHAIN as well. */
4288
4289static tree
4290blocks_nreverse_all (tree t)
4291{
4292 tree prev = 0, block, next;
4293 for (block = t; block; block = next)
4294 {
4295 next = BLOCK_CHAIN (block);
4296 BLOCK_CHAIN (block) = prev;
3373692b
JJ
4297 if (BLOCK_FRAGMENT_CHAIN (block)
4298 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
1e3c1d95
JJ
4299 {
4300 BLOCK_FRAGMENT_CHAIN (block)
4301 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4302 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4303 BLOCK_SAME_RANGE (block) = 0;
4304 }
4305 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
3373692b
JJ
4306 prev = block;
4307 }
4308 return prev;
4309}
4310
4311
a20612aa
RH
4312/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4313 and create duplicate blocks. */
4314/* ??? Need an option to either create block fragments or to create
4315 abstract origin duplicates of a source block. It really depends
4316 on what optimization has been performed. */
467456d0 4317
116eebd6 4318void
fa8db1f7 4319reorder_blocks (void)
467456d0 4320{
116eebd6 4321 tree block = DECL_INITIAL (current_function_decl);
467456d0 4322
1a4450c7 4323 if (block == NULL_TREE)
116eebd6 4324 return;
fc289cd1 4325
00f96dc9 4326 auto_vec<tree, 10> block_stack;
18c038b9 4327
a20612aa 4328 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
6de9cd9a 4329 clear_block_marks (block);
a20612aa 4330
116eebd6
MM
4331 /* Prune the old trees away, so that they don't get in the way. */
4332 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4333 BLOCK_CHAIN (block) = NULL_TREE;
fc289cd1 4334
a20612aa 4335 /* Recreate the block tree from the note nesting. */
116eebd6 4336 reorder_blocks_1 (get_insns (), block, &block_stack);
3373692b 4337 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
467456d0
RS
4338}
4339
a20612aa 4340/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
0a1c58a2 4341
6de9cd9a
DN
4342void
4343clear_block_marks (tree block)
cc1fe44f 4344{
a20612aa 4345 while (block)
cc1fe44f 4346 {
a20612aa 4347 TREE_ASM_WRITTEN (block) = 0;
6de9cd9a 4348 clear_block_marks (BLOCK_SUBBLOCKS (block));
a20612aa 4349 block = BLOCK_CHAIN (block);
cc1fe44f
DD
4350 }
4351}
4352
0a1c58a2 4353static void
691fe203
DM
4354reorder_blocks_1 (rtx_insn *insns, tree current_block,
4355 vec<tree> *p_block_stack)
0a1c58a2 4356{
691fe203 4357 rtx_insn *insn;
1e3c1d95 4358 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
0a1c58a2
JL
4359
4360 for (insn = insns; insn; insn = NEXT_INSN (insn))
4361 {
4b4bf941 4362 if (NOTE_P (insn))
0a1c58a2 4363 {
a38e7aa5 4364 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
0a1c58a2
JL
4365 {
4366 tree block = NOTE_BLOCK (insn);
51b7d006
DJ
4367 tree origin;
4368
3373692b
JJ
4369 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4370 origin = block;
a20612aa 4371
1e3c1d95
JJ
4372 if (prev_end)
4373 BLOCK_SAME_RANGE (prev_end) = 0;
4374 prev_end = NULL_TREE;
4375
a20612aa
RH
4376 /* If we have seen this block before, that means it now
4377 spans multiple address regions. Create a new fragment. */
0a1c58a2
JL
4378 if (TREE_ASM_WRITTEN (block))
4379 {
a20612aa 4380 tree new_block = copy_node (block);
a20612aa 4381
1e3c1d95 4382 BLOCK_SAME_RANGE (new_block) = 0;
a20612aa
RH
4383 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4384 BLOCK_FRAGMENT_CHAIN (new_block)
4385 = BLOCK_FRAGMENT_CHAIN (origin);
4386 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4387
4388 NOTE_BLOCK (insn) = new_block;
4389 block = new_block;
0a1c58a2 4390 }
a20612aa 4391
1e3c1d95
JJ
4392 if (prev_beg == current_block && prev_beg)
4393 BLOCK_SAME_RANGE (block) = 1;
4394
4395 prev_beg = origin;
4396
0a1c58a2
JL
4397 BLOCK_SUBBLOCKS (block) = 0;
4398 TREE_ASM_WRITTEN (block) = 1;
339a28b9
ZW
4399 /* When there's only one block for the entire function,
4400 current_block == block and we mustn't do this, it
4401 will cause infinite recursion. */
4402 if (block != current_block)
4403 {
1e3c1d95 4404 tree super;
51b7d006 4405 if (block != origin)
1e3c1d95
JJ
4406 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4407 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4408 (origin))
4409 == current_block);
9771b263 4410 if (p_block_stack->is_empty ())
1e3c1d95
JJ
4411 super = current_block;
4412 else
4413 {
9771b263 4414 super = p_block_stack->last ();
1e3c1d95
JJ
4415 gcc_assert (super == current_block
4416 || BLOCK_FRAGMENT_ORIGIN (super)
4417 == current_block);
4418 }
4419 BLOCK_SUPERCONTEXT (block) = super;
339a28b9
ZW
4420 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4421 BLOCK_SUBBLOCKS (current_block) = block;
51b7d006 4422 current_block = origin;
339a28b9 4423 }
9771b263 4424 p_block_stack->safe_push (block);
0a1c58a2 4425 }
a38e7aa5 4426 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
0a1c58a2 4427 {
9771b263 4428 NOTE_BLOCK (insn) = p_block_stack->pop ();
0a1c58a2 4429 current_block = BLOCK_SUPERCONTEXT (current_block);
1e3c1d95
JJ
4430 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4431 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4432 prev_beg = NULL_TREE;
4433 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4434 ? NOTE_BLOCK (insn) : NULL_TREE;
0a1c58a2
JL
4435 }
4436 }
1e3c1d95
JJ
4437 else
4438 {
4439 prev_beg = NULL_TREE;
4440 if (prev_end)
4441 BLOCK_SAME_RANGE (prev_end) = 0;
4442 prev_end = NULL_TREE;
4443 }
0a1c58a2
JL
4444 }
4445}
4446
467456d0
RS
4447/* Reverse the order of elements in the chain T of blocks,
4448 and return the new head of the chain (old last element). */
4449
6de9cd9a 4450tree
fa8db1f7 4451blocks_nreverse (tree t)
467456d0 4452{
3373692b
JJ
4453 tree prev = 0, block, next;
4454 for (block = t; block; block = next)
467456d0 4455 {
3373692b
JJ
4456 next = BLOCK_CHAIN (block);
4457 BLOCK_CHAIN (block) = prev;
4458 prev = block;
467456d0
RS
4459 }
4460 return prev;
4461}
4462
61e46a7d
NF
4463/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4464 by modifying the last node in chain 1 to point to chain 2. */
4465
4466tree
4467block_chainon (tree op1, tree op2)
4468{
4469 tree t1;
4470
4471 if (!op1)
4472 return op2;
4473 if (!op2)
4474 return op1;
4475
4476 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4477 continue;
4478 BLOCK_CHAIN (t1) = op2;
4479
4480#ifdef ENABLE_TREE_CHECKING
4481 {
4482 tree t2;
4483 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4484 gcc_assert (t2 != t1);
4485 }
4486#endif
4487
4488 return op1;
4489}
4490
18c038b9
MM
4491/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4492 non-NULL, list them all into VECTOR, in a depth-first preorder
4493 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
b2a59b15 4494 blocks. */
467456d0
RS
4495
4496static int
fa8db1f7 4497all_blocks (tree block, tree *vector)
467456d0 4498{
b2a59b15
MS
4499 int n_blocks = 0;
4500
a84efb51
JO
4501 while (block)
4502 {
4503 TREE_ASM_WRITTEN (block) = 0;
b2a59b15 4504
a84efb51
JO
4505 /* Record this block. */
4506 if (vector)
4507 vector[n_blocks] = block;
b2a59b15 4508
a84efb51 4509 ++n_blocks;
718fe406 4510
a84efb51
JO
4511 /* Record the subblocks, and their subblocks... */
4512 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4513 vector ? vector + n_blocks : 0);
4514 block = BLOCK_CHAIN (block);
4515 }
467456d0
RS
4516
4517 return n_blocks;
4518}
18c038b9
MM
4519
4520/* Return a vector containing all the blocks rooted at BLOCK. The
4521 number of elements in the vector is stored in N_BLOCKS_P. The
4522 vector is dynamically allocated; it is the caller's responsibility
4523 to call `free' on the pointer returned. */
718fe406 4524
18c038b9 4525static tree *
fa8db1f7 4526get_block_vector (tree block, int *n_blocks_p)
18c038b9
MM
4527{
4528 tree *block_vector;
4529
4530 *n_blocks_p = all_blocks (block, NULL);
5ed6ace5 4531 block_vector = XNEWVEC (tree, *n_blocks_p);
18c038b9
MM
4532 all_blocks (block, block_vector);
4533
4534 return block_vector;
4535}
4536
f83b236e 4537static GTY(()) int next_block_index = 2;
18c038b9
MM
4538
4539/* Set BLOCK_NUMBER for all the blocks in FN. */
4540
4541void
fa8db1f7 4542number_blocks (tree fn)
18c038b9
MM
4543{
4544 int i;
4545 int n_blocks;
4546 tree *block_vector;
4547
4548 /* For SDB and XCOFF debugging output, we start numbering the blocks
4549 from 1 within each function, rather than keeping a running
4550 count. */
4551#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
b0e3a658
RK
4552 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4553 next_block_index = 1;
18c038b9
MM
4554#endif
4555
4556 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4557
4558 /* The top-level BLOCK isn't numbered at all. */
4559 for (i = 1; i < n_blocks; ++i)
4560 /* We number the blocks from two. */
4561 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4562
4563 free (block_vector);
4564
4565 return;
4566}
df8992f8
RH
4567
4568/* If VAR is present in a subblock of BLOCK, return the subblock. */
4569
24e47c76 4570DEBUG_FUNCTION tree
fa8db1f7 4571debug_find_var_in_block_tree (tree var, tree block)
df8992f8
RH
4572{
4573 tree t;
4574
4575 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4576 if (t == var)
4577 return block;
4578
4579 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4580 {
4581 tree ret = debug_find_var_in_block_tree (var, t);
4582 if (ret)
4583 return ret;
4584 }
4585
4586 return NULL_TREE;
4587}
467456d0 4588\f
db2960f4
SL
4589/* Keep track of whether we're in a dummy function context. If we are,
4590 we don't want to invoke the set_current_function hook, because we'll
4591 get into trouble if the hook calls target_reinit () recursively or
4592 when the initial initialization is not yet complete. */
4593
4594static bool in_dummy_function;
4595
ab442df7
MM
4596/* Invoke the target hook when setting cfun. Update the optimization options
4597 if the function uses different options than the default. */
db2960f4
SL
4598
4599static void
4600invoke_set_current_function_hook (tree fndecl)
4601{
4602 if (!in_dummy_function)
ab442df7
MM
4603 {
4604 tree opts = ((fndecl)
4605 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4606 : optimization_default_node);
4607
4608 if (!opts)
4609 opts = optimization_default_node;
4610
4611 /* Change optimization options if needed. */
4612 if (optimization_current_node != opts)
4613 {
4614 optimization_current_node = opts;
46625112 4615 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
ab442df7
MM
4616 }
4617
892c4745 4618 targetm.set_current_function (fndecl);
4b1baac8 4619 this_fn_optabs = this_target_optabs;
135204dd 4620
4b1baac8 4621 if (opts != optimization_default_node)
135204dd 4622 {
4b1baac8
RS
4623 init_tree_optimization_optabs (opts);
4624 if (TREE_OPTIMIZATION_OPTABS (opts))
4625 this_fn_optabs = (struct target_optabs *)
4626 TREE_OPTIMIZATION_OPTABS (opts);
135204dd 4627 }
ab442df7 4628 }
db2960f4
SL
4629}
4630
4631/* cfun should never be set directly; use this function. */
4632
4633void
4634set_cfun (struct function *new_cfun)
4635{
4636 if (cfun != new_cfun)
4637 {
4638 cfun = new_cfun;
4639 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4640 }
4641}
4642
db2960f4
SL
4643/* Initialized with NOGC, making this poisonous to the garbage collector. */
4644
9771b263 4645static vec<function_p> cfun_stack;
db2960f4 4646
af16bc76
MJ
4647/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4648 current_function_decl accordingly. */
db2960f4
SL
4649
4650void
4651push_cfun (struct function *new_cfun)
4652{
af16bc76
MJ
4653 gcc_assert ((!cfun && !current_function_decl)
4654 || (cfun && current_function_decl == cfun->decl));
9771b263 4655 cfun_stack.safe_push (cfun);
af16bc76 4656 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4
SL
4657 set_cfun (new_cfun);
4658}
4659
af16bc76 4660/* Pop cfun from the stack. Also set current_function_decl accordingly. */
db2960f4
SL
4661
4662void
4663pop_cfun (void)
4664{
9771b263 4665 struct function *new_cfun = cfun_stack.pop ();
af16bc76
MJ
4666 /* When in_dummy_function, we do have a cfun but current_function_decl is
4667 NULL. We also allow pushing NULL cfun and subsequently changing
4668 current_function_decl to something else and have both restored by
4669 pop_cfun. */
4670 gcc_checking_assert (in_dummy_function
4671 || !cfun
4672 || current_function_decl == cfun->decl);
38d34676 4673 set_cfun (new_cfun);
af16bc76 4674 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
db2960f4 4675}
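/* A hedged usage sketch (caller code elsewhere in the compiler, not part
   of this file): a pass that needs to inspect or modify another function
   temporarily switches context with the pair above, which also keeps
   current_function_decl in sync.  OTHER_FNDECL is a hypothetical
   FUNCTION_DECL with a struct function attached:

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... work on that function's IL via cfun ...
     pop_cfun ();  */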
3e87758a
RL
4676
4677/* Return the current value of funcdef_no and increment it.  */
4678int
b8698a0f 4679get_next_funcdef_no (void)
3e87758a
RL
4680{
4681 return funcdef_no++;
4682}
4683
903d1e67
XDL
4684/* Return the current value of funcdef_no.  */
4685int
4686get_last_funcdef_no (void)
4687{
4688 return funcdef_no;
4689}
4690
3a70d621 4691/* Allocate a function structure for FNDECL and set its contents
db2960f4
SL
4692 to the defaults. Set cfun to the newly-allocated object.
4693 Some of the helper functions invoked during initialization assume
4694 that cfun has already been set. Therefore, assign the new object
4695 directly into cfun and invoke the back end hook explicitly at the
4696 very end, rather than initializing a temporary and calling set_cfun
4697 on it.
182e0d71
AK
4698
4699 ABSTRACT_P is true if this is a function that will never be seen by
4700 the middle-end. Such functions are front-end concepts (like C++
4701 function templates) that do not correspond directly to functions
4702 placed in object files. */
7a80cf9a 4703
3a70d621 4704void
182e0d71 4705allocate_struct_function (tree fndecl, bool abstract_p)
6f086dfc 4706{
6de9cd9a 4707 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
6f086dfc 4708
766090c2 4709 cfun = ggc_cleared_alloc<function> ();
b384405b 4710
3a70d621 4711 init_eh_for_function ();
6f086dfc 4712
3a70d621
RH
4713 if (init_machine_status)
4714 cfun->machine = (*init_machine_status) ();
e2ecd91c 4715
7c800926
KT
4716#ifdef OVERRIDE_ABI_FORMAT
4717 OVERRIDE_ABI_FORMAT (fndecl);
4718#endif
4719
81464b2c 4720 if (fndecl != NULL_TREE)
3a70d621 4721 {
db2960f4
SL
4722 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4723 cfun->decl = fndecl;
70cf5bc1 4724 current_function_funcdef_no = get_next_funcdef_no ();
5b9db1bc
MJ
4725 }
4726
4727 invoke_set_current_function_hook (fndecl);
db2960f4 4728
5b9db1bc
MJ
4729 if (fndecl != NULL_TREE)
4730 {
4731 tree result = DECL_RESULT (fndecl);
182e0d71 4732 if (!abstract_p && aggregate_value_p (result, fndecl))
db2960f4 4733 {
3a70d621 4734#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4735 cfun->returns_pcc_struct = 1;
3a70d621 4736#endif
e3b5732b 4737 cfun->returns_struct = 1;
db2960f4
SL
4738 }
4739
f38958e8 4740 cfun->stdarg = stdarg_p (fntype);
b8698a0f 4741
db2960f4
SL
4742 /* Assume all registers in stdarg functions need to be saved. */
4743 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4744 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
8f4f502f
EB
4745
4746 /* ??? This could be set on a per-function basis by the front-end
4747 but is this worth the hassle? */
4748 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
d764963b 4749 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
0b37ba8a
AK
4750
4751 if (!profile_flag && !flag_instrument_function_entry_exit)
4752 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
3a70d621 4753 }
db2960f4
SL
4754}
4755
4756/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4757 instead of just setting it. */
9d30f3c1 4758
db2960f4
SL
4759void
4760push_struct_function (tree fndecl)
4761{
af16bc76
MJ
4762 /* When in_dummy_function we might be in the middle of a pop_cfun and
4763 current_function_decl and cfun may not match. */
4764 gcc_assert (in_dummy_function
4765 || (!cfun && !current_function_decl)
4766 || (cfun && current_function_decl == cfun->decl));
9771b263 4767 cfun_stack.safe_push (cfun);
af16bc76 4768 current_function_decl = fndecl;
182e0d71 4769 allocate_struct_function (fndecl, false);
3a70d621 4770}
6f086dfc 4771
8f4f502f 4772/* Reset crtl and other non-struct-function variables to defaults as
2067c116 4773 appropriate for emitting rtl at the start of a function. */
6f086dfc 4774
3a70d621 4775static void
db2960f4 4776prepare_function_start (void)
3a70d621 4777{
614d5bd8 4778 gcc_assert (!get_last_insn ());
fb0703f7 4779 init_temp_slots ();
0de456a5 4780 init_emit ();
bd60bab2 4781 init_varasm_status ();
0de456a5 4782 init_expr ();
bf08ebeb 4783 default_rtl_profile ();
6f086dfc 4784
a11e0df4 4785 if (flag_stack_usage_info)
d3c12306 4786 {
766090c2 4787 cfun->su = ggc_cleared_alloc<stack_usage> ();
d3c12306
EB
4788 cfun->su->static_stack_size = -1;
4789 }
4790
3a70d621 4791 cse_not_expected = ! optimize;
6f086dfc 4792
3a70d621
RH
4793 /* Caller save not needed yet. */
4794 caller_save_needed = 0;
6f086dfc 4795
3a70d621
RH
4796 /* We haven't done register allocation yet. */
4797 reg_renumber = 0;
6f086dfc 4798
b384405b
BS
4799 /* Indicate that we have not instantiated virtual registers yet. */
4800 virtuals_instantiated = 0;
4801
1b3d8f8a
GK
4802 /* Indicate that we want CONCATs now. */
4803 generating_concat_p = 1;
4804
b384405b
BS
4805 /* Indicate we have no need of a frame pointer yet. */
4806 frame_pointer_needed = 0;
b384405b
BS
4807}
4808
5283d1ec
TV
4809void
4810push_dummy_function (bool with_decl)
4811{
4812 tree fn_decl, fn_type, fn_result_decl;
4813
4814 gcc_assert (!in_dummy_function);
4815 in_dummy_function = true;
4816
4817 if (with_decl)
4818 {
4819 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4820 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4821 fn_type);
4822 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4823 NULL_TREE, void_type_node);
4824 DECL_RESULT (fn_decl) = fn_result_decl;
4825 }
4826 else
4827 fn_decl = NULL_TREE;
4828
4829 push_struct_function (fn_decl);
4830}
4831
b384405b
BS
4832/* Initialize the rtl expansion mechanism so that we can do simple things
4833 like generate sequences. This is used to provide a context during global
db2960f4
SL
4834 initialization of some passes. You must call expand_dummy_function_end
4835 to exit this context. */
4836
b384405b 4837void
fa8db1f7 4838init_dummy_function_start (void)
b384405b 4839{
5283d1ec 4840 push_dummy_function (false);
db2960f4 4841 prepare_function_start ();
b384405b
BS
4842}
4843
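/* Illustrative usage sketch, not part of the original source: code that only
   needs a throwaway RTL context (for instance to probe the backend during
   global initialization of a pass) brackets its work with the dummy-function
   entry points:

     init_dummy_function_start ();
     // ... generate and inspect scratch RTL ...
     expand_dummy_function_end ();

   expand_dummy_function_end, defined later in this file, frees the dummy
   cfun and pops it off the context stack again.  */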
4844/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4845 and initialize static variables for generating RTL for the statements
4846 of the function. */
4847
4848void
fa8db1f7 4849init_function_start (tree subr)
b384405b 4850{
db2960f4
SL
4851 if (subr && DECL_STRUCT_FUNCTION (subr))
4852 set_cfun (DECL_STRUCT_FUNCTION (subr));
4853 else
182e0d71 4854 allocate_struct_function (subr, false);
b9b5f433
JH
4855
4856 /* Initialize backend, if needed. */
4857 initialize_rtl ();
4858
db2960f4 4859 prepare_function_start ();
2c7eebae 4860 decide_function_section (subr);
b384405b 4861
6f086dfc
RS
4862 /* Warn if this value is an aggregate type,
4863 regardless of which calling convention we are using for it. */
ccf08a6e
DD
4864 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4865 warning (OPT_Waggregate_return, "function returns an aggregate");
49ad7cfa 4866}
5c7675e9 4867
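/* Rough sketch of how these entry points are driven, for orientation only;
   the exact call sites live elsewhere in the compiler (e.g. cgraphunit.c and
   cfgexpand.c) and may differ in detail:

     init_function_start (fndecl);     // select/allocate cfun, reset RTL state
     expand_function_start (fndecl);   // parms, static chain, return slot
     // ... expand the function body to RTL ...
     expand_function_end ();           // copy return value, final checks

   expand_function_start and expand_function_end are defined below.  */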
7d69de61
RH
4868/* Expand code to verify the stack_protect_guard. This is invoked at
4869 the end of a function to be protected. */
4870
b755446c 4871void
7d69de61
RH
4872stack_protect_epilogue (void)
4873{
4874 tree guard_decl = targetm.stack_protect_guard ();
19f8b229 4875 rtx_code_label *label = gen_label_rtx ();
7d69de61
RH
4876 rtx x, y, tmp;
4877
08d4cc33
RH
4878 x = expand_normal (crtl->stack_protect_guard);
4879 y = expand_normal (guard_decl);
7d69de61
RH
4880
4881 /* Allow the target to compare Y with X without leaking either into
4882 a register. */
c65aa042 4883 switch (targetm.have_stack_protect_test ())
7d69de61
RH
4884 {
4885 case 1:
c65aa042 4886 if (rtx_insn *seq = targetm.gen_stack_protect_test (x, y, label))
7d69de61 4887 {
c65aa042 4888 emit_insn (seq);
7d69de61
RH
4889 break;
4890 }
4891 /* FALLTHRU */
4892
4893 default:
4894 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4895 break;
4896 }
4897
4898 /* The noreturn predictor has been moved to the tree level. The rtl-level
4899 predictors estimate this branch about 20%, which isn't enough to get
4900 things moved out of line. Since this is the only extant case of adding
 4901 a noreturn function at the rtl level, it doesn't seem worth doing anything
4902 except adding the prediction by hand. */
4903 tmp = get_last_insn ();
4904 if (JUMP_P (tmp))
9f215bf5 4905 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
7d69de61 4906
b3c144a3
SB
4907 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4908 free_temp_slots ();
7d69de61
RH
4909 emit_label (label);
4910}
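/* A C-level picture of the check emitted above, illustrative only (the names
   are placeholders; the real code is the RTL generated by this function):

     if (frame_guard_copy != guard)   // crtl->stack_protect_guard vs. guard_decl
       stack_protect_fail ();         // call whatever targetm.stack_protect_fail ()
                                      // returns, commonly __stack_chk_fail

   The comparison is emitted either by the target's stack_protect_test
   pattern or by the generic emit_cmp_and_jump_insns fallback above.  */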
4911\f
6f086dfc
RS
4912/* Start the RTL for a new function, and set variables used for
4913 emitting RTL.
4914 SUBR is the FUNCTION_DECL node.
4915 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4916 the function's parameters, which must be run at any return statement. */
4917
4918void
b79c5284 4919expand_function_start (tree subr)
6f086dfc 4920{
6f086dfc
RS
4921 /* Make sure volatile mem refs aren't considered
4922 valid operands of arithmetic insns. */
4923 init_recog_no_volatile ();
4924
e3b5732b 4925 crtl->profile
70f4f91c
WC
4926 = (profile_flag
4927 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4928
e3b5732b 4929 crtl->limit_stack
a157febd
GK
4930 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4931
52a11cbf
RH
4932 /* Make the label for return statements to jump to. Do not special
4933 case machines with special return instructions -- they will be
4934 handled later during jump, ifcvt, or epilogue creation. */
6f086dfc 4935 return_label = gen_label_rtx ();
6f086dfc
RS
4936
4937 /* Initialize rtx used to return the value. */
4938 /* Do this before assign_parms so that we copy the struct value address
4939 before any library calls that assign parms might generate. */
4940
4941 /* Decide whether to return the value in memory or in a register. */
0f9f9784 4942 if (aggregate_value_p (DECL_RESULT (subr), subr))
6f086dfc
RS
4943 {
4944 /* Returning something that won't go in a register. */
b3694847 4945 rtx value_address = 0;
6f086dfc
RS
4946
4947#ifdef PCC_STATIC_STRUCT_RETURN
e3b5732b 4948 if (cfun->returns_pcc_struct)
6f086dfc 4949 {
0f9f9784 4950 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
6f086dfc
RS
4951 value_address = assemble_static_space (size);
4952 }
4953 else
4954#endif
4955 {
2225b57c 4956 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
6f086dfc
RS
4957 /* Expect to be passed the address of a place to store the value.
4958 If it is passed as an argument, assign_parms will take care of
4959 it. */
61f71b34 4960 if (sv)
6f086dfc 4961 {
0f9f9784 4962 value_address = gen_reg_rtx (Pmode);
61f71b34 4963 emit_move_insn (value_address, sv);
6f086dfc
RS
4964 }
4965 }
4966 if (value_address)
ccdecf58 4967 {
01c98570 4968 rtx x = value_address;
0f9f9784 4969 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
01c98570 4970 {
0f9f9784
AO
4971 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4972 set_mem_attributes (x, DECL_RESULT (subr), 1);
01c98570 4973 }
0f9f9784 4974 SET_DECL_RTL (DECL_RESULT (subr), x);
ccdecf58 4975 }
6f086dfc 4976 }
0f9f9784 4977 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
6f086dfc 4978 /* If return mode is void, this decl rtl should not be used. */
0f9f9784 4979 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
d5bf1143 4980 else
a53e14c0 4981 {
d5bf1143
RH
4982 /* Compute the return values into a pseudo reg, which we will copy
4983 into the true return register after the cleanups are done. */
0f9f9784
AO
4984 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4985 if (TYPE_MODE (return_type) != BLKmode
4986 && targetm.calls.return_in_msb (return_type))
bef5d8b6
RS
4987 /* expand_function_end will insert the appropriate padding in
4988 this case. Use the return value's natural (unpadded) mode
4989 within the function proper. */
0f9f9784
AO
4990 SET_DECL_RTL (DECL_RESULT (subr),
4991 gen_reg_rtx (TYPE_MODE (return_type)));
80a480ca 4992 else
0bccc606 4993 {
bef5d8b6
RS
4994 /* In order to figure out what mode to use for the pseudo, we
4995 figure out what the mode of the eventual return register will
4996 actually be, and use that. */
1d636cc6 4997 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
bef5d8b6
RS
4998
4999 /* Structures that are returned in registers are not
5000 aggregate_value_p, so we may see a PARALLEL or a REG. */
5001 if (REG_P (hard_reg))
0f9f9784
AO
5002 SET_DECL_RTL (DECL_RESULT (subr),
5003 gen_reg_rtx (GET_MODE (hard_reg)));
bef5d8b6
RS
5004 else
5005 {
5006 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
0f9f9784 5007 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
bef5d8b6 5008 }
0bccc606 5009 }
a53e14c0 5010
084a1106
JDA
5011 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5012 result to the real return register(s). */
0f9f9784 5013 DECL_REGISTER (DECL_RESULT (subr)) = 1;
d5e254e1
IE
5014
5015 if (chkp_function_instrumented_p (current_function_decl))
5016 {
0f9f9784 5017 tree return_type = TREE_TYPE (DECL_RESULT (subr));
d5e254e1
IE
5018 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5019 subr, 1);
0f9f9784 5020 SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
d5e254e1 5021 }
a53e14c0 5022 }
6f086dfc
RS
5023
5024 /* Initialize rtx for parameters and local variables.
5025 In some cases this requires emitting insns. */
0d1416c6 5026 assign_parms (subr);
6f086dfc 5027
6de9cd9a
DN
5028 /* If function gets a static chain arg, store it. */
5029 if (cfun->static_chain_decl)
5030 {
7e140280 5031 tree parm = cfun->static_chain_decl;
21afc57d
TS
5032 rtx local, chain;
5033 rtx_insn *insn;
7e140280 5034
0f9f9784 5035 local = gen_reg_rtx (Pmode);
531ca746
RH
5036 chain = targetm.calls.static_chain (current_function_decl, true);
5037
5038 set_decl_incoming_rtl (parm, chain, false);
7e140280 5039 SET_DECL_RTL (parm, local);
7e140280 5040 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
6de9cd9a 5041
531ca746
RH
5042 insn = emit_move_insn (local, chain);
5043
5044 /* Mark the register as eliminable, similar to parameters. */
5045 if (MEM_P (chain)
5046 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
7543f918 5047 set_dst_reg_note (insn, REG_EQUIV, chain, local);
3fd48b12
EB
5048
5049 /* If we aren't optimizing, save the static chain onto the stack. */
5050 if (!optimize)
5051 {
5052 tree saved_static_chain_decl
5053 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5054 DECL_NAME (parm), TREE_TYPE (parm));
5055 rtx saved_static_chain_rtx
5056 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5057 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5058 emit_move_insn (saved_static_chain_rtx, chain);
5059 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5060 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5061 }
6de9cd9a
DN
5062 }
5063
5064 /* If the function receives a non-local goto, then store the
5065 bits we need to restore the frame pointer. */
5066 if (cfun->nonlocal_goto_save_area)
5067 {
5068 tree t_save;
5069 rtx r_save;
5070
4846b435 5071 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
ca5f4331 5072 gcc_assert (DECL_RTL_SET_P (var));
6de9cd9a 5073
6bbec3e1
L
5074 t_save = build4 (ARRAY_REF,
5075 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
3244e67d
RS
5076 cfun->nonlocal_goto_save_area,
5077 integer_zero_node, NULL_TREE, NULL_TREE);
6de9cd9a 5078 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
6bbec3e1 5079 gcc_assert (GET_MODE (r_save) == Pmode);
f0c51a1e 5080
88280cf9 5081 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
6de9cd9a
DN
5082 update_nonlocal_goto_save_area ();
5083 }
f0c51a1e 5084
6f086dfc
RS
5085 /* The following was moved from init_function_start.
5086 The move is supposed to make sdb output more accurate. */
5087 /* Indicate the beginning of the function body,
5088 as opposed to parm setup. */
2e040219 5089 emit_note (NOTE_INSN_FUNCTION_BEG);
6f086dfc 5090
ede497cf
SB
5091 gcc_assert (NOTE_P (get_last_insn ()));
5092
6f086dfc
RS
5093 parm_birth_insn = get_last_insn ();
5094
e3b5732b 5095 if (crtl->profile)
f6f315fe 5096 {
f6f315fe 5097#ifdef PROFILE_HOOK
df696a75 5098 PROFILE_HOOK (current_function_funcdef_no);
411707f4 5099#endif
f6f315fe 5100 }
411707f4 5101
6d3cc8f0
EB
5102 /* If we are doing generic stack checking, the probe should go here. */
5103 if (flag_stack_check == GENERIC_STACK_CHECK)
ede497cf 5104 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
6f086dfc
RS
5105}
5106\f
5283d1ec
TV
5107void
5108pop_dummy_function (void)
5109{
5110 pop_cfun ();
5111 in_dummy_function = false;
5112}
5113
49ad7cfa
BS
5114/* Undo the effects of init_dummy_function_start. */
5115void
fa8db1f7 5116expand_dummy_function_end (void)
49ad7cfa 5117{
db2960f4
SL
5118 gcc_assert (in_dummy_function);
5119
49ad7cfa
BS
5120 /* End any sequences that failed to be closed due to syntax errors. */
5121 while (in_sequence_p ())
5122 end_sequence ();
5123
5124 /* Outside function body, can't compute type's actual size
5125 until next function's body starts. */
fa51b01b 5126
01d939e8
BS
5127 free_after_parsing (cfun);
5128 free_after_compilation (cfun);
5283d1ec 5129 pop_dummy_function ();
49ad7cfa
BS
5130}
5131
d5e254e1 5132/* Helper for diddle_return_value. */
bd695e1e
RH
5133
5134void
d5e254e1 5135diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
bd695e1e 5136{
c13fde05
RH
5137 if (! outgoing)
5138 return;
bd695e1e 5139
f8cfc6aa 5140 if (REG_P (outgoing))
c13fde05
RH
5141 (*doit) (outgoing, arg);
5142 else if (GET_CODE (outgoing) == PARALLEL)
5143 {
5144 int i;
bd695e1e 5145
c13fde05
RH
5146 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5147 {
5148 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5149
f8cfc6aa 5150 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
c13fde05 5151 (*doit) (x, arg);
bd695e1e
RH
5152 }
5153 }
5154}
5155
d5e254e1
IE
5156/* Call DOIT for each hard register used as a return value from
5157 the current function. */
5158
5159void
5160diddle_return_value (void (*doit) (rtx, void *), void *arg)
5161{
d5e254e1 5162 diddle_return_value_1 (doit, arg, crtl->return_bnd);
e9ae68af 5163 diddle_return_value_1 (doit, arg, crtl->return_rtx);
d5e254e1
IE
5164}
5165
c13fde05 5166static void
fa8db1f7 5167do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 5168{
c41c1387 5169 emit_clobber (reg);
c13fde05
RH
5170}
5171
5172void
fa8db1f7 5173clobber_return_register (void)
c13fde05
RH
5174{
5175 diddle_return_value (do_clobber_return_reg, NULL);
9c65bbf4
JH
5176
5177 /* In case we do use pseudo to return value, clobber it too. */
5178 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5179 {
5180 tree decl_result = DECL_RESULT (current_function_decl);
5181 rtx decl_rtl = DECL_RTL (decl_result);
5182 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5183 {
5184 do_clobber_return_reg (decl_rtl, NULL);
5185 }
5186 }
c13fde05
RH
5187}
5188
5189static void
fa8db1f7 5190do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
c13fde05 5191{
c41c1387 5192 emit_use (reg);
c13fde05
RH
5193}
5194
0bf8477d 5195static void
fa8db1f7 5196use_return_register (void)
c13fde05
RH
5197{
5198 diddle_return_value (do_use_return_reg, NULL);
5199}
5200
862d0b35
DN
5201/* Set the location of the insn chain starting at INSN to LOC. */
5202
5203static void
dc01c3d1 5204set_insn_locations (rtx_insn *insn, int loc)
862d0b35 5205{
dc01c3d1 5206 while (insn != NULL)
862d0b35
DN
5207 {
5208 if (INSN_P (insn))
5209 INSN_LOCATION (insn) = loc;
5210 insn = NEXT_INSN (insn);
5211 }
5212}
5213
71c0e7fc 5214/* Generate RTL for the end of the current function. */
6f086dfc
RS
5215
5216void
fa8db1f7 5217expand_function_end (void)
6f086dfc 5218{
964be02f
RH
5219 /* If arg_pointer_save_area was referenced only from a nested
5220 function, we will not have initialized it yet. Do that now. */
e3b5732b 5221 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
bd60bab2 5222 get_arg_pointer_save_area ();
964be02f 5223
b38f3813 5224 /* If we are doing generic stack checking and this function makes calls,
11044f66
RK
5225 do a stack probe at the start of the function to ensure we have enough
5226 space for another stack frame. */
b38f3813 5227 if (flag_stack_check == GENERIC_STACK_CHECK)
11044f66 5228 {
691fe203 5229 rtx_insn *insn, *seq;
11044f66
RK
5230
5231 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4b4bf941 5232 if (CALL_P (insn))
11044f66 5233 {
c35af30f 5234 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
11044f66 5235 start_sequence ();
c35af30f
EB
5236 if (STACK_CHECK_MOVING_SP)
5237 anti_adjust_stack_and_probe (max_frame_size, true);
5238 else
5239 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
11044f66
RK
5240 seq = get_insns ();
5241 end_sequence ();
5368224f 5242 set_insn_locations (seq, prologue_location);
ede497cf 5243 emit_insn_before (seq, stack_check_probe_note);
11044f66
RK
5244 break;
5245 }
5246 }
5247
6f086dfc
RS
5248 /* End any sequences that failed to be closed due to syntax errors. */
5249 while (in_sequence_p ())
5f4f0e22 5250 end_sequence ();
6f086dfc 5251
6f086dfc
RS
5252 clear_pending_stack_adjust ();
5253 do_pending_stack_adjust ();
5254
6f086dfc
RS
 5255 /* Output a line number for the end of the function.
5256 SDB depends on this. */
5368224f 5257 set_curr_insn_location (input_location);
6f086dfc 5258
fbffc70a 5259 /* Before the return label (if any), clobber the return
a1f300c0 5260 registers so that they are not propagated live to the rest of
fbffc70a
GK
5261 the function. This can only happen with functions that drop
5262 through; if there had been a return statement, there would
932f0847
JH
5263 have either been a return rtx, or a jump to the return label.
5264
5265 We delay actual code generation after the current_function_value_rtx
5266 is computed. */
e67d1102 5267 rtx_insn *clobber_after = get_last_insn ();
fbffc70a 5268
526c334b
KH
5269 /* Output the label for the actual return from the function. */
5270 emit_label (return_label);
6f086dfc 5271
677f3fa8 5272 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
815eb8f0
AM
5273 {
5274 /* Let except.c know where it should emit the call to unregister
5275 the function context for sjlj exceptions. */
5276 if (flag_exceptions)
5277 sjlj_emit_function_exit_after (get_last_insn ());
5278 }
6fb5fa3c
DB
5279 else
5280 {
5281 /* We want to ensure that instructions that may trap are not
5282 moved into the epilogue by scheduling, because we don't
5283 always emit unwind information for the epilogue. */
8f4f502f 5284 if (cfun->can_throw_non_call_exceptions)
6fb5fa3c
DB
5285 emit_insn (gen_blockage ());
5286 }
0b59e81e 5287
652b0932
RH
5288 /* If this is an implementation of throw, do what's necessary to
5289 communicate between __builtin_eh_return and the epilogue. */
5290 expand_eh_return ();
5291
3e4eac3f
RH
5292 /* If scalar return value was computed in a pseudo-reg, or was a named
5293 return value that got dumped to the stack, copy that to the hard
5294 return register. */
19e7881c 5295 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6f086dfc 5296 {
3e4eac3f
RH
5297 tree decl_result = DECL_RESULT (current_function_decl);
5298 rtx decl_rtl = DECL_RTL (decl_result);
5299
5300 if (REG_P (decl_rtl)
5301 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5302 : DECL_REGISTER (decl_result))
5303 {
38173d38 5304 rtx real_decl_rtl = crtl->return_rtx;
6f086dfc 5305
ce5e43d0 5306 /* This should be set in assign_parms. */
0bccc606 5307 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
3e4eac3f
RH
5308
5309 /* If this is a BLKmode structure being returned in registers,
5310 then use the mode computed in expand_return. Note that if
797a6ac1 5311 decl_rtl is memory, then its mode may have been changed,
38173d38 5312 but that of crtl->return_rtx has not. */
3e4eac3f 5313 if (GET_MODE (real_decl_rtl) == BLKmode)
ce5e43d0 5314 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
3e4eac3f 5315
bef5d8b6
RS
5316 /* If a non-BLKmode return value should be padded at the least
5317 significant end of the register, shift it left by the appropriate
5318 amount. BLKmode results are handled using the group load/store
5319 machinery. */
5320 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
66de4d7c 5321 && REG_P (real_decl_rtl)
bef5d8b6
RS
5322 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5323 {
5324 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5325 REGNO (real_decl_rtl)),
5326 decl_rtl);
5327 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5328 }
3e4eac3f 5329 /* If a named return value dumped decl_return to memory, then
797a6ac1 5330 we may need to re-do the PROMOTE_MODE signed/unsigned
3e4eac3f 5331 extension. */
bef5d8b6 5332 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
3e4eac3f 5333 {
8df83eae 5334 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
cde0f3fd
PB
5335 promote_function_mode (TREE_TYPE (decl_result),
5336 GET_MODE (decl_rtl), &unsignedp,
5337 TREE_TYPE (current_function_decl), 1);
3e4eac3f
RH
5338
5339 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5340 }
aa570f54 5341 else if (GET_CODE (real_decl_rtl) == PARALLEL)
084a1106
JDA
5342 {
5343 /* If expand_function_start has created a PARALLEL for decl_rtl,
5344 move the result to the real return registers. Otherwise, do
5345 a group load from decl_rtl for a named return. */
5346 if (GET_CODE (decl_rtl) == PARALLEL)
5347 emit_group_move (real_decl_rtl, decl_rtl);
5348 else
5349 emit_group_load (real_decl_rtl, decl_rtl,
6e985040 5350 TREE_TYPE (decl_result),
084a1106
JDA
5351 int_size_in_bytes (TREE_TYPE (decl_result)));
5352 }
652b0932
RH
5353 /* In the case of complex integer modes smaller than a word, we'll
5354 need to generate some non-trivial bitfield insertions. Do that
5355 on a pseudo and not the hard register. */
5356 else if (GET_CODE (decl_rtl) == CONCAT
5357 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5358 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5359 {
5360 int old_generating_concat_p;
5361 rtx tmp;
5362
5363 old_generating_concat_p = generating_concat_p;
5364 generating_concat_p = 0;
5365 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5366 generating_concat_p = old_generating_concat_p;
5367
5368 emit_move_insn (tmp, decl_rtl);
5369 emit_move_insn (real_decl_rtl, tmp);
5370 }
3e4eac3f
RH
5371 else
5372 emit_move_insn (real_decl_rtl, decl_rtl);
3e4eac3f 5373 }
6f086dfc
RS
5374 }
5375
5376 /* If returning a structure, arrange to return the address of the value
5377 in a place where debuggers expect to find it.
5378
5379 If returning a structure PCC style,
5380 the caller also depends on this value.
e3b5732b 5381 And cfun->returns_pcc_struct is not necessarily set. */
e0d14c39
BS
5382 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5383 && !targetm.calls.omit_struct_return_reg)
6f086dfc 5384 {
cc77ae10 5385 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
6f086dfc 5386 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
cc77ae10
JM
5387 rtx outgoing;
5388
5389 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5390 type = TREE_TYPE (type);
5391 else
5392 value_address = XEXP (value_address, 0);
5393
1d636cc6
RG
5394 outgoing = targetm.calls.function_value (build_pointer_type (type),
5395 current_function_decl, true);
6f086dfc
RS
5396
5397 /* Mark this as a function return value so integrate will delete the
5398 assignment and USE below when inlining this function. */
5399 REG_FUNCTION_VALUE_P (outgoing) = 1;
5400
d1608933 5401 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5ae6cd0d
MM
5402 value_address = convert_memory_address (GET_MODE (outgoing),
5403 value_address);
d1608933 5404
6f086dfc 5405 emit_move_insn (outgoing, value_address);
d1608933
RK
5406
5407 /* Show return register used to hold result (in this case the address
 5408 of the result). */
38173d38 5409 crtl->return_rtx = outgoing;
6f086dfc
RS
5410 }
5411
79c7fda6
JJ
5412 /* Emit the actual code to clobber return register. Don't emit
5413 it if clobber_after is a barrier, then the previous basic block
5414 certainly doesn't fall thru into the exit block. */
5415 if (!BARRIER_P (clobber_after))
5416 {
79c7fda6
JJ
5417 start_sequence ();
5418 clobber_return_register ();
e67d1102 5419 rtx_insn *seq = get_insns ();
79c7fda6 5420 end_sequence ();
932f0847 5421
79c7fda6
JJ
5422 emit_insn_after (seq, clobber_after);
5423 }
932f0847 5424
609c3937 5425 /* Output the label for the naked return from the function. */
4c33221c
UW
5426 if (naked_return_label)
5427 emit_label (naked_return_label);
6e3077c6 5428
25108646
AH
5429 /* @@@ This is a kludge. We want to ensure that instructions that
5430 may trap are not moved into the epilogue by scheduling, because
56d17681 5431 we don't always emit unwind information for the epilogue. */
f0a0390e 5432 if (cfun->can_throw_non_call_exceptions
677f3fa8 5433 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
56d17681 5434 emit_insn (gen_blockage ());
25108646 5435
7d69de61 5436 /* If stack protection is enabled for this function, check the guard. */
cb91fab0 5437 if (crtl->stack_protect_guard)
7d69de61
RH
5438 stack_protect_epilogue ();
5439
40184445
BS
5440 /* If we had calls to alloca, and this machine needs
5441 an accurate stack pointer to exit the function,
5442 insert some code to save and restore the stack pointer. */
5443 if (! EXIT_IGNORE_STACK
e3b5732b 5444 && cfun->calls_alloca)
40184445 5445 {
e67d1102 5446 rtx tem = 0;
40184445 5447
9eac0f2a
RH
5448 start_sequence ();
5449 emit_stack_save (SAVE_FUNCTION, &tem);
e67d1102 5450 rtx_insn *seq = get_insns ();
9eac0f2a
RH
5451 end_sequence ();
5452 emit_insn_before (seq, parm_birth_insn);
5453
5454 emit_stack_restore (SAVE_FUNCTION, tem);
40184445
BS
5455 }
5456
c13fde05
RH
5457 /* ??? This should no longer be necessary since stupid is no longer with
5458 us, but there are some parts of the compiler (eg reload_combine, and
5459 sh mach_dep_reorg) that still try and compute their own lifetime info
5460 instead of using the general framework. */
5461 use_return_register ();
6f086dfc 5462}
278ed218
RH
5463
5464rtx
bd60bab2 5465get_arg_pointer_save_area (void)
278ed218 5466{
bd60bab2 5467 rtx ret = arg_pointer_save_area;
278ed218
RH
5468
5469 if (! ret)
5470 {
bd60bab2
JH
5471 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5472 arg_pointer_save_area = ret;
964be02f
RH
5473 }
5474
e3b5732b 5475 if (! crtl->arg_pointer_save_area_init)
964be02f 5476 {
797a6ac1 5477 /* Save the arg pointer at the beginning of the function. The
964be02f 5478 generated stack slot may not be a valid memory address, so we
278ed218
RH
5479 have to check it and fix it if necessary. */
5480 start_sequence ();
1a8cb155 5481 emit_move_insn (validize_mem (copy_rtx (ret)),
2e3f842f 5482 crtl->args.internal_arg_pointer);
e67d1102 5483 rtx_insn *seq = get_insns ();
278ed218
RH
5484 end_sequence ();
5485
964be02f 5486 push_topmost_sequence ();
1cb2fc7b 5487 emit_insn_after (seq, entry_of_function ());
964be02f 5488 pop_topmost_sequence ();
c1d9a70a
ILT
5489
5490 crtl->arg_pointer_save_area_init = true;
278ed218
RH
5491 }
5492
5493 return ret;
5494}
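/* Illustrative note, not part of the original source: the function above uses
   the sequence API that recurs throughout this file whenever insns must be
   emitted somewhere other than the current insertion point:

     start_sequence ();
     emit_move_insn (dst, src);        // build insns on a detached chain
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn_after (seq, where);     // splice them where they belong

   (dst, src and where are placeholders.)  push_topmost_sequence and
   pop_topmost_sequence, as used above, additionally let code emit into the
   function's outermost insn chain even while a nested sequence is open.  */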
bdac5f58 5495\f
cd9c1ca8
RH
5496/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5497 for the first time. */
bdac5f58 5498
0a1c58a2 5499static void
d242408f 5500record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
bdac5f58 5501{
dc01c3d1 5502 rtx_insn *tmp;
d242408f 5503 hash_table<insn_cache_hasher> *hash = *hashp;
0a1c58a2 5504
cd9c1ca8 5505 if (hash == NULL)
d242408f 5506 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
cd9c1ca8
RH
5507
5508 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5509 {
d242408f 5510 rtx *slot = hash->find_slot (tmp, INSERT);
cd9c1ca8
RH
5511 gcc_assert (*slot == NULL);
5512 *slot = tmp;
5513 }
5514}
5515
cd400280
RH
 5516/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5517 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5518 insn, then record COPY as well. */
cd9c1ca8
RH
5519
5520void
cd400280 5521maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
cd9c1ca8 5522{
d242408f
TS
5523 hash_table<insn_cache_hasher> *hash;
5524 rtx *slot;
cd9c1ca8 5525
cd400280 5526 hash = epilogue_insn_hash;
d242408f 5527 if (!hash || !hash->find (insn))
cd400280
RH
5528 {
5529 hash = prologue_insn_hash;
d242408f 5530 if (!hash || !hash->find (insn))
cd400280
RH
5531 return;
5532 }
cd9c1ca8 5533
d242408f 5534 slot = hash->find_slot (copy, INSERT);
cd9c1ca8
RH
5535 gcc_assert (*slot == NULL);
5536 *slot = copy;
bdac5f58
TW
5537}
5538
cd9c1ca8
RH
5539/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5540 we can be running after reorg, SEQUENCE rtl is possible. */
bdac5f58 5541
cd9c1ca8 5542static bool
d242408f 5543contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
bdac5f58 5544{
cd9c1ca8
RH
5545 if (hash == NULL)
5546 return false;
bdac5f58 5547
cd9c1ca8 5548 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
bdac5f58 5549 {
e0944870 5550 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
cd9c1ca8 5551 int i;
e0944870 5552 for (i = seq->len () - 1; i >= 0; i--)
d242408f 5553 if (hash->find (seq->element (i)))
cd9c1ca8
RH
5554 return true;
5555 return false;
bdac5f58 5556 }
cd9c1ca8 5557
d242408f 5558 return hash->find (const_cast<rtx> (insn)) != NULL;
bdac5f58 5559}
5c7675e9
RH
5560
5561int
4f588890 5562prologue_epilogue_contains (const_rtx insn)
5c7675e9 5563{
cd9c1ca8 5564 if (contains (insn, prologue_insn_hash))
5c7675e9 5565 return 1;
cd9c1ca8 5566 if (contains (insn, epilogue_insn_hash))
5c7675e9
RH
5567 return 1;
5568 return 0;
5569}
bdac5f58 5570
4c029f40
TV
5571/* Insert use of return register before the end of BB. */
5572
5573static void
5574emit_use_return_register_into_block (basic_block bb)
5575{
4c029f40
TV
5576 start_sequence ();
5577 use_return_register ();
e67d1102 5578 rtx_insn *seq = get_insns ();
4c029f40 5579 end_sequence ();
e67d1102 5580 rtx_insn *insn = BB_END (bb);
058eb3b0 5581 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
1e1b18c1 5582 insn = prev_cc0_setter (insn);
058eb3b0 5583
1e1b18c1 5584 emit_insn_before (seq, insn);
4c029f40
TV
5585}
5586
484db665
BS
5587
5588/* Create a return pattern, either simple_return or return, depending on
5589 simple_p. */
5590
4bccb39e 5591static rtx_insn *
484db665
BS
5592gen_return_pattern (bool simple_p)
5593{
4bccb39e
RS
5594 return (simple_p
5595 ? targetm.gen_simple_return ()
5596 : targetm.gen_return ());
484db665
BS
5597}
5598
5599/* Insert an appropriate return pattern at the end of block BB. This
5600 also means updating block_for_insn appropriately. SIMPLE_P is
5601 the same as in gen_return_pattern and passed to it. */
69732dcb 5602
f30e25a3 5603void
484db665 5604emit_return_into_block (bool simple_p, basic_block bb)
69732dcb 5605{
e67d1102
RS
5606 rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
5607 BB_END (bb));
5608 rtx pat = PATTERN (jump);
26898771
BS
5609 if (GET_CODE (pat) == PARALLEL)
5610 pat = XVECEXP (pat, 0, 0);
5611 gcc_assert (ANY_RETURN_P (pat));
5612 JUMP_LABEL (jump) = pat;
69732dcb
RH
5613}
5614
387748de
AM
5615/* Set JUMP_LABEL for a return insn. */
5616
5617void
d38ff8dd 5618set_return_jump_label (rtx_insn *returnjump)
387748de
AM
5619{
5620 rtx pat = PATTERN (returnjump);
5621 if (GET_CODE (pat) == PARALLEL)
5622 pat = XVECEXP (pat, 0, 0);
5623 if (ANY_RETURN_P (pat))
5624 JUMP_LABEL (returnjump) = pat;
5625 else
5626 JUMP_LABEL (returnjump) = ret_rtx;
5627}
5628
ffe14686 5629/* Return true if there are any active insns between HEAD and TAIL. */
f30e25a3 5630bool
ffd80b43 5631active_insn_between (rtx_insn *head, rtx_insn *tail)
39d52ae5 5632{
ffe14686
AM
5633 while (tail)
5634 {
5635 if (active_insn_p (tail))
5636 return true;
5637 if (tail == head)
5638 return false;
5639 tail = PREV_INSN (tail);
5640 }
5641 return false;
5642}
5643
5644/* LAST_BB is a block that exits, and empty of active instructions.
5645 Examine its predecessors for jumps that can be converted to
5646 (conditional) returns. */
f30e25a3 5647vec<edge>
ffe14686 5648convert_jumps_to_returns (basic_block last_bb, bool simple_p,
9771b263 5649 vec<edge> unconverted ATTRIBUTE_UNUSED)
ffe14686
AM
5650{
5651 int i;
5652 basic_block bb;
ffe14686
AM
5653 edge_iterator ei;
5654 edge e;
ef062b13 5655 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
39d52ae5 5656
ffe14686 5657 FOR_EACH_EDGE (e, ei, last_bb->preds)
fefa31b5 5658 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
9771b263 5659 src_bbs.quick_push (e->src);
ffe14686 5660
e67d1102 5661 rtx_insn *label = BB_HEAD (last_bb);
ffe14686 5662
9771b263 5663 FOR_EACH_VEC_ELT (src_bbs, i, bb)
39d52ae5 5664 {
68a1a6c0 5665 rtx_insn *jump = BB_END (bb);
ffe14686
AM
5666
5667 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5668 continue;
5669
5670 e = find_edge (bb, last_bb);
5671
5672 /* If we have an unconditional jump, we can replace that
5673 with a simple return instruction. */
5674 if (simplejump_p (jump))
5675 {
5676 /* The use of the return register might be present in the exit
5677 fallthru block. Either:
5678 - removing the use is safe, and we should remove the use in
5679 the exit fallthru block, or
5680 - removing the use is not safe, and we should add it here.
5681 For now, we conservatively choose the latter. Either of the
5682 2 helps in crossjumping. */
5683 emit_use_return_register_into_block (bb);
5684
5685 emit_return_into_block (simple_p, bb);
5686 delete_insn (jump);
5687 }
5688
5689 /* If we have a conditional jump branching to the last
5690 block, we can try to replace that with a conditional
5691 return instruction. */
5692 else if (condjump_p (jump))
5693 {
5694 rtx dest;
5695
5696 if (simple_p)
5697 dest = simple_return_rtx;
5698 else
5699 dest = ret_rtx;
1476d1bd 5700 if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
ffe14686 5701 {
4bccb39e 5702 if (targetm.have_simple_return () && simple_p)
ffe14686
AM
5703 {
5704 if (dump_file)
5705 fprintf (dump_file,
5706 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5707 unconverted.safe_push (e);
ffe14686 5708 }
ffe14686
AM
5709 continue;
5710 }
5711
5712 /* See comment in simplejump_p case above. */
5713 emit_use_return_register_into_block (bb);
5714
5715 /* If this block has only one successor, it both jumps
5716 and falls through to the fallthru block, so we can't
5717 delete the edge. */
5718 if (single_succ_p (bb))
5719 continue;
5720 }
5721 else
5722 {
4bccb39e 5723 if (targetm.have_simple_return () && simple_p)
ffe14686
AM
5724 {
5725 if (dump_file)
5726 fprintf (dump_file,
5727 "Failed to redirect bb %d branch.\n", bb->index);
9771b263 5728 unconverted.safe_push (e);
ffe14686 5729 }
ffe14686
AM
5730 continue;
5731 }
5732
5733 /* Fix up the CFG for the successful change we just made. */
fefa31b5 5734 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
d3b623c7 5735 e->flags &= ~EDGE_CROSSING;
39d52ae5 5736 }
9771b263 5737 src_bbs.release ();
ffe14686 5738 return unconverted;
39d52ae5
BS
5739}
5740
ffe14686 5741/* Emit a return insn for the exit fallthru block. */
f30e25a3 5742basic_block
ffe14686
AM
5743emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5744{
5745 basic_block last_bb = exit_fallthru_edge->src;
5746
5747 if (JUMP_P (BB_END (last_bb)))
5748 {
5749 last_bb = split_edge (exit_fallthru_edge);
5750 exit_fallthru_edge = single_succ_edge (last_bb);
5751 }
5752 emit_barrier_after (BB_END (last_bb));
5753 emit_return_into_block (simple_p, last_bb);
5754 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5755 return last_bb;
5756}
ffe14686
AM
5757
5758
9faa82d8 5759/* Generate the prologue and epilogue RTL if the machine supports it. Thread
bdac5f58 5760 this into place with notes indicating where the prologue ends and where
484db665
BS
5761 the epilogue begins. Update the basic block information when possible.
5762
5763 Notes on epilogue placement:
5764 There are several kinds of edges to the exit block:
5765 * a single fallthru edge from LAST_BB
5766 * possibly, edges from blocks containing sibcalls
5767 * possibly, fake edges from infinite loops
5768
5769 The epilogue is always emitted on the fallthru edge from the last basic
5770 block in the function, LAST_BB, into the exit block.
5771
5772 If LAST_BB is empty except for a label, it is the target of every
5773 other basic block in the function that ends in a return. If a
5774 target has a return or simple_return pattern (possibly with
5775 conditional variants), these basic blocks can be changed so that a
5776 return insn is emitted into them, and their target is adjusted to
5777 the real exit block.
5778
5779 Notes on shrink wrapping: We implement a fairly conservative
5780 version of shrink-wrapping rather than the textbook one. We only
5781 generate a single prologue and a single epilogue. This is
5782 sufficient to catch a number of interesting cases involving early
5783 exits.
5784
5785 First, we identify the blocks that require the prologue to occur before
5786 them. These are the ones that modify a call-saved register, or reference
5787 any of the stack or frame pointer registers. To simplify things, we then
5788 mark everything reachable from these blocks as also requiring a prologue.
5789 This takes care of loops automatically, and avoids the need to examine
5790 whether MEMs reference the frame, since it is sufficient to check for
5791 occurrences of the stack or frame pointer.
5792
5793 We then compute the set of blocks for which the need for a prologue
5794 is anticipatable (borrowing terminology from the shrink-wrapping
5795 description in Muchnick's book). These are the blocks which either
5796 require a prologue themselves, or those that have only successors
5797 where the prologue is anticipatable. The prologue needs to be
5798 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5799 is not. For the moment, we ensure that only one such edge exists.
5800
5801 The epilogue is placed as described above, but we make a
5802 distinction between inserting return and simple_return patterns
5803 when modifying other blocks that end in a return. Blocks that end
5804 in a sibcall omit the sibcall_epilogue if the block is not in
5805 ANTIC. */
bdac5f58 5806
c81b4a0e 5807void
6fb5fa3c 5808thread_prologue_and_epilogue_insns (void)
bdac5f58 5809{
7458026b 5810 bool inserted;
6e1aa848 5811 vec<edge> unconverted_simple_returns = vNULL;
ffe14686 5812 bitmap_head bb_flags;
9c8348cf 5813 rtx_insn *returnjump;
9c8348cf 5814 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
dc01c3d1 5815 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
484db665 5816 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
628f6a4e 5817 edge_iterator ei;
484db665
BS
5818
5819 df_analyze ();
e881bb1b 5820
fefa31b5 5821 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
7458026b
ILT
5822
5823 inserted = false;
9c8348cf
DM
5824 epilogue_end = NULL;
5825 returnjump = NULL;
7458026b
ILT
5826
5827 /* Can't deal with multiple successors of the entry block at the
5828 moment. Function should always have at least one entry
5829 point. */
fefa31b5
DM
5830 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5831 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
484db665
BS
5832 orig_entry_edge = entry_edge;
5833
dc01c3d1 5834 split_prologue_seq = NULL;
7458026b
ILT
5835 if (flag_split_stack
5836 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5837 == NULL))
5838 {
7458026b 5839 start_sequence ();
10169a8b 5840 emit_insn (targetm.gen_split_stack_prologue ());
484db665 5841 split_prologue_seq = get_insns ();
7458026b
ILT
5842 end_sequence ();
5843
484db665 5844 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5368224f 5845 set_insn_locations (split_prologue_seq, prologue_location);
7458026b
ILT
5846 }
5847
dc01c3d1 5848 prologue_seq = NULL;
e86a9946 5849 if (targetm.have_prologue ())
bdac5f58 5850 {
e881bb1b 5851 start_sequence ();
e86a9946 5852 rtx_insn *seq = targetm.gen_prologue ();
e881bb1b 5853 emit_insn (seq);
bdac5f58 5854
b8698a0f 5855 /* Insert an explicit USE for the frame pointer
6fb5fa3c 5856 if the profiling is on and the frame pointer is required. */
e3b5732b 5857 if (crtl->profile && frame_pointer_needed)
c41c1387 5858 emit_use (hard_frame_pointer_rtx);
6fb5fa3c 5859
bdac5f58 5860 /* Retain a map of the prologue insns. */
cd9c1ca8 5861 record_insns (seq, NULL, &prologue_insn_hash);
56d17681 5862 emit_note (NOTE_INSN_PROLOGUE_END);
b8698a0f 5863
56d17681
UB
5864 /* Ensure that instructions are not moved into the prologue when
5865 profiling is on. The call to the profiling routine can be
5866 emitted within the live range of a call-clobbered register. */
3c5273a9 5867 if (!targetm.profile_before_prologue () && crtl->profile)
56d17681 5868 emit_insn (gen_blockage ());
9185a8d5 5869
484db665 5870 prologue_seq = get_insns ();
e881bb1b 5871 end_sequence ();
5368224f 5872 set_insn_locations (prologue_seq, prologue_location);
484db665 5873 }
e881bb1b 5874
484db665
BS
5875 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5876
484db665
BS
5877 /* Try to perform a kind of shrink-wrapping, making sure the
5878 prologue/epilogue is emitted only around those parts of the
5879 function that require it. */
5880
f30e25a3 5881 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
bdac5f58 5882
484db665
BS
5883 if (split_prologue_seq != NULL_RTX)
5884 {
f4b31a33 5885 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
484db665
BS
5886 inserted = true;
5887 }
5888 if (prologue_seq != NULL_RTX)
5889 {
5890 insert_insn_on_edge (prologue_seq, entry_edge);
5891 inserted = true;
5892 }
5893
19d3c25c
RH
5894 /* If the exit block has no non-fake predecessors, we don't need
5895 an epilogue. */
fefa31b5 5896 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
19d3c25c
RH
5897 if ((e->flags & EDGE_FAKE) == 0)
5898 break;
5899 if (e == NULL)
5900 goto epilogue_done;
5901
fefa31b5 5902 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
484db665 5903
fefa31b5 5904 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
ffe14686 5905
4bccb39e 5906 if (targetm.have_simple_return () && entry_edge != orig_entry_edge)
f30e25a3
ZC
5907 exit_fallthru_edge
5908 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5909 &unconverted_simple_returns,
5910 &returnjump);
4bccb39e 5911 if (targetm.have_return ())
ffe14686
AM
5912 {
5913 if (exit_fallthru_edge == NULL)
5914 goto epilogue_done;
69732dcb 5915
ffe14686
AM
5916 if (optimize)
5917 {
5918 basic_block last_bb = exit_fallthru_edge->src;
484db665 5919
ffe14686
AM
5920 if (LABEL_P (BB_HEAD (last_bb))
5921 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6e1aa848 5922 convert_jumps_to_returns (last_bb, false, vNULL);
ffe14686 5923
1ff2fd21
AM
5924 if (EDGE_COUNT (last_bb->preds) != 0
5925 && single_succ_p (last_bb))
484db665 5926 {
ffe14686
AM
5927 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5928 epilogue_end = returnjump = BB_END (last_bb);
08b7ff1e 5929
ffe14686
AM
5930 /* Emitting the return may add a basic block.
5931 Fix bb_flags for the added block. */
4bccb39e
RS
5932 if (targetm.have_simple_return ()
5933 && last_bb != exit_fallthru_edge->src)
ffe14686 5934 bitmap_set_bit (&bb_flags, last_bb->index);
08b7ff1e 5935
ffe14686 5936 goto epilogue_done;
69732dcb 5937 }
2dd8bc01 5938 }
69732dcb 5939 }
cd9c1ca8
RH
5940
5941 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5942 this marker for the splits of EH_RETURN patterns, and nothing else
5943 uses the flag in the meantime. */
5944 epilogue_completed = 1;
5945
5946#ifdef HAVE_eh_return
5947 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5948 some targets, these get split to a special version of the epilogue
5949 code. In order to be able to properly annotate these with unwind
5950 info, try to split them now. If we get a valid split, drop an
5951 EPILOGUE_BEG note and mark the insns as epilogue insns. */
fefa31b5 5952 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
cd9c1ca8 5953 {
691fe203 5954 rtx_insn *prev, *last, *trial;
cd9c1ca8
RH
5955
5956 if (e->flags & EDGE_FALLTHRU)
5957 continue;
5958 last = BB_END (e->src);
5959 if (!eh_returnjump_p (last))
5960 continue;
5961
5962 prev = PREV_INSN (last);
5963 trial = try_split (PATTERN (last), last, 1);
5964 if (trial == last)
5965 continue;
5966
5967 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5968 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5969 }
5970#endif
5971
484db665
BS
5972 /* If nothing falls through into the exit block, we don't need an
5973 epilogue. */
623a66fa 5974
484db665 5975 if (exit_fallthru_edge == NULL)
623a66fa
R
5976 goto epilogue_done;
5977
e86a9946 5978 if (targetm.have_epilogue ())
bdac5f58 5979 {
19d3c25c 5980 start_sequence ();
2e040219 5981 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
e86a9946 5982 rtx_insn *seq = targetm.gen_epilogue ();
55c623b5
UW
5983 if (seq)
5984 emit_jump_insn (seq);
bdac5f58 5985
19d3c25c 5986 /* Retain a map of the epilogue insns. */
cd9c1ca8 5987 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 5988 set_insn_locations (seq, epilogue_location);
bdac5f58 5989
2f937369 5990 seq = get_insns ();
484db665 5991 returnjump = get_last_insn ();
718fe406 5992 end_sequence ();
e881bb1b 5993
484db665 5994 insert_insn_on_edge (seq, exit_fallthru_edge);
7458026b 5995 inserted = true;
dc0ff1c8
BS
5996
5997 if (JUMP_P (returnjump))
387748de 5998 set_return_jump_label (returnjump);
bdac5f58 5999 }
623a66fa 6000 else
623a66fa
R
6001 {
6002 basic_block cur_bb;
6003
484db665 6004 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
623a66fa
R
6005 goto epilogue_done;
6006 /* We have a fall-through edge to the exit block, the source is not
6007 at the end of the function, and there will be an assembler epilogue
6008 at the end of the function.
6009 We can't use force_nonfallthru here, because that would try to
484db665 6010 use return. Inserting a jump 'by hand' is extremely messy, so
623a66fa 6011 we take advantage of cfg_layout_finalize using
484db665 6012 fixup_fallthru_exit_predecessor. */
35b6b437 6013 cfg_layout_initialize (0);
11cd3bed 6014 FOR_EACH_BB_FN (cur_bb, cfun)
24bd1a0b
DB
6015 if (cur_bb->index >= NUM_FIXED_BLOCKS
6016 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
370369e1 6017 cur_bb->aux = cur_bb->next_bb;
623a66fa
R
6018 cfg_layout_finalize ();
6019 }
cf103ca4 6020
19d3c25c 6021epilogue_done:
484db665 6022
a8ba47cb 6023 default_rtl_profile ();
e881bb1b 6024
ca1117cc 6025 if (inserted)
30a873c3 6026 {
cf103ca4
EB
6027 sbitmap blocks;
6028
30a873c3
ZD
6029 commit_edge_insertions ();
6030
cf103ca4 6031 /* Look for basic blocks within the prologue insns. */
8b1c6fd7 6032 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
f61e445a 6033 bitmap_clear (blocks);
d7c028c0
LC
6034 bitmap_set_bit (blocks, entry_edge->dest->index);
6035 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
cf103ca4
EB
6036 find_many_sub_basic_blocks (blocks);
6037 sbitmap_free (blocks);
6038
30a873c3
ZD
6039 /* The epilogue insns we inserted may cause the exit edge to no longer
6040 be fallthru. */
fefa31b5 6041 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
30a873c3
ZD
6042 {
6043 if (((e->flags & EDGE_FALLTHRU) != 0)
6044 && returnjump_p (BB_END (e->src)))
6045 e->flags &= ~EDGE_FALLTHRU;
6046 }
6047 }
0a1c58a2 6048
4bccb39e 6049 if (targetm.have_simple_return ())
08b7ff1e
TS
6050 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
6051 returnjump, unconverted_simple_returns);
484db665 6052
0a1c58a2 6053 /* Emit sibling epilogues before any sibling call sites. */
fefa31b5
DM
6054 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6055 ei_safe_edge (ei));
6056 )
0a1c58a2
JL
6057 {
6058 basic_block bb = e->src;
691fe203 6059 rtx_insn *insn = BB_END (bb);
0a1c58a2 6060
4b4bf941 6061 if (!CALL_P (insn)
484db665 6062 || ! SIBLING_CALL_P (insn)
4bccb39e
RS
6063 || (targetm.have_simple_return ()
6064 && entry_edge != orig_entry_edge
6065 && !bitmap_bit_p (&bb_flags, bb->index)))
628f6a4e
BE
6066 {
6067 ei_next (&ei);
6068 continue;
6069 }
0a1c58a2 6070
e86a9946 6071 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
484db665
BS
6072 {
6073 start_sequence ();
6074 emit_note (NOTE_INSN_EPILOGUE_BEG);
6075 emit_insn (ep_seq);
dc01c3d1 6076 rtx_insn *seq = get_insns ();
484db665 6077 end_sequence ();
0a1c58a2 6078
484db665
BS
6079 /* Retain a map of the epilogue insns. Used in life analysis to
6080 avoid getting rid of sibcall epilogue insns. Do this before we
6081 actually emit the sequence. */
6082 record_insns (seq, NULL, &epilogue_insn_hash);
5368224f 6083 set_insn_locations (seq, epilogue_location);
2f937369 6084
484db665
BS
6085 emit_insn_before (seq, insn);
6086 }
628f6a4e 6087 ei_next (&ei);
0a1c58a2 6088 }
ca1117cc 6089
86c82654
RH
6090 if (epilogue_end)
6091 {
9c8348cf 6092 rtx_insn *insn, *next;
86c82654
RH
6093
6094 /* Similarly, move any line notes that appear after the epilogue.
ff7cc307 6095 There is no need, however, to be quite so anal about the existence
071a42f9 6096 of such a note. Also possibly move
84c1fa24
UW
6097 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6098 info generation. */
718fe406 6099 for (insn = epilogue_end; insn; insn = next)
86c82654
RH
6100 {
6101 next = NEXT_INSN (insn);
b8698a0f 6102 if (NOTE_P (insn)
a38e7aa5 6103 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
86c82654
RH
6104 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6105 }
6106 }
6fb5fa3c 6107
484db665
BS
6108 bitmap_clear (&bb_flags);
6109
6fb5fa3c
DB
6110 /* Threading the prologue and epilogue changes the artificial refs
6111 in the entry and exit blocks. */
6112 epilogue_completed = 1;
6113 df_update_entry_exit_and_calls ();
bdac5f58
TW
6114}
6115
cd9c1ca8
RH
6116/* Reposition the prologue-end and epilogue-begin notes after
6117 instruction scheduling. */
bdac5f58
TW
6118
6119void
6fb5fa3c 6120reposition_prologue_and_epilogue_notes (void)
bdac5f58 6121{
e86a9946
RS
6122 if (!targetm.have_prologue ()
6123 && !targetm.have_epilogue ()
6124 && !targetm.have_sibcall_epilogue ())
5251b8b3 6125 return;
5251b8b3 6126
cd9c1ca8
RH
6127 /* Since the hash table is created on demand, the fact that it is
6128 non-null is a signal that it is non-empty. */
6129 if (prologue_insn_hash != NULL)
bdac5f58 6130 {
d242408f 6131 size_t len = prologue_insn_hash->elements ();
691fe203 6132 rtx_insn *insn, *last = NULL, *note = NULL;
bdac5f58 6133
cd9c1ca8
RH
6134 /* Scan from the beginning until we reach the last prologue insn. */
6135 /* ??? While we do have the CFG intact, there are two problems:
6136 (1) The prologue can contain loops (typically probing the stack),
6137 which means that the end of the prologue isn't in the first bb.
6138 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6fb5fa3c 6139 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
bdac5f58 6140 {
4b4bf941 6141 if (NOTE_P (insn))
9392c110 6142 {
a38e7aa5 6143 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
0a1c58a2
JL
6144 note = insn;
6145 }
cd9c1ca8 6146 else if (contains (insn, prologue_insn_hash))
0a1c58a2 6147 {
9f53e965
RH
6148 last = insn;
6149 if (--len == 0)
6150 break;
6151 }
6152 }
797a6ac1 6153
9f53e965
RH
6154 if (last)
6155 {
cd9c1ca8 6156 if (note == NULL)
9f53e965 6157 {
cd9c1ca8
RH
6158 /* Scan forward looking for the PROLOGUE_END note. It should
6159 be right at the beginning of the block, possibly with other
6160 insn notes that got moved there. */
6161 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6162 {
6163 if (NOTE_P (note)
6164 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6165 break;
6166 }
9f53e965 6167 }
c93b03c2 6168
9f53e965 6169 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
4b4bf941 6170 if (LABEL_P (last))
9f53e965
RH
6171 last = NEXT_INSN (last);
6172 reorder_insns (note, note, last);
bdac5f58 6173 }
0a1c58a2
JL
6174 }
6175
cd9c1ca8 6176 if (epilogue_insn_hash != NULL)
0a1c58a2 6177 {
cd9c1ca8
RH
6178 edge_iterator ei;
6179 edge e;
bdac5f58 6180
fefa31b5 6181 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
bdac5f58 6182 {
691fe203 6183 rtx_insn *insn, *first = NULL, *note = NULL;
997704f1 6184 basic_block bb = e->src;
c93b03c2 6185
997704f1 6186 /* Scan from the beginning until we reach the first epilogue insn. */
cd9c1ca8 6187 FOR_BB_INSNS (bb, insn)
9f53e965 6188 {
cd9c1ca8
RH
6189 if (NOTE_P (insn))
6190 {
6191 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6192 {
6193 note = insn;
997704f1 6194 if (first != NULL)
cd9c1ca8
RH
6195 break;
6196 }
6197 }
997704f1 6198 else if (first == NULL && contains (insn, epilogue_insn_hash))
cd9c1ca8 6199 {
997704f1 6200 first = insn;
cd9c1ca8
RH
6201 if (note != NULL)
6202 break;
6203 }
9392c110 6204 }
997704f1
RH
6205
6206 if (note)
6207 {
6208 /* If the function has a single basic block, and no real
b8698a0f 6209 epilogue insns (e.g. sibcall with no cleanup), the
997704f1
RH
6210 epilogue note can get scheduled before the prologue
6211 note. If we have frame related prologue insns, having
6212 them scanned during the epilogue will result in a crash.
6213 In this case re-order the epilogue note to just before
6214 the last insn in the block. */
6215 if (first == NULL)
6216 first = BB_END (bb);
6217
6218 if (PREV_INSN (first) != note)
6219 reorder_insns (note, note, PREV_INSN (first));
6220 }
bdac5f58
TW
6221 }
6222 }
bdac5f58 6223}
87ff9c8e 6224
df92c640
SB
6225/* Returns the name of function declared by FNDECL. */
6226const char *
6227fndecl_name (tree fndecl)
6228{
6229 if (fndecl == NULL)
6230 return "(nofn)";
6231 return lang_hooks.decl_printable_name (fndecl, 2);
6232}
6233
532aafad
SB
6234/* Returns the name of function FN. */
6235const char *
6236function_name (struct function *fn)
6237{
df92c640
SB
6238 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6239 return fndecl_name (fndecl);
532aafad
SB
6240}
6241
faed5cc3
SB
6242/* Returns the name of the current function. */
6243const char *
6244current_function_name (void)
6245{
532aafad 6246 return function_name (cfun);
faed5cc3 6247}
ef330312
PB
6248\f
6249
c2924966 6250static unsigned int
ef330312
PB
6251rest_of_handle_check_leaf_regs (void)
6252{
6253#ifdef LEAF_REGISTERS
416ff32e 6254 crtl->uses_only_leaf_regs
ef330312
PB
6255 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6256#endif
c2924966 6257 return 0;
ef330312
PB
6258}

/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
	func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	{
	  /* So this might be a type referenced by a global variable.
	     Record that type so that we can later decide to emit its
	     debug information.  */
	  vec_safe_push (types_used_by_cur_var_decl, t);
	}
    }
}
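
/* Example of the intended behavior (a sketch added for illustration, not
   taken from the sources): for a declaration such as

       struct foo **arr[10];

   used_types_insert peels the array and pointer layers (stopping early if
   an intermediate type has its own name), falls back to the main variant
   when the stripped type has no distinct name, and records `struct foo'
   -- but only when debug info is requested.  Outside of a function body
   (cfun == NULL) the type is queued in types_used_by_cur_var_decl
   instead of being inserted directly.  */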

/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
			 types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash
	  = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc<types_used_by_vars_entry> ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
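
/* Sketch of how the two recording paths fit together (an assumed usage
   pattern added for illustration, not taken from this file): while a
   global variable's initializer is processed, used_types_insert queues
   types in types_used_by_cur_var_decl; a later pass over that vector can
   then call

       types_used_by_var_decl_insert (type, var_decl);

   to associate each queued type with the variable, so the debug-info
   machinery can decide to emit those types along with the variable.  */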

namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
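
/* Aside (a hedged example added for illustration, not from this file):
   with -fstack-usage, output_stack_usage appends one line per function to
   the .su file, roughly of the form

       foo	40	static

   where the last column says whether the frame size is static, dynamic,
   or bounded.  */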

namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}


/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2)); */

static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change `in' once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 too complicated a problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we replaced only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
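
/* Example of source code that exercises this pass (a sketch added for
   illustration, not part of the original sources):

       int
       f (int x)
       {
	 int orig = x;
	 asm ("" : "+mr" (x));
	 return orig + x;
       }

   The "+mr" in-out operand is split early into a "=mr" output with a
   matching "0" input, and once SSA gives the two halves different pseudos
   the routine above re-inserts the copy `output = input' so that reload
   again sees matching operands.  */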

/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (TREE_CODE (d) == VAR_DECL);
  vec_safe_push (fun->local_decls, d);
}

namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints

unsigned
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}

#include "gt-function.h"