[thirdparty/gcc.git] / gcc / function.c
bccafa26 1/* Expands front end tree to back end RTL for GCC.
d353bf18 2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
897b77d6 3
f12b58b3 4This file is part of GCC.
897b77d6 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
897b77d6 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
897b77d6 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
897b77d6 19
897b77d6 20/* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
e8825bb0 32 not get a hard register. */
897b77d6 33
34#include "config.h"
405711de 35#include "system.h"
805e22b2 36#include "coretypes.h"
9ef16211 37#include "backend.h"
38#include "tree.h"
39#include "rtl.h"
40#include "df.h"
d7091a76 41#include "rtl-error.h"
b20a8bb4 42#include "alias.h"
b20a8bb4 43#include "fold-const.h"
9ed99284 44#include "stor-layout.h"
45#include "varasm.h"
46#include "stringpool.h"
897b77d6 47#include "flags.h"
dcabb90e 48#include "except.h"
d53441c8 49#include "insn-config.h"
50#include "expmed.h"
51#include "dojump.h"
52#include "explow.h"
53#include "calls.h"
54#include "emit-rtl.h"
55#include "stmt.h"
897b77d6 56#include "expr.h"
34517c64 57#include "insn-codes.h"
530178a9 58#include "optabs.h"
d8fc4d0b 59#include "libfuncs.h"
897b77d6 60#include "regs.h"
897b77d6 61#include "recog.h"
62#include "output.h"
075136a2 63#include "tm_p.h"
96554925 64#include "langhooks.h"
45550790 65#include "target.h"
218e3e4e 66#include "common/common-target.h"
bc61cadb 67#include "gimple-expr.h"
a8783bee 68#include "gimplify.h"
77fce4cd 69#include "tree-pass.h"
94ea8568 70#include "cfgrtl.h"
71#include "cfganal.h"
72#include "cfgbuild.h"
73#include "cfgcleanup.h"
0a55d497 74#include "params.h"
75#include "bb-reorder.h"
c562205f 76#include "shrink-wrap.h"
e0ff5636 77#include "toplev.h"
2d184b77 78#include "rtl-iter.h"
058a1b7a 79#include "tree-chkp.h"
80#include "rtl-chkp.h"
f1a0edff 81
c8a152f6 82/* So we can assign to cfun in this file. */
83#undef cfun
84
256f9b65 85#ifndef STACK_ALIGNMENT_NEEDED
86#define STACK_ALIGNMENT_NEEDED 1
87#endif
88
1cd50c9a 89#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
90
897b77d6 91/* Round a value down to the largest multiple of the required alignment
92 that does not exceed it. Avoid using division in case the value is
93 negative. Assume the alignment is a power of two. */
94#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
95
96/* Similar, but round to the next highest integer that meets the
97 alignment. */
98#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
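/* Illustrative note (added for exposition, not part of the original source):
   with a power-of-two ALIGN these reduce to simple bit masks, e.g.
   FLOOR_ROUND (13, 8) == 8, CEIL_ROUND (13, 8) == 16, and
   FLOOR_ROUND (-13, 8) == -16, which is why the mask form is preferred
   over division when VALUE may be negative.  */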
99
897b77d6 100/* Nonzero once virtual register instantiation has been done.
9c0a756f 101 assign_stack_local uses frame_pointer_rtx when this is nonzero.
102 calls.c:emit_library_call_value_1 uses it to set up
103 post-instantiation libcalls. */
104int virtuals_instantiated;
897b77d6 105
4781f9b9 106/* Assign unique numbers to labels generated for profiling, debugging, etc. */
573aba85 107static GTY(()) int funcdef_no;
b8a21949 108
ab5beff9 109/* These variables hold pointers to functions to create and destroy
110 target specific, per-function data structures. */
de1b648b 111struct machine_function * (*init_machine_status) (void);
adc2961c 112
304c5bf1 113/* The currently compiled function. */
08513b52 114struct function *cfun = 0;
304c5bf1 115
25e880b1 116/* These hashes record the prologue and epilogue insns. */
d1023d12 117
eae1ecb4 118struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
d1023d12 119{
120 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
121 static bool equal (rtx a, rtx b) { return a == b; }
122};
123
124static GTY((cache))
125 hash_table<insn_cache_hasher> *prologue_insn_hash;
126static GTY((cache))
127 hash_table<insn_cache_hasher> *epilogue_insn_hash;
897b77d6 128\f
1a4c44c5 129
2ef51f0e 130hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
f1f41a6c 131vec<tree, va_gc> *types_used_by_cur_var_decl;
1a4c44c5 132
209a68cc 133/* Forward declarations. */
134
de1b648b 135static struct temp_slot *find_temp_slot_from_address (rtx);
de1b648b 136static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
3754d046 137static void pad_below (struct args_size *, machine_mode, tree);
8bb2625b 138static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
de1b648b 139static int all_blocks (tree, tree *);
140static tree *get_block_vector (tree, int *);
141extern tree debug_find_var_in_block_tree (tree, tree);
4885b286 142/* We always define `record_insns' even if it's not used so that we
2dc40d2d 143 can always export `prologue_epilogue_contains'. */
d1023d12 144static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
145 ATTRIBUTE_UNUSED;
146static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
87d4aa85 147static void prepare_function_start (void);
de1b648b 148static void do_clobber_return_reg (rtx, void *);
149static void do_use_return_reg (rtx, void *);
8e4c05da 150\f
5737913a 151/* Stack of nested functions. */
152/* Keep track of the cfun stack. */
997d68fe 153
5737913a 154typedef struct function *function_p;
997d68fe 155
f1f41a6c 156static vec<function_p> function_context_stack;
897b77d6 157
158/* Save the current context for compilation of a nested function.
d2764e2d 159 This is called from language-specific code. */
897b77d6 160
161void
d2764e2d 162push_function_context (void)
897b77d6 163{
08513b52 164 if (cfun == 0)
80f2ef47 165 allocate_struct_function (NULL, false);
304c5bf1 166
f1f41a6c 167 function_context_stack.safe_push (cfun);
87d4aa85 168 set_cfun (NULL);
897b77d6 169}
170
171/* Restore the last saved context, at the end of a nested function.
172 This function is called from language-specific code. */
173
174void
d2764e2d 175pop_function_context (void)
897b77d6 176{
f1f41a6c 177 struct function *p = function_context_stack.pop ();
87d4aa85 178 set_cfun (p);
897b77d6 179 current_function_decl = p->decl;
897b77d6 180
897b77d6 181 /* Reset variables that have known state during rtx generation. */
897b77d6 182 virtuals_instantiated = 0;
316bc009 183 generating_concat_p = 1;
897b77d6 184}
2a228d52 185
3c3bb268 186/* Clear out all parts of the state in F that can safely be discarded
187 after the function has been parsed, but not compiled, to let
188 garbage collection reclaim the memory. */
189
190void
de1b648b 191free_after_parsing (struct function *f)
3c3bb268 192{
b75409ba 193 f->language = 0;
3c3bb268 194}
195
26df1c5e 196/* Clear out all parts of the state in F that can safely be discarded
197 after the function has been compiled, to let garbage collection
a57bcb3b 198 reclaim the memory. */
c788feb1 199
26df1c5e 200void
de1b648b 201free_after_compilation (struct function *f)
26df1c5e 202{
25e880b1 203 prologue_insn_hash = NULL;
204 epilogue_insn_hash = NULL;
205
dd045aee 206 free (crtl->emit.regno_pointer_align);
a4a0e8fd 207
fd6ffb7c 208 memset (crtl, 0, sizeof (struct rtl_data));
1f3233d1 209 f->eh = NULL;
1f3233d1 210 f->machine = NULL;
7a22afab 211 f->cfg = NULL;
789581b6 212 f->curr_properties &= ~PROP_cfg;
3c3bb268 213
a9f6414b 214 regno_reg_rtx = NULL;
26df1c5e 215}
897b77d6 216\f
0a893c29 217/* Return size needed for stack frame based on slots so far allocated.
218 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
219 the caller may have to do that. */
26d04e5f 220
0a893c29 221HOST_WIDE_INT
de1b648b 222get_frame_size (void)
0a893c29 223{
b079a207 224 if (FRAME_GROWS_DOWNWARD)
225 return -frame_offset;
226 else
227 return frame_offset;
0a893c29 228}
229
26d04e5f 230/* Issue an error message and return TRUE if frame OFFSET overflows in
231 the signed target pointer arithmetics for function FUNC. Otherwise
232 return FALSE. */
233
234bool
235frame_offset_overflow (HOST_WIDE_INT offset, tree func)
48e1416a 236{
26d04e5f 237 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
238
239 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
240 /* Leave room for the fixed part of the frame. */
241 - 64 * UNITS_PER_WORD)
242 {
712d2297 243 error_at (DECL_SOURCE_LOCATION (func),
244 "total size of local objects too large");
26d04e5f 245 return TRUE;
246 }
247
248 return FALSE;
249}
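/* Worked example (illustrative assumption, not from the original source):
   with a 32-bit Pmode and UNITS_PER_WORD == 4, the check above rejects any
   frame whose local-object size exceeds 2^31 - 256 bytes, i.e. half the
   address space minus the 64 words left for the fixed part of the frame.  */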
250
ad33891d 251/* Return stack slot alignment in bits for TYPE and MODE. */
252
253static unsigned int
3754d046 254get_stack_local_alignment (tree type, machine_mode mode)
ad33891d 255{
256 unsigned int alignment;
257
258 if (mode == BLKmode)
259 alignment = BIGGEST_ALIGNMENT;
260 else
261 alignment = GET_MODE_ALIGNMENT (mode);
262
263 /* Allow the front-end to (possibly) increase the alignment of this
264 stack slot. */
265 if (! type)
266 type = lang_hooks.types.type_for_mode (mode, 0);
267
268 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
269}
270
43165fe4 271/* Determine whether it is possible to fit a stack slot of size SIZE and
272 alignment ALIGNMENT into an area in the stack frame that starts at
273 frame offset START and has a length of LENGTH. If so, store the frame
274 offset to be used for the stack slot in *POFFSET and return true;
275 return false otherwise. This function will extend the frame size when
276 given a start/length pair that lies at the end of the frame. */
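/* Worked example (illustrative, assuming an upward-growing frame and a
   frame_phase of 0): fitting a SIZE == 8 slot with ALIGNMENT == 8 into a
   free area with START == 4 and LENGTH == 16 yields
   *POFFSET == CEIL_ROUND (4, 8) == 8; the slot occupies bytes [8, 16) and
   the leftover ranges [4, 8) and [16, 20) remain available for the
   add_frame_space calls made by the caller.  */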
277
278static bool
279try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
280 HOST_WIDE_INT size, unsigned int alignment,
281 HOST_WIDE_INT *poffset)
282{
283 HOST_WIDE_INT this_frame_offset;
284 int frame_off, frame_alignment, frame_phase;
285
286 /* Calculate how many bytes the start of local variables is off from
287 stack alignment. */
288 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
289 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
290 frame_phase = frame_off ? frame_alignment - frame_off : 0;
291
292 /* Round the frame offset to the specified alignment. */
293
294 /* We must be careful here, since FRAME_OFFSET might be negative and
295 division with a negative dividend isn't as well defined as we might
296 like. So we instead assume that ALIGNMENT is a power of two and
297 use logical operations which are unambiguous. */
298 if (FRAME_GROWS_DOWNWARD)
299 this_frame_offset
300 = (FLOOR_ROUND (start + length - size - frame_phase,
301 (unsigned HOST_WIDE_INT) alignment)
302 + frame_phase);
303 else
304 this_frame_offset
305 = (CEIL_ROUND (start - frame_phase,
306 (unsigned HOST_WIDE_INT) alignment)
307 + frame_phase);
308
309 /* See if it fits. If this space is at the edge of the frame,
310 consider extending the frame to make it fit. Our caller relies on
311 this when allocating a new slot. */
312 if (frame_offset == start && this_frame_offset < frame_offset)
313 frame_offset = this_frame_offset;
314 else if (this_frame_offset < start)
315 return false;
316 else if (start + length == frame_offset
317 && this_frame_offset + size > start + length)
318 frame_offset = this_frame_offset + size;
319 else if (this_frame_offset + size > start + length)
320 return false;
321
322 *poffset = this_frame_offset;
323 return true;
324}
325
326/* Create a new frame_space structure describing free space in the stack
327 frame beginning at START and ending at END, and chain it into the
328 function's frame_space_list. */
329
330static void
331add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
332{
25a27413 333 struct frame_space *space = ggc_alloc<frame_space> ();
43165fe4 334 space->next = crtl->frame_space_list;
335 crtl->frame_space_list = space;
336 space->start = start;
337 space->length = end - start;
338}
339
897b77d6 340/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
341 with machine mode MODE.
06ebc183 342
897b77d6 343 ALIGN controls the amount of alignment for the address of the slot:
344 0 means according to MODE,
345 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
c20b6803 346 -2 means use BITS_PER_UNIT,
897b77d6 347 positive specifies alignment boundary in bits.
348
943d8723 349 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
350 alignment and ASLK_RECORD_PAD bit set if we should remember
351 extra space we allocated for alignment purposes. When we are
352 called from assign_stack_temp_for_type, it is not set so we don't
353 track the same stack slot in two independent lists.
27a7a23a 354
b079a207 355 We do not round to stack_boundary here. */
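/* Hedged usage sketch (illustrative only): most callers go through the
   assign_stack_local wrapper defined below, e.g.

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   which requests an SImode-sized slot aligned according to SImode; passing
   -1 for ALIGN instead would use BIGGEST_ALIGNMENT and round SIZE up to a
   multiple of it, as described above.  */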
897b77d6 356
b079a207 357rtx
3754d046 358assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
943d8723 359 int align, int kind)
897b77d6 360{
19cb6b50 361 rtx x, addr;
897b77d6 362 int bigend_correction = 0;
286887d9 363 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
ad33891d 364 unsigned int alignment, alignment_in_bits;
897b77d6 365
366 if (align == 0)
367 {
ad33891d 368 alignment = get_stack_local_alignment (NULL, mode);
9bd87fd2 369 alignment /= BITS_PER_UNIT;
897b77d6 370 }
371 else if (align == -1)
372 {
373 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
374 size = CEIL_ROUND (size, alignment);
375 }
c20b6803 376 else if (align == -2)
377 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
897b77d6 378 else
379 alignment = align / BITS_PER_UNIT;
380
27a7a23a 381 alignment_in_bits = alignment * BITS_PER_UNIT;
382
27a7a23a 383 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
384 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
385 {
386 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
387 alignment = alignment_in_bits / BITS_PER_UNIT;
388 }
a79cb8e9 389
27a7a23a 390 if (SUPPORTS_STACK_ALIGNMENT)
391 {
392 if (crtl->stack_alignment_estimated < alignment_in_bits)
393 {
394 if (!crtl->stack_realign_processed)
395 crtl->stack_alignment_estimated = alignment_in_bits;
396 else
397 {
398 /* If stack is realigned and stack alignment value
399 hasn't been finalized, it is OK not to increase
400 stack_alignment_estimated. The bigger alignment
401 requirement is recorded in stack_alignment_needed
402 below. */
403 gcc_assert (!crtl->stack_realign_finalized);
404 if (!crtl->stack_realign_needed)
405 {
406 /* It is OK to reduce the alignment as long as the
407 requested size is 0 or the estimated stack
408 alignment >= mode alignment. */
943d8723 409 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
27a7a23a 410 || size == 0
411 || (crtl->stack_alignment_estimated
412 >= GET_MODE_ALIGNMENT (mode)));
413 alignment_in_bits = crtl->stack_alignment_estimated;
414 alignment = alignment_in_bits / BITS_PER_UNIT;
415 }
416 }
417 }
418 }
ad33891d 419
420 if (crtl->stack_alignment_needed < alignment_in_bits)
421 crtl->stack_alignment_needed = alignment_in_bits;
bd9c33a8 422 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
423 crtl->max_used_stack_slot_alignment = alignment_in_bits;
a79cb8e9 424
43165fe4 425 if (mode != BLKmode || size != 0)
426 {
943d8723 427 if (kind & ASLK_RECORD_PAD)
43165fe4 428 {
943d8723 429 struct frame_space **psp;
430
431 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
432 {
433 struct frame_space *space = *psp;
434 if (!try_fit_stack_local (space->start, space->length, size,
435 alignment, &slot_offset))
436 continue;
437 *psp = space->next;
438 if (slot_offset > space->start)
439 add_frame_space (space->start, slot_offset);
440 if (slot_offset + size < space->start + space->length)
441 add_frame_space (slot_offset + size,
442 space->start + space->length);
443 goto found_space;
444 }
43165fe4 445 }
446 }
447 else if (!STACK_ALIGNMENT_NEEDED)
448 {
449 slot_offset = frame_offset;
450 goto found_space;
451 }
452
453 old_frame_offset = frame_offset;
454
455 if (FRAME_GROWS_DOWNWARD)
456 {
457 frame_offset -= size;
458 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
9f843b44 459
943d8723 460 if (kind & ASLK_RECORD_PAD)
461 {
462 if (slot_offset > frame_offset)
463 add_frame_space (frame_offset, slot_offset);
464 if (slot_offset + size < old_frame_offset)
465 add_frame_space (slot_offset + size, old_frame_offset);
466 }
43165fe4 467 }
468 else
256f9b65 469 {
43165fe4 470 frame_offset += size;
471 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
472
943d8723 473 if (kind & ASLK_RECORD_PAD)
474 {
475 if (slot_offset > old_frame_offset)
476 add_frame_space (old_frame_offset, slot_offset);
477 if (slot_offset + size < frame_offset)
478 add_frame_space (slot_offset + size, frame_offset);
479 }
256f9b65 480 }
897b77d6 481
43165fe4 482 found_space:
897b77d6 483 /* On a big-endian machine, if we are allocating more space than we will use,
484 use the least significant bytes of those that are allocated. */
1c088911 485 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
897b77d6 486 bigend_correction = size - GET_MODE_SIZE (mode);
897b77d6 487
897b77d6 488 /* If we have already instantiated virtual registers, return the actual
489 address relative to the frame pointer. */
b079a207 490 if (virtuals_instantiated)
29c05e22 491 addr = plus_constant (Pmode, frame_pointer_rtx,
eb21abb2 492 trunc_int_for_mode
43165fe4 493 (slot_offset + bigend_correction
eb21abb2 494 + STARTING_FRAME_OFFSET, Pmode));
897b77d6 495 else
29c05e22 496 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
eb21abb2 497 trunc_int_for_mode
43165fe4 498 (slot_offset + bigend_correction,
eb21abb2 499 Pmode));
897b77d6 500
941522d6 501 x = gen_rtx_MEM (mode, addr);
ad33891d 502 set_mem_align (x, alignment_in_bits);
43283c91 503 MEM_NOTRAP_P (x) = 1;
897b77d6 504
b079a207 505 stack_slot_list
506 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
26df1c5e 507
b079a207 508 if (frame_offset_overflow (frame_offset, current_function_decl))
509 frame_offset = 0;
55abba5b 510
897b77d6 511 return x;
512}
27a7a23a 513
514/* Wrap up assign_stack_local_1 with KIND defaulted to ASLK_RECORD_PAD. */
515
516rtx
3754d046 517assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
27a7a23a 518{
943d8723 519 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
27a7a23a 520}
a6629703 521\f
fef299ce 522/* In order to evaluate some expressions, such as function calls returning
523 structures in memory, we need to temporarily allocate stack locations.
524 We record each allocated temporary in the following structure.
525
526 Associated with each temporary slot is a nesting level. When we pop up
527 one level, all temporaries associated with the previous level are freed.
528 Normally, all temporaries are freed after the execution of the statement
529 in which they were created. However, if we are inside a ({...}) grouping,
530 the result may be in a temporary and hence must be preserved. If the
531 result could be in a temporary, we preserve it if we can determine which
532 one it is in. If we cannot determine which temporary may contain the
533 result, all temporaries are preserved. A temporary is preserved by
0ab48139 534 pretending it was allocated at the previous nesting level. */
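/* Hedged usage sketch (illustrative, not part of the original source):
   expansion code typically brackets a statement with

     push_temp_slots ();
     rtx tmp = assign_stack_temp (mode, size);
     ... expand the statement; call preserve_temp_slots (result) if the
         result may live in a temporary ...
     pop_temp_slots ();

   pop_temp_slots frees every slot still in use at the current level, so
   temporaries die with the statement unless preserve_temp_slots has moved
   them to the enclosing level.  */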
fef299ce 535
fb1e4f4a 536struct GTY(()) temp_slot {
fef299ce 537 /* Points to next temporary slot. */
538 struct temp_slot *next;
539 /* Points to previous temporary slot. */
540 struct temp_slot *prev;
541 /* The rtx used to reference the slot. */
542 rtx slot;
fef299ce 543 /* The size, in units, of the slot. */
544 HOST_WIDE_INT size;
545 /* The type of the object in the slot, or zero if it doesn't correspond
546 to a type. We use this to determine whether a slot can be reused.
547 It can be reused if objects of the type of the new slot will always
548 conflict with objects of the type of the old slot. */
549 tree type;
0ac758f7 550 /* The alignment (in bits) of the slot. */
551 unsigned int align;
fef299ce 552 /* Nonzero if this temporary is currently in use. */
553 char in_use;
fef299ce 554 /* Nesting level at which this slot is being used. */
555 int level;
fef299ce 556 /* The offset of the slot from the frame_pointer, including extra space
557 for alignment. This info is for combine_temp_slots. */
558 HOST_WIDE_INT base_offset;
559 /* The size of the slot, including extra space for alignment. This
560 info is for combine_temp_slots. */
561 HOST_WIDE_INT full_size;
562};
563
2ef51f0e 564/* Entry for the below hash table. */
565struct GTY((for_user)) temp_slot_address_entry {
fef299ce 566 hashval_t hash;
567 rtx address;
568 struct temp_slot *temp_slot;
569};
570
b594087e 571struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
2ef51f0e 572{
573 static hashval_t hash (temp_slot_address_entry *);
574 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
575};
576
577/* A table of addresses that represent a stack slot. The table is a mapping
578 from address RTXen to a temp slot. */
579static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
580static size_t n_temp_slots_in_use;
581
a6629703 582/* Removes temporary slot TEMP from LIST. */
583
584static void
585cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
586{
587 if (temp->next)
588 temp->next->prev = temp->prev;
589 if (temp->prev)
590 temp->prev->next = temp->next;
591 else
592 *list = temp->next;
593
594 temp->prev = temp->next = NULL;
595}
596
597/* Inserts temporary slot TEMP to LIST. */
598
599static void
600insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
601{
602 temp->next = *list;
603 if (*list)
604 (*list)->prev = temp;
605 temp->prev = NULL;
606 *list = temp;
607}
608
609/* Returns the list of used temp slots at LEVEL. */
610
611static struct temp_slot **
612temp_slots_at_level (int level)
613{
f1f41a6c 614 if (level >= (int) vec_safe_length (used_temp_slots))
615 vec_safe_grow_cleared (used_temp_slots, level + 1);
a6629703 616
f1f41a6c 617 return &(*used_temp_slots)[level];
a6629703 618}
619
620/* Returns the maximal temporary slot level. */
621
622static int
623max_slot_level (void)
624{
625 if (!used_temp_slots)
626 return -1;
627
f1f41a6c 628 return used_temp_slots->length () - 1;
a6629703 629}
630
631/* Moves temporary slot TEMP to LEVEL. */
632
633static void
634move_slot_to_level (struct temp_slot *temp, int level)
635{
636 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
637 insert_slot_to_list (temp, temp_slots_at_level (level));
638 temp->level = level;
639}
640
641/* Make temporary slot TEMP available. */
642
643static void
644make_slot_available (struct temp_slot *temp)
645{
646 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
647 insert_slot_to_list (temp, &avail_temp_slots);
648 temp->in_use = 0;
649 temp->level = -1;
fc3c948c 650 n_temp_slots_in_use--;
a6629703 651}
fef299ce 652
653/* Compute the hash value for an address -> temp slot mapping.
654 The value is cached on the mapping entry. */
655static hashval_t
656temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
657{
658 int do_not_record = 0;
659 return hash_rtx (t->address, GET_MODE (t->address),
660 &do_not_record, NULL, false);
661}
662
663/* Return the hash value for an address -> temp slot mapping. */
2ef51f0e 664hashval_t
665temp_address_hasher::hash (temp_slot_address_entry *t)
fef299ce 666{
fef299ce 667 return t->hash;
668}
669
670/* Compare two address -> temp slot mapping entries. */
2ef51f0e 671bool
672temp_address_hasher::equal (temp_slot_address_entry *t1,
673 temp_slot_address_entry *t2)
fef299ce 674{
fef299ce 675 return exp_equiv_p (t1->address, t2->address, 0, true);
676}
677
678/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
679static void
680insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
681{
25a27413 682 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
fef299ce 683 t->address = address;
684 t->temp_slot = temp_slot;
685 t->hash = temp_slot_address_compute_hash (t);
2ef51f0e 686 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
fef299ce 687}
688
689/* Remove an address -> temp slot mapping entry if the temp slot is
690 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
2ef51f0e 691int
692remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
fef299ce 693{
2ef51f0e 694 const struct temp_slot_address_entry *t = *slot;
fef299ce 695 if (! t->temp_slot->in_use)
2ef51f0e 696 temp_slot_address_table->clear_slot (slot);
fef299ce 697 return 1;
698}
699
700/* Remove all mappings of addresses to unused temp slots. */
701static void
702remove_unused_temp_slot_addresses (void)
703{
fc3c948c 704 /* Use quicker clearing if there aren't any active temp slots. */
705 if (n_temp_slots_in_use)
2ef51f0e 706 temp_slot_address_table->traverse
707 <void *, remove_unused_temp_slot_addresses_1> (NULL);
fc3c948c 708 else
2ef51f0e 709 temp_slot_address_table->empty ();
fef299ce 710}
711
712/* Find the temp slot corresponding to the object at address X. */
713
714static struct temp_slot *
715find_temp_slot_from_address (rtx x)
716{
717 struct temp_slot *p;
718 struct temp_slot_address_entry tmp, *t;
719
720 /* First try the easy way:
721 See if X exists in the address -> temp slot mapping. */
722 tmp.address = x;
723 tmp.temp_slot = NULL;
724 tmp.hash = temp_slot_address_compute_hash (&tmp);
2ef51f0e 725 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
fef299ce 726 if (t)
727 return t->temp_slot;
728
729 /* If we have a sum involving a register, see if it points to a temp
730 slot. */
731 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
732 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
733 return p;
734 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
735 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
736 return p;
737
738 /* Last resort: Address is a virtual stack var address. */
739 if (GET_CODE (x) == PLUS
740 && XEXP (x, 0) == virtual_stack_vars_rtx
971ba038 741 && CONST_INT_P (XEXP (x, 1)))
fef299ce 742 {
743 int i;
744 for (i = max_slot_level (); i >= 0; i--)
745 for (p = *temp_slots_at_level (i); p; p = p->next)
746 {
747 if (INTVAL (XEXP (x, 1)) >= p->base_offset
748 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
749 return p;
750 }
751 }
752
753 return NULL;
754}
897b77d6 755\f
756/* Allocate a temporary stack slot and record it for possible later
757 reuse.
758
759 MODE is the machine mode to be given to the returned rtx.
760
761 SIZE is the size in units of the space required. We do no rounding here
762 since assign_stack_local will do any required rounding.
763
59241190 764 TYPE is the type that will be used for the stack slot. */
897b77d6 765
2b96c5f6 766rtx
3754d046 767assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
0ab48139 768 tree type)
897b77d6 769{
d3e10bed 770 unsigned int align;
a6629703 771 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
84be287d 772 rtx slot;
897b77d6 773
babc13fa 774 /* If SIZE is -1 it means that somebody tried to allocate a temporary
775 of a variable size. */
fdada98f 776 gcc_assert (size != -1);
babc13fa 777
ad33891d 778 align = get_stack_local_alignment (type, mode);
9bd87fd2 779
780 /* Try to find an available, already-allocated temporary of the proper
781 mode which meets the size and alignment requirements. Choose the
867eb367 782 smallest one with the closest alignment.
48e1416a 783
867eb367 784 If assign_stack_temp is called outside of the tree->rtl expansion,
785 we cannot reuse the stack slots (that may still refer to
786 VIRTUAL_STACK_VARS_REGNUM). */
787 if (!virtuals_instantiated)
a6629703 788 {
867eb367 789 for (p = avail_temp_slots; p; p = p->next)
a6629703 790 {
867eb367 791 if (p->align >= align && p->size >= size
792 && GET_MODE (p->slot) == mode
793 && objects_must_conflict_p (p->type, type)
794 && (best_p == 0 || best_p->size > p->size
795 || (best_p->size == p->size && best_p->align > p->align)))
a6629703 796 {
867eb367 797 if (p->align == align && p->size == size)
798 {
799 selected = p;
800 cut_slot_from_list (selected, &avail_temp_slots);
801 best_p = 0;
802 break;
803 }
804 best_p = p;
a6629703 805 }
a6629703 806 }
807 }
897b77d6 808
809 /* Make our best, if any, the one to use. */
810 if (best_p)
49d3d726 811 {
a6629703 812 selected = best_p;
813 cut_slot_from_list (selected, &avail_temp_slots);
814
49d3d726 815 /* If there are enough aligned bytes left over, make them into a new
816 temp_slot so that the extra bytes don't get wasted. Do this only
817 for BLKmode slots, so that we can be sure of the alignment. */
f7c44134 818 if (GET_MODE (best_p->slot) == BLKmode)
49d3d726 819 {
9bd87fd2 820 int alignment = best_p->align / BITS_PER_UNIT;
997d68fe 821 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
49d3d726 822
823 if (best_p->size - rounded_size >= alignment)
824 {
25a27413 825 p = ggc_alloc<temp_slot> ();
0ab48139 826 p->in_use = 0;
49d3d726 827 p->size = best_p->size - rounded_size;
e8a637a3 828 p->base_offset = best_p->base_offset + rounded_size;
829 p->full_size = best_p->full_size - rounded_size;
43283c91 830 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
9bd87fd2 831 p->align = best_p->align;
387bc205 832 p->type = best_p->type;
a6629703 833 insert_slot_to_list (p, &avail_temp_slots);
49d3d726 834
941522d6 835 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
836 stack_slot_list);
49d3d726 837
838 best_p->size = rounded_size;
5ea3c815 839 best_p->full_size = rounded_size;
49d3d726 840 }
841 }
49d3d726 842 }
06ebc183 843
897b77d6 844 /* If we still didn't find one, make a new temporary. */
a6629703 845 if (selected == 0)
897b77d6 846 {
997d68fe 847 HOST_WIDE_INT frame_offset_old = frame_offset;
848
25a27413 849 p = ggc_alloc<temp_slot> ();
997d68fe 850
d61726bc 851 /* We are passing an explicit alignment request to assign_stack_local.
852 One side effect of that is assign_stack_local will not round SIZE
853 to ensure the frame offset remains suitably aligned.
854
855 So for requests which depended on the rounding of SIZE, we go ahead
856 and round it now. We also make sure ALIGNMENT is at least
857 BIGGEST_ALIGNMENT. */
fdada98f 858 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
943d8723 859 p->slot = assign_stack_local_1 (mode,
860 (mode == BLKmode
861 ? CEIL_ROUND (size,
862 (int) align
863 / BITS_PER_UNIT)
864 : size),
865 align, 0);
9bd87fd2 866
867 p->align = align;
997d68fe 868
ef4d68c5 869 /* The following slot size computation is necessary because we don't
870 know the actual size of the temporary slot until assign_stack_local
871 has performed all the frame alignment and size rounding for the
d53be447 872 requested temporary. Note that extra space added for alignment
873 can be either above or below this stack slot depending on which
874 way the frame grows. We include the extra space if and only if it
875 is above this slot. */
d28d5017 876 if (FRAME_GROWS_DOWNWARD)
877 p->size = frame_offset_old - frame_offset;
878 else
879 p->size = size;
997d68fe 880
d53be447 881 /* Now define the fields used by combine_temp_slots. */
d28d5017 882 if (FRAME_GROWS_DOWNWARD)
883 {
884 p->base_offset = frame_offset;
885 p->full_size = frame_offset_old - frame_offset;
886 }
887 else
888 {
889 p->base_offset = frame_offset_old;
890 p->full_size = frame_offset - frame_offset_old;
891 }
a6629703 892
893 selected = p;
897b77d6 894 }
895
a6629703 896 p = selected;
897b77d6 897 p->in_use = 1;
387bc205 898 p->type = type;
fcb807f8 899 p->level = temp_slot_level;
fc3c948c 900 n_temp_slots_in_use++;
21c867df 901
a6629703 902 pp = temp_slots_at_level (p->level);
903 insert_slot_to_list (p, pp);
fef299ce 904 insert_temp_slot_address (XEXP (p->slot, 0), p);
84be287d 905
906 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
907 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
908 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
f7c44134 909
387bc205 910 /* If we know the alias set for the memory that will be used, use
911 it. If there's no TYPE, then we don't know anything about the
912 alias set for the memory. */
84be287d 913 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
914 set_mem_align (slot, align);
387bc205 915
6312a35e 916 /* If a type is specified, set the relevant flags. */
f7c44134 917 if (type != 0)
402f6a9e 918 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
43283c91 919 MEM_NOTRAP_P (slot) = 1;
f7c44134 920
84be287d 921 return slot;
897b77d6 922}
9bd87fd2 923
924/* Allocate a temporary stack slot and record it for possible later
0ab48139 925 reuse. The first two arguments are the same as in the preceding function. */
9bd87fd2 926
927rtx
3754d046 928assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
9bd87fd2 929{
0ab48139 930 return assign_stack_temp_for_type (mode, size, NULL_TREE);
9bd87fd2 931}
ad6d0e80 932\f
567c22a9 933/* Assign a temporary.
934 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
935 which should be used in error messages. In either case, we
936 allocate a temporary of the given type.
9c457457 937 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
30dd806d 938 it is 0 if a register is OK.
939 DONT_PROMOTE is 1 if we should not promote values in registers
940 to wider modes. */
9c457457 941
942rtx
0ab48139 943assign_temp (tree type_or_decl, int memory_required,
de1b648b 944 int dont_promote ATTRIBUTE_UNUSED)
9c457457 945{
567c22a9 946 tree type, decl;
3754d046 947 machine_mode mode;
7752d341 948#ifdef PROMOTE_MODE
567c22a9 949 int unsignedp;
950#endif
951
952 if (DECL_P (type_or_decl))
953 decl = type_or_decl, type = TREE_TYPE (decl);
954 else
955 decl = NULL, type = type_or_decl;
956
957 mode = TYPE_MODE (type);
7752d341 958#ifdef PROMOTE_MODE
78a8ed03 959 unsignedp = TYPE_UNSIGNED (type);
aeb6d7ef 960#endif
ad6d0e80 961
9c457457 962 if (mode == BLKmode || memory_required)
963 {
997d68fe 964 HOST_WIDE_INT size = int_size_in_bytes (type);
9c457457 965 rtx tmp;
966
779a20c8 967 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
968 problems with allocating the stack space. */
969 if (size == 0)
970 size = 1;
971
9c457457 972 /* Unfortunately, we don't yet know how to allocate variable-sized
150edb07 973 temporaries. However, sometimes we can find a fixed upper limit on
974 the size, so try that instead. */
975 else if (size == -1)
976 size = max_int_size_in_bytes (type);
8c3216ae 977
567c22a9 978 /* The size of the temporary may be too large to fit into an integer. */
979 /* ??? Not sure this should happen except for user silliness, so limit
60d903f5 980 this to things that aren't compiler-generated temporaries. The
89f18f73 981 rest of the time we'll die in assign_stack_temp_for_type. */
567c22a9 982 if (decl && size == -1
983 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
984 {
3cf8b391 985 error ("size of variable %q+D is too large", decl);
567c22a9 986 size = 1;
987 }
988
0ab48139 989 tmp = assign_stack_temp_for_type (mode, size, type);
9c457457 990 return tmp;
991 }
ad6d0e80 992
7752d341 993#ifdef PROMOTE_MODE
30dd806d 994 if (! dont_promote)
3b2411a8 995 mode = promote_mode (type, mode, &unsignedp);
9c457457 996#endif
ad6d0e80 997
9c457457 998 return gen_reg_rtx (mode);
999}
ad6d0e80 1000\f
49d3d726 1001/* Combine temporary stack slots which are adjacent on the stack.
1002
1003 This allows for better use of already allocated stack space. This is only
1004 done for BLKmode slots because we can be sure that we won't have alignment
1005 problems in this case. */
1006
3f0895d3 1007static void
de1b648b 1008combine_temp_slots (void)
49d3d726 1009{
a6629703 1010 struct temp_slot *p, *q, *next, *next_q;
997d68fe 1011 int num_slots;
1012
59241190 1013 /* We can't combine slots, because the information about which slot
1014 is in which alias set will be lost. */
1015 if (flag_strict_aliasing)
1016 return;
1017
06ebc183 1018 /* If there are a lot of temp slots, don't do anything unless
cb0ccc1e 1019 we are optimizing at high levels. */
997d68fe 1020 if (! flag_expensive_optimizations)
a6629703 1021 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
997d68fe 1022 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1023 return;
49d3d726 1024
a6629703 1025 for (p = avail_temp_slots; p; p = next)
ccf0a5eb 1026 {
1027 int delete_p = 0;
997d68fe 1028
a6629703 1029 next = p->next;
1030
1031 if (GET_MODE (p->slot) != BLKmode)
1032 continue;
1033
1034 for (q = p->next; q; q = next_q)
ccf0a5eb 1035 {
a6629703 1036 int delete_q = 0;
1037
1038 next_q = q->next;
1039
1040 if (GET_MODE (q->slot) != BLKmode)
1041 continue;
1042
1043 if (p->base_offset + p->full_size == q->base_offset)
1044 {
1045 /* Q comes after P; combine Q into P. */
1046 p->size += q->size;
1047 p->full_size += q->full_size;
1048 delete_q = 1;
1049 }
1050 else if (q->base_offset + q->full_size == p->base_offset)
1051 {
1052 /* P comes after Q; combine P into Q. */
1053 q->size += p->size;
1054 q->full_size += p->full_size;
1055 delete_p = 1;
1056 break;
1057 }
1058 if (delete_q)
1059 cut_slot_from_list (q, &avail_temp_slots);
ccf0a5eb 1060 }
a6629703 1061
1062 /* Either delete P or advance past it. */
1063 if (delete_p)
1064 cut_slot_from_list (p, &avail_temp_slots);
ccf0a5eb 1065 }
49d3d726 1066}
897b77d6 1067\f
f4e36c33 1068/* Indicate that NEW_RTX is an alternate way of referring to the temp
1069 slot that previously was known by OLD_RTX. */
64e90dae 1070
1071void
f4e36c33 1072update_temp_slot_address (rtx old_rtx, rtx new_rtx)
64e90dae 1073{
155b05dc 1074 struct temp_slot *p;
64e90dae 1075
f4e36c33 1076 if (rtx_equal_p (old_rtx, new_rtx))
64e90dae 1077 return;
155b05dc 1078
f4e36c33 1079 p = find_temp_slot_from_address (old_rtx);
155b05dc 1080
f4e36c33 1081 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1082 NEW_RTX is a register, see if one operand of the PLUS is a
1083 temporary location. If so, NEW_RTX points into it. Otherwise,
1084 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1085 in common between them. If so, try a recursive call on those
1086 values. */
155b05dc 1087 if (p == 0)
1088 {
f4e36c33 1089 if (GET_CODE (old_rtx) != PLUS)
8911b943 1090 return;
1091
f4e36c33 1092 if (REG_P (new_rtx))
8911b943 1093 {
f4e36c33 1094 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1095 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
8911b943 1096 return;
1097 }
f4e36c33 1098 else if (GET_CODE (new_rtx) != PLUS)
155b05dc 1099 return;
1100
f4e36c33 1101 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1102 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1103 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1104 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1105 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1106 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1107 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1108 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
155b05dc 1109
1110 return;
1111 }
1112
06ebc183 1113 /* Otherwise add an alias for the temp's address. */
fef299ce 1114 insert_temp_slot_address (new_rtx, p);
64e90dae 1115}
1116
30f413ae 1117/* If X could be a reference to a temporary slot, mark that slot as
1118 belonging to the to one level higher than the current level. If X
1119 matched one of our slots, just mark that one. Otherwise, we can't
0ab48139 1120 easily predict which it is, so upgrade all of them.
897b77d6 1121
1122 This is called when an ({...}) construct occurs and a statement
1123 returns a value in memory. */
1124
1125void
de1b648b 1126preserve_temp_slots (rtx x)
897b77d6 1127{
a6629703 1128 struct temp_slot *p = 0, *next;
897b77d6 1129
c7c7590a 1130 if (x == 0)
0ab48139 1131 return;
41969bd3 1132
e8825bb0 1133 /* If X is a register that is being used as a pointer, see if we have
0ab48139 1134 a temporary slot we know it points to. */
e8825bb0 1135 if (REG_P (x) && REG_POINTER (x))
1136 p = find_temp_slot_from_address (x);
41969bd3 1137
e8825bb0 1138 /* If X is not in memory or is at a constant address, it cannot be in
0ab48139 1139 a temporary slot. */
e8825bb0 1140 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
0ab48139 1141 return;
e8825bb0 1142
1143 /* First see if we can find a match. */
1144 if (p == 0)
1145 p = find_temp_slot_from_address (XEXP (x, 0));
1146
1147 if (p != 0)
1148 {
e8825bb0 1149 if (p->level == temp_slot_level)
0ab48139 1150 move_slot_to_level (p, temp_slot_level - 1);
e8825bb0 1151 return;
41969bd3 1152 }
0dbd1c74 1153
e8825bb0 1154 /* Otherwise, preserve all non-kept slots at this level. */
1155 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
0dbd1c74 1156 {
e8825bb0 1157 next = p->next;
0ab48139 1158 move_slot_to_level (p, temp_slot_level - 1);
e8825bb0 1159 }
c925694c 1160}
1161
e8825bb0 1162/* Free all temporaries used so far. This is normally called at the
1163 end of generating code for a statement. */
c925694c 1164
e8825bb0 1165void
1166free_temp_slots (void)
c925694c 1167{
e8825bb0 1168 struct temp_slot *p, *next;
a4da9a83 1169 bool some_available = false;
c925694c 1170
e8825bb0 1171 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1172 {
1173 next = p->next;
0ab48139 1174 make_slot_available (p);
1175 some_available = true;
e8825bb0 1176 }
c925694c 1177
a4da9a83 1178 if (some_available)
1179 {
1180 remove_unused_temp_slot_addresses ();
1181 combine_temp_slots ();
1182 }
e8825bb0 1183}
c925694c 1184
e8825bb0 1185/* Push deeper into the nesting level for stack temporaries. */
c925694c 1186
e8825bb0 1187void
1188push_temp_slots (void)
c925694c 1189{
e8825bb0 1190 temp_slot_level++;
c925694c 1191}
1192
e8825bb0 1193/* Pop a temporary nesting level. All slots in use in the current level
1194 are freed. */
c925694c 1195
e8825bb0 1196void
1197pop_temp_slots (void)
c925694c 1198{
0ab48139 1199 free_temp_slots ();
e8825bb0 1200 temp_slot_level--;
bf5a43e2 1201}
1202
e8825bb0 1203/* Initialize temporary slots. */
0dbd1c74 1204
1205void
e8825bb0 1206init_temp_slots (void)
0dbd1c74 1207{
e8825bb0 1208 /* We have not allocated any temporaries yet. */
1209 avail_temp_slots = 0;
f1f41a6c 1210 vec_alloc (used_temp_slots, 0);
e8825bb0 1211 temp_slot_level = 0;
fc3c948c 1212 n_temp_slots_in_use = 0;
fef299ce 1213
1214 /* Set up the table to map addresses to temp slots. */
1215 if (! temp_slot_address_table)
2ef51f0e 1216 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
fef299ce 1217 else
2ef51f0e 1218 temp_slot_address_table->empty ();
e8825bb0 1219}
1220\f
ea1760a3 1221/* Functions and data structures to keep track of the values hard regs
1222 had at the start of the function. */
1223
1224/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1225 and has_hard_reg_initial_val. */
1226typedef struct GTY(()) initial_value_pair {
1227 rtx hard_reg;
1228 rtx pseudo;
1229} initial_value_pair;
1230/* ??? This could be a VEC but there is currently no way to define an
1231 opaque VEC type. This could be worked around by defining struct
1232 initial_value_pair in function.h. */
1233typedef struct GTY(()) initial_value_struct {
1234 int num_entries;
1235 int max_entries;
1236 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1237} initial_value_struct;
1238
1239/* If a pseudo represents an initial hard reg (or expression), return
1240 it, else return NULL_RTX. */
1241
1242rtx
1243get_hard_reg_initial_reg (rtx reg)
1244{
1245 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1246 int i;
1247
1248 if (ivs == 0)
1249 return NULL_RTX;
1250
1251 for (i = 0; i < ivs->num_entries; i++)
1252 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1253 return ivs->entries[i].hard_reg;
1254
1255 return NULL_RTX;
1256}
1257
1258/* Make sure that there's a pseudo register of mode MODE that stores the
1259 initial value of hard register REGNO. Return an rtx for such a pseudo. */
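/* Illustrative note (an assumption about typical use, not taken from this
   file): back ends commonly call this from their expanders, e.g.

     rtx lr = get_hard_reg_initial_val (Pmode, RETURN_ADDRESS_REGNUM);

   where RETURN_ADDRESS_REGNUM stands for whatever hard register the target
   keeps its return address in; the actual move from the hard register into
   the pseudo is emitted at function entry by emit_initial_value_sets.  */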
1260
1261rtx
3754d046 1262get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
ea1760a3 1263{
1264 struct initial_value_struct *ivs;
1265 rtx rv;
1266
1267 rv = has_hard_reg_initial_val (mode, regno);
1268 if (rv)
1269 return rv;
1270
1271 ivs = crtl->hard_reg_initial_vals;
1272 if (ivs == 0)
1273 {
25a27413 1274 ivs = ggc_alloc<initial_value_struct> ();
ea1760a3 1275 ivs->num_entries = 0;
1276 ivs->max_entries = 5;
25a27413 1277 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
ea1760a3 1278 crtl->hard_reg_initial_vals = ivs;
1279 }
1280
1281 if (ivs->num_entries >= ivs->max_entries)
1282 {
1283 ivs->max_entries += 5;
1284 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1285 ivs->max_entries);
1286 }
1287
1288 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1289 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1290
1291 return ivs->entries[ivs->num_entries++].pseudo;
1292}
1293
1294/* See if get_hard_reg_initial_val has been used to create a pseudo
1295 for the initial value of hard register REGNO in mode MODE. Return
1296 the associated pseudo if so, otherwise return NULL. */
1297
1298rtx
3754d046 1299has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
ea1760a3 1300{
1301 struct initial_value_struct *ivs;
1302 int i;
1303
1304 ivs = crtl->hard_reg_initial_vals;
1305 if (ivs != 0)
1306 for (i = 0; i < ivs->num_entries; i++)
1307 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1308 && REGNO (ivs->entries[i].hard_reg) == regno)
1309 return ivs->entries[i].pseudo;
1310
1311 return NULL_RTX;
1312}
1313
1314unsigned int
1315emit_initial_value_sets (void)
1316{
1317 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1318 int i;
8bb2625b 1319 rtx_insn *seq;
ea1760a3 1320
1321 if (ivs == 0)
1322 return 0;
1323
1324 start_sequence ();
1325 for (i = 0; i < ivs->num_entries; i++)
1326 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1327 seq = get_insns ();
1328 end_sequence ();
1329
1330 emit_insn_at_entry (seq);
1331 return 0;
1332}
1333
1334/* Return the hardreg-pseudoreg initial values pair entry I and
1335 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1336bool
1337initial_value_entry (int i, rtx *hreg, rtx *preg)
1338{
1339 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1340 if (!ivs || i >= ivs->num_entries)
1341 return false;
1342
1343 *hreg = ivs->entries[i].hard_reg;
1344 *preg = ivs->entries[i].pseudo;
1345 return true;
1346}
1347\f
e8825bb0 1348/* These routines are responsible for converting virtual register references
1349 to the actual hard register references once RTL generation is complete.
06ebc183 1350
e8825bb0 1351 The following four variables are used for communication between the
1352 routines. They contain the offsets of the virtual registers from their
1353 respective hard registers. */
c925694c 1354
e8825bb0 1355static int in_arg_offset;
1356static int var_offset;
1357static int dynamic_offset;
1358static int out_arg_offset;
1359static int cfa_offset;
a8636638 1360
e8825bb0 1361/* In most machines, the stack pointer register is equivalent to the bottom
1362 of the stack. */
06ebc183 1363
e8825bb0 1364#ifndef STACK_POINTER_OFFSET
1365#define STACK_POINTER_OFFSET 0
1366#endif
bf5a43e2 1367
02114c95 1368#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1369#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1370#endif
1371
e8825bb0 1372/* If not defined, pick an appropriate default for the offset of dynamically
1373 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
02114c95 1374 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
c925694c 1375
e8825bb0 1376#ifndef STACK_DYNAMIC_OFFSET
a8636638 1377
e8825bb0 1378/* The bottom of the stack points to the actual arguments. If
1379 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1380 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1381 stack space for register parameters is not pushed by the caller, but
1382 rather part of the fixed stack areas and hence not included in
abe32cce 1383 `crtl->outgoing_args_size'. Nevertheless, we must allow
e8825bb0 1384 for it when allocating stack dynamic objects. */
a8636638 1385
02114c95 1386#ifdef INCOMING_REG_PARM_STACK_SPACE
e8825bb0 1387#define STACK_DYNAMIC_OFFSET(FNDECL) \
1388((ACCUMULATE_OUTGOING_ARGS \
abe32cce 1389 ? (crtl->outgoing_args_size \
22c61100 1390 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
02114c95 1391 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
63c68695 1392 : 0) + (STACK_POINTER_OFFSET))
e8825bb0 1393#else
1394#define STACK_DYNAMIC_OFFSET(FNDECL) \
abe32cce 1395((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
e8825bb0 1396 + (STACK_POINTER_OFFSET))
1397#endif
1398#endif
f678883b 1399
e3d5af87 1400\f
f15c4004 1401/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1402 is a virtual register, return the equivalent hard register and set the
1403 offset indirectly through the pointer. Otherwise, return 0. */
897b77d6 1404
f15c4004 1405static rtx
1406instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
897b77d6 1407{
f4e36c33 1408 rtx new_rtx;
f15c4004 1409 HOST_WIDE_INT offset;
897b77d6 1410
f15c4004 1411 if (x == virtual_incoming_args_rtx)
27a7a23a 1412 {
f6754469 1413 if (stack_realign_drap)
27a7a23a 1414 {
f6754469 1415 /* Replace virtual_incoming_args_rtx with internal arg
1416 pointer if DRAP is used to realign stack. */
f4e36c33 1417 new_rtx = crtl->args.internal_arg_pointer;
27a7a23a 1418 offset = 0;
1419 }
1420 else
f4e36c33 1421 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
27a7a23a 1422 }
f15c4004 1423 else if (x == virtual_stack_vars_rtx)
f4e36c33 1424 new_rtx = frame_pointer_rtx, offset = var_offset;
f15c4004 1425 else if (x == virtual_stack_dynamic_rtx)
f4e36c33 1426 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
f15c4004 1427 else if (x == virtual_outgoing_args_rtx)
f4e36c33 1428 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
f15c4004 1429 else if (x == virtual_cfa_rtx)
da72c083 1430 {
1431#ifdef FRAME_POINTER_CFA_OFFSET
f4e36c33 1432 new_rtx = frame_pointer_rtx;
da72c083 1433#else
f4e36c33 1434 new_rtx = arg_pointer_rtx;
da72c083 1435#endif
1436 offset = cfa_offset;
1437 }
60778e62 1438 else if (x == virtual_preferred_stack_boundary_rtx)
1439 {
1440 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1441 offset = 0;
1442 }
f15c4004 1443 else
1444 return NULL_RTX;
897b77d6 1445
f15c4004 1446 *poffset = offset;
f4e36c33 1447 return new_rtx;
897b77d6 1448}
1449
2d184b77 1450/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1451 registers present inside of *LOC. The expression is simplified,
1452 as much as possible, but is not to be considered "valid" in any sense
1453 implied by the target. Return true if any change is made. */
897b77d6 1454
2d184b77 1455static bool
1456instantiate_virtual_regs_in_rtx (rtx *loc)
897b77d6 1457{
2d184b77 1458 if (!*loc)
1459 return false;
1460 bool changed = false;
1461 subrtx_ptr_iterator::array_type array;
1462 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
897b77d6 1463 {
2d184b77 1464 rtx *loc = *iter;
1465 if (rtx x = *loc)
f15c4004 1466 {
2d184b77 1467 rtx new_rtx;
1468 HOST_WIDE_INT offset;
1469 switch (GET_CODE (x))
1470 {
1471 case REG:
1472 new_rtx = instantiate_new_reg (x, &offset);
1473 if (new_rtx)
1474 {
1475 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1476 changed = true;
1477 }
1478 iter.skip_subrtxes ();
1479 break;
f15c4004 1480
2d184b77 1481 case PLUS:
1482 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1483 if (new_rtx)
1484 {
1485 XEXP (x, 0) = new_rtx;
1486 *loc = plus_constant (GET_MODE (x), x, offset, true);
1487 changed = true;
1488 iter.skip_subrtxes ();
1489 break;
1490 }
997d68fe 1491
2d184b77 1492 /* FIXME -- from old code */
1493 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1494 we can commute the PLUS and SUBREG because pointers into the
1495 frame are well-behaved. */
1496 break;
5970b26a 1497
2d184b77 1498 default:
1499 break;
1500 }
1501 }
897b77d6 1502 }
2d184b77 1503 return changed;
897b77d6 1504}
1505
f15c4004 1506/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1507 matches the predicate for insn CODE operand OPERAND. */
897b77d6 1508
f15c4004 1509static int
1510safe_insn_predicate (int code, int operand, rtx x)
897b77d6 1511{
39c56a89 1512 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
f15c4004 1513}
6d0423b8 1514
f15c4004 1515/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1516 registers present inside of insn. The result will be a valid insn. */
6d0423b8 1517
1518static void
8bb2625b 1519instantiate_virtual_regs_in_insn (rtx_insn *insn)
6d0423b8 1520{
f15c4004 1521 HOST_WIDE_INT offset;
1522 int insn_code, i;
27ca6129 1523 bool any_change = false;
8bb2625b 1524 rtx set, new_rtx, x;
1525 rtx_insn *seq;
00dfb616 1526
f15c4004 1527 /* There are some special cases to be handled first. */
1528 set = single_set (insn);
1529 if (set)
00dfb616 1530 {
f15c4004 1531 /* We're allowed to assign to a virtual register. This is interpreted
1532 to mean that the underlying register gets assigned the inverse
1533 transformation. This is used, for example, in the handling of
1534 non-local gotos. */
f4e36c33 1535 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1536 if (new_rtx)
f15c4004 1537 {
1538 start_sequence ();
00dfb616 1539
2d184b77 1540 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
f4e36c33 1541 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
5d5ee71f 1542 gen_int_mode (-offset, GET_MODE (new_rtx)));
f4e36c33 1543 x = force_operand (x, new_rtx);
1544 if (x != new_rtx)
1545 emit_move_insn (new_rtx, x);
6d0423b8 1546
f15c4004 1547 seq = get_insns ();
1548 end_sequence ();
6d0423b8 1549
f15c4004 1550 emit_insn_before (seq, insn);
1551 delete_insn (insn);
1552 return;
1553 }
6d0423b8 1554
f15c4004 1555 /* Handle a straight copy from a virtual register by generating a
1556 new add insn. The difference between this and falling through
1557 to the generic case is avoiding a new pseudo and eliminating a
1558 move insn in the initial rtl stream. */
f4e36c33 1559 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1560 if (new_rtx && offset != 0
f15c4004 1561 && REG_P (SET_DEST (set))
1562 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1563 {
1564 start_sequence ();
6d0423b8 1565
0359f9f5 1566 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1567 gen_int_mode (offset,
1568 GET_MODE (SET_DEST (set))),
1569 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
f15c4004 1570 if (x != SET_DEST (set))
1571 emit_move_insn (SET_DEST (set), x);
02e7a332 1572
f15c4004 1573 seq = get_insns ();
1574 end_sequence ();
e3f529ab 1575
f15c4004 1576 emit_insn_before (seq, insn);
1577 delete_insn (insn);
e3f529ab 1578 return;
f15c4004 1579 }
6d0423b8 1580
f15c4004 1581 extract_insn (insn);
27ca6129 1582 insn_code = INSN_CODE (insn);
6d0423b8 1583
f15c4004 1584 /* Handle a plus involving a virtual register by determining if the
1585 operands remain valid if they're modified in place. */
1586 if (GET_CODE (SET_SRC (set)) == PLUS
1587 && recog_data.n_operands >= 3
1588 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1589 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
971ba038 1590 && CONST_INT_P (recog_data.operand[2])
f4e36c33 1591 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
f15c4004 1592 {
1593 offset += INTVAL (recog_data.operand[2]);
6d0423b8 1594
f15c4004 1595 /* If the sum is zero, then replace with a plain move. */
27ca6129 1596 if (offset == 0
1597 && REG_P (SET_DEST (set))
1598 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
f15c4004 1599 {
1600 start_sequence ();
f4e36c33 1601 emit_move_insn (SET_DEST (set), new_rtx);
f15c4004 1602 seq = get_insns ();
1603 end_sequence ();
bc17f7a4 1604
f15c4004 1605 emit_insn_before (seq, insn);
1606 delete_insn (insn);
1607 return;
1608 }
bc17f7a4 1609
f15c4004 1610 x = gen_int_mode (offset, recog_data.operand_mode[2]);
f15c4004 1611
1612 /* Using validate_change and apply_change_group here leaves
1613 recog_data in an invalid state. Since we know exactly what
1614 we want to check, do those two by hand. */
f4e36c33 1615 if (safe_insn_predicate (insn_code, 1, new_rtx)
f15c4004 1616 && safe_insn_predicate (insn_code, 2, x))
1617 {
f4e36c33 1618 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
f15c4004 1619 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1620 any_change = true;
27ca6129 1621
1622 /* Fall through into the regular operand fixup loop in
1623 order to take care of operands other than 1 and 2. */
f15c4004 1624 }
1625 }
1626 }
bc17f7a4 1627 else
27ca6129 1628 {
1629 extract_insn (insn);
1630 insn_code = INSN_CODE (insn);
1631 }
dd79abfb 1632
f15c4004 1633 /* In the general case, we expect virtual registers to appear only in
1634 operands, and then only as either bare registers or inside memories. */
1635 for (i = 0; i < recog_data.n_operands; ++i)
1636 {
1637 x = recog_data.operand[i];
1638 switch (GET_CODE (x))
1639 {
1640 case MEM:
1641 {
1642 rtx addr = XEXP (x, 0);
f15c4004 1643
2d184b77 1644 if (!instantiate_virtual_regs_in_rtx (&addr))
f15c4004 1645 continue;
1646
1647 start_sequence ();
5cc04e45 1648 x = replace_equiv_address (x, addr, true);
7e507322 1649 /* It may happen that the address with the virtual reg
1650 was valid (e.g. based on the virtual stack reg, which might
1651 be acceptable to the predicates with all offsets), whereas
 1652	 the address no longer is, for instance when the address
 1653	 still has an offset but the base reg is no longer
 1654	 virtual-stack-reg.  Below we would do a force_reg on the whole operand,
1655 but this insn might actually only accept memory. Hence,
1656 before doing that last resort, try to reload the address into
1657 a register, so this operand stays a MEM. */
1658 if (!safe_insn_predicate (insn_code, i, x))
1659 {
1660 addr = force_reg (GET_MODE (addr), addr);
5cc04e45 1661 x = replace_equiv_address (x, addr, true);
7e507322 1662 }
f15c4004 1663 seq = get_insns ();
1664 end_sequence ();
1665 if (seq)
1666 emit_insn_before (seq, insn);
1667 }
1668 break;
1669
1670 case REG:
f4e36c33 1671 new_rtx = instantiate_new_reg (x, &offset);
1672 if (new_rtx == NULL)
f15c4004 1673 continue;
1674 if (offset == 0)
f4e36c33 1675 x = new_rtx;
f15c4004 1676 else
1677 {
1678 start_sequence ();
897b77d6 1679
f15c4004 1680 /* Careful, special mode predicates may have stuff in
1681 insn_data[insn_code].operand[i].mode that isn't useful
1682 to us for computing a new value. */
1683 /* ??? Recognize address_operand and/or "p" constraints
 1684	 to see if (plus new offset) is valid before we put
1685 this through expand_simple_binop. */
f4e36c33 1686 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
0359f9f5 1687 gen_int_mode (offset, GET_MODE (x)),
1688 NULL_RTX, 1, OPTAB_LIB_WIDEN);
f15c4004 1689 seq = get_insns ();
1690 end_sequence ();
1691 emit_insn_before (seq, insn);
1692 }
1693 break;
897b77d6 1694
f15c4004 1695 case SUBREG:
f4e36c33 1696 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1697 if (new_rtx == NULL)
f15c4004 1698 continue;
1699 if (offset != 0)
1700 {
1701 start_sequence ();
0359f9f5 1702 new_rtx = expand_simple_binop
1703 (GET_MODE (new_rtx), PLUS, new_rtx,
1704 gen_int_mode (offset, GET_MODE (new_rtx)),
1705 NULL_RTX, 1, OPTAB_LIB_WIDEN);
f15c4004 1706 seq = get_insns ();
1707 end_sequence ();
1708 emit_insn_before (seq, insn);
1709 }
f4e36c33 1710 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1711 GET_MODE (new_rtx), SUBREG_BYTE (x));
024f0a8a 1712 gcc_assert (x);
f15c4004 1713 break;
897b77d6 1714
f15c4004 1715 default:
1716 continue;
1717 }
897b77d6 1718
f15c4004 1719 /* At this point, X contains the new value for the operand.
1720 Validate the new value vs the insn predicate. Note that
1721 asm insns will have insn_code -1 here. */
1722 if (!safe_insn_predicate (insn_code, i, x))
c5159852 1723 {
1724 start_sequence ();
83b6c9db 1725 if (REG_P (x))
1726 {
1727 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1728 x = copy_to_reg (x);
1729 }
1730 else
1731 x = force_reg (insn_data[insn_code].operand[i].mode, x);
c5159852 1732 seq = get_insns ();
1733 end_sequence ();
1734 if (seq)
1735 emit_insn_before (seq, insn);
1736 }
897b77d6 1737
f15c4004 1738 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1739 any_change = true;
1740 }
897b77d6 1741
f15c4004 1742 if (any_change)
1743 {
1744 /* Propagate operand changes into the duplicates. */
1745 for (i = 0; i < recog_data.n_dups; ++i)
1746 *recog_data.dup_loc[i]
cdf37bc1 1747 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
dd79abfb 1748
f15c4004 1749 /* Force re-recognition of the instruction for validation. */
1750 INSN_CODE (insn) = -1;
1751 }
897b77d6 1752
f15c4004 1753 if (asm_noperands (PATTERN (insn)) >= 0)
897b77d6 1754 {
f15c4004 1755 if (!check_asm_operands (PATTERN (insn)))
897b77d6 1756 {
f15c4004 1757 error_for_asm (insn, "impossible constraint in %<asm%>");
33a7b2d7 1758 /* For asm goto, instead of fixing up all the edges
1759 just clear the template and clear input operands
1760 (asm goto doesn't have any output operands). */
1761 if (JUMP_P (insn))
1762 {
1763 rtx asm_op = extract_asm_operands (PATTERN (insn));
1764 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1765 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1766 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1767 }
1768 else
1769 delete_insn (insn);
f15c4004 1770 }
1771 }
1772 else
1773 {
1774 if (recog_memoized (insn) < 0)
1775 fatal_insn_not_found (insn);
1776 }
1777}
155b05dc 1778
f15c4004 1779/* Subroutine of instantiate_decls. Given RTL representing a decl,
1780 do any instantiation required. */
155b05dc 1781
bc5e6ea1 1782void
1783instantiate_decl_rtl (rtx x)
f15c4004 1784{
1785 rtx addr;
897b77d6 1786
f15c4004 1787 if (x == 0)
1788 return;
897b77d6 1789
f15c4004 1790 /* If this is a CONCAT, recurse for the pieces. */
1791 if (GET_CODE (x) == CONCAT)
1792 {
bc5e6ea1 1793 instantiate_decl_rtl (XEXP (x, 0));
1794 instantiate_decl_rtl (XEXP (x, 1));
f15c4004 1795 return;
1796 }
897b77d6 1797
f15c4004 1798 /* If this is not a MEM, no need to do anything. Similarly if the
1799 address is a constant or a register that is not a virtual register. */
1800 if (!MEM_P (x))
1801 return;
897b77d6 1802
f15c4004 1803 addr = XEXP (x, 0);
1804 if (CONSTANT_P (addr)
1805 || (REG_P (addr)
1806 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1807 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1808 return;
897b77d6 1809
2d184b77 1810 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
f15c4004 1811}
897b77d6 1812
9338678e 1813/* Helper for instantiate_decls called via walk_tree: Process all decls
1814 in the given DECL_VALUE_EXPR. */
1815
1816static tree
1817instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1818{
1819 tree t = *tp;
75a70cf9 1820 if (! EXPR_P (t))
9338678e 1821 {
1822 *walk_subtrees = 0;
95b985e5 1823 if (DECL_P (t))
1824 {
1825 if (DECL_RTL_SET_P (t))
1826 instantiate_decl_rtl (DECL_RTL (t));
1827 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1828 && DECL_INCOMING_RTL (t))
1829 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1830 if ((TREE_CODE (t) == VAR_DECL
1831 || TREE_CODE (t) == RESULT_DECL)
1832 && DECL_HAS_VALUE_EXPR_P (t))
1833 {
1834 tree v = DECL_VALUE_EXPR (t);
1835 walk_tree (&v, instantiate_expr, NULL, NULL);
1836 }
1837 }
9338678e 1838 }
1839 return NULL;
1840}
1841
f15c4004 1842/* Subroutine of instantiate_decls: Process all decls in the given
1843 BLOCK node and all its subblocks. */
897b77d6 1844
f15c4004 1845static void
1846instantiate_decls_1 (tree let)
1847{
1848 tree t;
897b77d6 1849
1767a056 1850 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
9338678e 1851 {
1852 if (DECL_RTL_SET_P (t))
bc5e6ea1 1853 instantiate_decl_rtl (DECL_RTL (t));
9338678e 1854 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1855 {
1856 tree v = DECL_VALUE_EXPR (t);
1857 walk_tree (&v, instantiate_expr, NULL, NULL);
1858 }
1859 }
897b77d6 1860
f15c4004 1861 /* Process all subblocks. */
93110716 1862 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
f15c4004 1863 instantiate_decls_1 (t);
1864}
897b77d6 1865
f15c4004 1866/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1867 all virtual registers in their DECL_RTL's. */
897b77d6 1868
f15c4004 1869static void
1870instantiate_decls (tree fndecl)
1871{
2ab2ce89 1872 tree decl;
1873 unsigned ix;
897b77d6 1874
f15c4004 1875 /* Process all parameters of the function. */
1767a056 1876 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
f15c4004 1877 {
bc5e6ea1 1878 instantiate_decl_rtl (DECL_RTL (decl));
1879 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
9338678e 1880 if (DECL_HAS_VALUE_EXPR_P (decl))
1881 {
1882 tree v = DECL_VALUE_EXPR (decl);
1883 walk_tree (&v, instantiate_expr, NULL, NULL);
1884 }
f15c4004 1885 }
a51c8974 1886
95b985e5 1887 if ((decl = DECL_RESULT (fndecl))
1888 && TREE_CODE (decl) == RESULT_DECL)
1889 {
1890 if (DECL_RTL_SET_P (decl))
1891 instantiate_decl_rtl (DECL_RTL (decl));
1892 if (DECL_HAS_VALUE_EXPR_P (decl))
1893 {
1894 tree v = DECL_VALUE_EXPR (decl);
1895 walk_tree (&v, instantiate_expr, NULL, NULL);
1896 }
1897 }
1898
eac967db 1899 /* Process the saved static chain if it exists. */
1900 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1901 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1902 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1903
f15c4004 1904 /* Now process all variables defined in the function or its subblocks. */
1905 instantiate_decls_1 (DECL_INITIAL (fndecl));
78fa9ba7 1906
2ab2ce89 1907 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1908 if (DECL_RTL_SET_P (decl))
1909 instantiate_decl_rtl (DECL_RTL (decl));
f1f41a6c 1910 vec_free (cfun->local_decls);
f15c4004 1911}
897b77d6 1912
f15c4004 1913/* Pass through the INSNS of function FNDECL and convert virtual register
1914 references to hard register references. */
897b77d6 1915
2a1990e9 1916static unsigned int
f15c4004 1917instantiate_virtual_regs (void)
1918{
8bb2625b 1919 rtx_insn *insn;
897b77d6 1920
f15c4004 1921 /* Compute the offsets to use for this function. */
1922 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1923 var_offset = STARTING_FRAME_OFFSET;
1924 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1925 out_arg_offset = STACK_POINTER_OFFSET;
da72c083 1926#ifdef FRAME_POINTER_CFA_OFFSET
1927 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1928#else
f15c4004 1929 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
da72c083 1930#endif
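  /* For reference, instantiate_new_reg (earlier in this file) maps
     virtual-incoming-args to the arg pointer plus in_arg_offset,
     virtual-stack-vars to the frame pointer plus var_offset,
     virtual-stack-dynamic and virtual-outgoing-args to the stack pointer
     plus dynamic_offset and out_arg_offset respectively, and virtual-cfa
     to the CFA base register plus cfa_offset.  */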
0dbd1c74 1931
f15c4004 1932 /* Initialize recognition, indicating that volatile is OK. */
1933 init_recog ();
897b77d6 1934
f15c4004 1935 /* Scan through all the insns, instantiating every virtual register still
1936 present. */
ca8a2945 1937 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1938 if (INSN_P (insn))
1939 {
1940 /* These patterns in the instruction stream can never be recognized.
1941 Fortunately, they shouldn't contain virtual registers either. */
91f71fa3 1942 if (GET_CODE (PATTERN (insn)) == USE
ca8a2945 1943 || GET_CODE (PATTERN (insn)) == CLOBBER
ca8a2945 1944 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1945 continue;
1946 else if (DEBUG_INSN_P (insn))
2d184b77 1947 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
ca8a2945 1948 else
1949 instantiate_virtual_regs_in_insn (insn);
201f6961 1950
dd1286fb 1951 if (insn->deleted ())
ca8a2945 1952 continue;
d304b9e1 1953
2d184b77 1954 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
201f6961 1955
ca8a2945 1956 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1957 if (CALL_P (insn))
2d184b77 1958 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
ca8a2945 1959 }
897b77d6 1960
f15c4004 1961 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1962 instantiate_decls (current_function_decl);
1963
bc5e6ea1 1964 targetm.instantiate_decls ();
1965
f15c4004 1966 /* Indicate that, from now on, assign_stack_local should use
1967 frame_pointer_rtx. */
1968 virtuals_instantiated = 1;
990495a7 1969
2a1990e9 1970 return 0;
897b77d6 1971}
77fce4cd 1972
cbe8bda8 1973namespace {
1974
1975const pass_data pass_data_instantiate_virtual_regs =
1976{
1977 RTL_PASS, /* type */
1978 "vregs", /* name */
1979 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 1980 TV_NONE, /* tv_id */
1981 0, /* properties_required */
1982 0, /* properties_provided */
1983 0, /* properties_destroyed */
1984 0, /* todo_flags_start */
1985 0, /* todo_flags_finish */
77fce4cd 1986};
1987
cbe8bda8 1988class pass_instantiate_virtual_regs : public rtl_opt_pass
1989{
1990public:
9af5ce0c 1991 pass_instantiate_virtual_regs (gcc::context *ctxt)
1992 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
cbe8bda8 1993 {}
1994
1995 /* opt_pass methods: */
65b0537f 1996 virtual unsigned int execute (function *)
1997 {
1998 return instantiate_virtual_regs ();
1999 }
cbe8bda8 2000
2001}; // class pass_instantiate_virtual_regs
2002
2003} // anon namespace
2004
2005rtl_opt_pass *
2006make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2007{
2008 return new pass_instantiate_virtual_regs (ctxt);
2009}
2010
897b77d6 2011\f
8f48fc81 2012/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2013 This means a type for which function calls must pass an address to the
2014 function or get an address back from the function.
2015 EXP may be a type node or an expression (whose type is tested). */
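/* For example: with -fpcc-struct-return every aggregate-typed value is
   forced into memory, so this returns 1 for it; likewise when the target's
   return_in_memory hook refuses to return the type in registers.  */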
897b77d6 2016
2017int
fb80456a 2018aggregate_value_p (const_tree exp, const_tree fntype)
897b77d6 2019{
4cd5bb61 2020 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
d5c7cfd2 2021 int i, regno, nregs;
2022 rtx reg;
9308e976 2023
45550790 2024 if (fntype)
2025 switch (TREE_CODE (fntype))
2026 {
2027 case CALL_EXPR:
4cd5bb61 2028 {
2029 tree fndecl = get_callee_fndecl (fntype);
0c93c8a9 2030 if (fndecl)
2031 fntype = TREE_TYPE (fndecl);
2032 else if (CALL_EXPR_FN (fntype))
2033 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2034 else
2035 /* For internal functions, assume nothing needs to be
2036 returned in memory. */
2037 return 0;
4cd5bb61 2038 }
45550790 2039 break;
2040 case FUNCTION_DECL:
4cd5bb61 2041 fntype = TREE_TYPE (fntype);
45550790 2042 break;
2043 case FUNCTION_TYPE:
2044 case METHOD_TYPE:
2045 break;
2046 case IDENTIFIER_NODE:
4cd5bb61 2047 fntype = NULL_TREE;
45550790 2048 break;
2049 default:
4cd5bb61 2050 /* We don't expect other tree types here. */
fdada98f 2051 gcc_unreachable ();
45550790 2052 }
2053
4cd5bb61 2054 if (VOID_TYPE_P (type))
2c8db4fe 2055 return 0;
6f18455e 2056
8df5a43d 2057  /* If a record should be passed the same as its first (and only) member,
2058 don't pass it as an aggregate. */
2059 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2060 return aggregate_value_p (first_field (type), fntype);
2061
806e4c12 2062 /* If the front end has decided that this needs to be passed by
2063 reference, do so. */
2064 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2065 && DECL_BY_REFERENCE (exp))
2066 return 1;
6f18455e 2067
4cd5bb61 2068 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2069 if (fntype && TREE_ADDRESSABLE (fntype))
6f18455e 2070 return 1;
48e1416a 2071
ad87de1e 2072 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
79f1a380 2073 and thus can't be returned in registers. */
2074 if (TREE_ADDRESSABLE (type))
2075 return 1;
4cd5bb61 2076
727a13df 2077 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
897b77d6 2078 return 1;
4cd5bb61 2079
2080 if (targetm.calls.return_in_memory (type, fntype))
2081 return 1;
2082
d5c7cfd2 2083 /* Make sure we have suitable call-clobbered regs to return
2084 the value in; if not, we must return it in memory. */
46b3ff29 2085 reg = hard_function_value (type, 0, fntype, 0);
84d69b33 2086
2087 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2088 it is OK. */
8ad4c111 2089 if (!REG_P (reg))
84d69b33 2090 return 0;
2091
d5c7cfd2 2092 regno = REGNO (reg);
67d6c12b 2093 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
d5c7cfd2 2094 for (i = 0; i < nregs; i++)
2095 if (! call_used_regs[regno + i])
2096 return 1;
4cd5bb61 2097
897b77d6 2098 return 0;
2099}
2100\f
e8825bb0 2101/* Return true if we should assign DECL a pseudo register; false if it
2102 should live on the local stack. */
2103
2104bool
b7bf20db 2105use_register_for_decl (const_tree decl)
e8825bb0 2106{
9af5ce0c 2107 if (!targetm.calls.allocate_stack_slots_for_args ())
658e203c 2108 return true;
48e1416a 2109
e8825bb0 2110 /* Honor volatile. */
2111 if (TREE_SIDE_EFFECTS (decl))
2112 return false;
2113
2114 /* Honor addressability. */
2115 if (TREE_ADDRESSABLE (decl))
2116 return false;
2117
058a1b7a 2118  /* Decl is implicitly addressable by bound stores and loads
2119 if it is an aggregate holding bounds. */
2120 if (chkp_function_instrumented_p (current_function_decl)
2121 && TREE_TYPE (decl)
2122 && !BOUNDED_P (decl)
2123 && chkp_type_has_pointer (TREE_TYPE (decl)))
2124 return false;
2125
e8825bb0 2126 /* Only register-like things go in registers. */
2127 if (DECL_MODE (decl) == BLKmode)
2128 return false;
2129
 2130	  /* If -ffloat-store is specified, don't put explicit float variables
2131 into registers. */
2132 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2133 propagates values across these stores, and it probably shouldn't. */
2134 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2135 return false;
2136
553acd9c 2137 /* If we're not interested in tracking debugging information for
2138 this decl, then we can certainly put it in a register. */
2139 if (DECL_IGNORED_P (decl))
e8825bb0 2140 return true;
2141
f24ccada 2142 if (optimize)
2143 return true;
2144
2145 if (!DECL_REGISTER (decl))
2146 return false;
2147
2148 switch (TREE_CODE (TREE_TYPE (decl)))
2149 {
2150 case RECORD_TYPE:
2151 case UNION_TYPE:
2152 case QUAL_UNION_TYPE:
2153 /* When not optimizing, disregard register keyword for variables with
2154 types containing methods, otherwise the methods won't be callable
2155 from the debugger. */
041558e3 2156 if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
f24ccada 2157 return false;
2158 break;
2159 default:
2160 break;
2161 }
2162
2163 return true;
e8825bb0 2164}
2165
35a569c6 2166/* Structures to communicate between the subroutines of assign_parms.
2167 The first holds data persistent across all parameters, the second
2168 is cleared out for each parameter. */
897b77d6 2169
35a569c6 2170struct assign_parm_data_all
897b77d6 2171{
39cba157 2172 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2173 should become a job of the target or otherwise encapsulated. */
2174 CUMULATIVE_ARGS args_so_far_v;
2175 cumulative_args_t args_so_far;
897b77d6 2176 struct args_size stack_args_size;
35a569c6 2177 tree function_result_decl;
2178 tree orig_fnargs;
c363cb8c 2179 rtx_insn *first_conversion_insn;
2180 rtx_insn *last_conversion_insn;
35a569c6 2181 HOST_WIDE_INT pretend_args_size;
2182 HOST_WIDE_INT extra_pretend_bytes;
2183 int reg_parm_stack_space;
2184};
897b77d6 2185
35a569c6 2186struct assign_parm_data_one
2187{
2188 tree nominal_type;
2189 tree passed_type;
2190 rtx entry_parm;
2191 rtx stack_parm;
3754d046 2192 machine_mode nominal_mode;
2193 machine_mode passed_mode;
2194 machine_mode promoted_mode;
35a569c6 2195 struct locate_and_pad_arg_data locate;
2196 int partial;
2197 BOOL_BITFIELD named_arg : 1;
35a569c6 2198 BOOL_BITFIELD passed_pointer : 1;
2199 BOOL_BITFIELD on_stack : 1;
2200 BOOL_BITFIELD loaded_in_reg : 1;
2201};
eb749d77 2202
058a1b7a 2203struct bounds_parm_data
2204{
2205 assign_parm_data_one parm_data;
2206 tree bounds_parm;
2207 tree ptr_parm;
2208 rtx ptr_entry;
2209 int bound_no;
2210};
2211
35a569c6 2212/* A subroutine of assign_parms. Initialize ALL. */
897b77d6 2213
35a569c6 2214static void
2215assign_parms_initialize_all (struct assign_parm_data_all *all)
2216{
132d5071 2217 tree fntype ATTRIBUTE_UNUSED;
897b77d6 2218
35a569c6 2219 memset (all, 0, sizeof (*all));
2220
2221 fntype = TREE_TYPE (current_function_decl);
2222
2223#ifdef INIT_CUMULATIVE_INCOMING_ARGS
39cba157 2224 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
35a569c6 2225#else
39cba157 2226 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
35a569c6 2227 current_function_decl, -1);
2228#endif
39cba157 2229 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
35a569c6 2230
02114c95 2231#ifdef INCOMING_REG_PARM_STACK_SPACE
2232 all->reg_parm_stack_space
2233 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
35a569c6 2234#endif
2235}
897b77d6 2236
35a569c6 2237/* If ARGS contains entries with complex types, split the entry into two
2238 entries of the component type. Return a new list of substitutions are
2239 needed, else the old list. */
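/* E.g. (sketch): a PARM_DECL of type _Complex double is replaced, when
   targetm.calls.split_complex_arg approves, by two double PARM_DECLs
   holding the real and imaginary parts.  */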
2240
3e992c41 2241static void
f1f41a6c 2242split_complex_args (vec<tree> *args)
35a569c6 2243{
3e992c41 2244 unsigned i;
35a569c6 2245 tree p;
2246
f1f41a6c 2247 FOR_EACH_VEC_ELT (*args, i, p)
35a569c6 2248 {
2249 tree type = TREE_TYPE (p);
2250 if (TREE_CODE (type) == COMPLEX_TYPE
2251 && targetm.calls.split_complex_arg (type))
2252 {
2253 tree decl;
2254 tree subtype = TREE_TYPE (type);
e6427ef0 2255 bool addressable = TREE_ADDRESSABLE (p);
35a569c6 2256
2257 /* Rewrite the PARM_DECL's type with its component. */
3e992c41 2258 p = copy_node (p);
35a569c6 2259 TREE_TYPE (p) = subtype;
2260 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2261 DECL_MODE (p) = VOIDmode;
2262 DECL_SIZE (p) = NULL;
2263 DECL_SIZE_UNIT (p) = NULL;
e6427ef0 2264 /* If this arg must go in memory, put it in a pseudo here.
2265 We can't allow it to go in memory as per normal parms,
2266 because the usual place might not have the imag part
2267 adjacent to the real part. */
2268 DECL_ARTIFICIAL (p) = addressable;
2269 DECL_IGNORED_P (p) = addressable;
2270 TREE_ADDRESSABLE (p) = 0;
35a569c6 2271 layout_decl (p, 0);
f1f41a6c 2272 (*args)[i] = p;
35a569c6 2273
2274 /* Build a second synthetic decl. */
e60a6f7b 2275 decl = build_decl (EXPR_LOCATION (p),
2276 PARM_DECL, NULL_TREE, subtype);
35a569c6 2277 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
e6427ef0 2278 DECL_ARTIFICIAL (decl) = addressable;
2279 DECL_IGNORED_P (decl) = addressable;
35a569c6 2280 layout_decl (decl, 0);
f1f41a6c 2281 args->safe_insert (++i, decl);
35a569c6 2282 }
2283 }
35a569c6 2284}
2285
2286/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2287 the hidden struct return argument, and (abi willing) complex args.
2288 Return the new parameter list. */
2289
f1f41a6c 2290static vec<tree>
35a569c6 2291assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2292{
2293 tree fndecl = current_function_decl;
2294 tree fntype = TREE_TYPE (fndecl);
1e094109 2295 vec<tree> fnargs = vNULL;
3e992c41 2296 tree arg;
2297
1767a056 2298 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
f1f41a6c 2299 fnargs.safe_push (arg);
3e992c41 2300
2301 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
897b77d6 2302
2303 /* If struct value address is treated as the first argument, make it so. */
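  /* E.g. (sketch): for a function returning a large struct in memory, an
     artificial '.result_ptr' PARM_DECL of pointer-to-return type is built
     below and prepended to the argument list; the conditions just below
     decide whether this applies.  */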
45550790 2304 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
18d50ae6 2305 && ! cfun->returns_pcc_struct
45550790 2306 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
897b77d6 2307 {
3ff448ca 2308 tree type = build_pointer_type (TREE_TYPE (fntype));
35a569c6 2309 tree decl;
897b77d6 2310
e60a6f7b 2311 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
4d5b4e6a 2312 PARM_DECL, get_identifier (".result_ptr"), type);
35a569c6 2313 DECL_ARG_TYPE (decl) = type;
2314 DECL_ARTIFICIAL (decl) = 1;
4d5b4e6a 2315 DECL_NAMELESS (decl) = 1;
2316 TREE_CONSTANT (decl) = 1;
897b77d6 2317
1767a056 2318 DECL_CHAIN (decl) = all->orig_fnargs;
3e992c41 2319 all->orig_fnargs = decl;
f1f41a6c 2320 fnargs.safe_insert (0, decl);
3e992c41 2321
35a569c6 2322 all->function_result_decl = decl;
058a1b7a 2323
2324 /* If function is instrumented then bounds of the
2325 passed structure address is the second argument. */
2326 if (chkp_function_instrumented_p (fndecl))
2327 {
2328 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2329 PARM_DECL, get_identifier (".result_bnd"),
2330 pointer_bounds_type_node);
2331 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2332 DECL_ARTIFICIAL (decl) = 1;
2333 DECL_NAMELESS (decl) = 1;
2334 TREE_CONSTANT (decl) = 1;
2335
2336 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2337 DECL_CHAIN (all->orig_fnargs) = decl;
2338 fnargs.safe_insert (1, decl);
2339 }
897b77d6 2340 }
06ebc183 2341
92d40bc4 2342 /* If the target wants to split complex arguments into scalars, do so. */
2343 if (targetm.calls.split_complex_arg)
3e992c41 2344 split_complex_args (&fnargs);
915e81b8 2345
35a569c6 2346 return fnargs;
2347}
241399f6 2348
35a569c6 2349/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2350 data for the parameter. Incorporate ABI specifics such as pass-by-
2351 reference and type promotion. */
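/* A hedged example: on a target whose promote_function_mode widens small
   integers, a short parameter may have passed_mode HImode but promoted_mode
   SImode; and if the ABI passes the object by invisible reference instead,
   passed_type is rewritten below to the corresponding pointer type.  */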
897b77d6 2352
35a569c6 2353static void
2354assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2355 struct assign_parm_data_one *data)
2356{
2357 tree nominal_type, passed_type;
3754d046 2358 machine_mode nominal_mode, passed_mode, promoted_mode;
3b2411a8 2359 int unsignedp;
897b77d6 2360
35a569c6 2361 memset (data, 0, sizeof (*data));
2362
f0b5f617 2363 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
18d50ae6 2364 if (!cfun->stdarg)
f0b5f617 2365 data->named_arg = 1; /* No variadic parms. */
1767a056 2366 else if (DECL_CHAIN (parm))
f0b5f617 2367 data->named_arg = 1; /* Not the last non-variadic parm. */
39cba157 2368 else if (targetm.calls.strict_argument_naming (all->args_so_far))
f0b5f617 2369 data->named_arg = 1; /* Only variadic ones are unnamed. */
35a569c6 2370 else
f0b5f617 2371 data->named_arg = 0; /* Treat as variadic. */
35a569c6 2372
2373 nominal_type = TREE_TYPE (parm);
2374 passed_type = DECL_ARG_TYPE (parm);
2375
2376 /* Look out for errors propagating this far. Also, if the parameter's
2377 type is void then its value doesn't matter. */
2378 if (TREE_TYPE (parm) == error_mark_node
2379 /* This can happen after weird syntax errors
2380 or if an enum type is defined among the parms. */
2381 || TREE_CODE (parm) != PARM_DECL
2382 || passed_type == NULL
2383 || VOID_TYPE_P (nominal_type))
2384 {
2385 nominal_type = passed_type = void_type_node;
2386 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2387 goto egress;
2388 }
d06f5fba 2389
35a569c6 2390 /* Find mode of arg as it is passed, and mode of arg as it should be
2391 during execution of this function. */
2392 passed_mode = TYPE_MODE (passed_type);
2393 nominal_mode = TYPE_MODE (nominal_type);
2394
8df5a43d 2395 /* If the parm is to be passed as a transparent union or record, use the
2396 type of the first field for the tests below. We have already verified
2397 that the modes are the same. */
2398 if ((TREE_CODE (passed_type) == UNION_TYPE
2399 || TREE_CODE (passed_type) == RECORD_TYPE)
2400 && TYPE_TRANSPARENT_AGGR (passed_type))
2401 passed_type = TREE_TYPE (first_field (passed_type));
35a569c6 2402
cc9b8628 2403 /* See if this arg was passed by invisible reference. */
39cba157 2404 if (pass_by_reference (&all->args_so_far_v, passed_mode,
cc9b8628 2405 passed_type, data->named_arg))
35a569c6 2406 {
2407 passed_type = nominal_type = build_pointer_type (passed_type);
2408 data->passed_pointer = true;
25178032 2409 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
35a569c6 2410 }
897b77d6 2411
35a569c6 2412 /* Find mode as it is passed by the ABI. */
3b2411a8 2413 unsignedp = TYPE_UNSIGNED (passed_type);
2414 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2415 TREE_TYPE (current_function_decl), 0);
897b77d6 2416
35a569c6 2417 egress:
2418 data->nominal_type = nominal_type;
2419 data->passed_type = passed_type;
2420 data->nominal_mode = nominal_mode;
2421 data->passed_mode = passed_mode;
2422 data->promoted_mode = promoted_mode;
2423}
24ec33e7 2424
35a569c6 2425/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
897b77d6 2426
35a569c6 2427static void
2428assign_parms_setup_varargs (struct assign_parm_data_all *all,
2429 struct assign_parm_data_one *data, bool no_rtl)
2430{
2431 int varargs_pretend_bytes = 0;
2432
39cba157 2433 targetm.calls.setup_incoming_varargs (all->args_so_far,
35a569c6 2434 data->promoted_mode,
2435 data->passed_type,
2436 &varargs_pretend_bytes, no_rtl);
2437
2438 /* If the back-end has requested extra stack space, record how much is
2439 needed. Do not change pretend_args_size otherwise since it may be
2440 nonzero from an earlier partial argument. */
2441 if (varargs_pretend_bytes > 0)
2442 all->pretend_args_size = varargs_pretend_bytes;
2443}
7e8dfb30 2444
35a569c6 2445/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2446 the incoming location of the current parameter. */
2447
2448static void
2449assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2450 struct assign_parm_data_one *data)
2451{
2452 HOST_WIDE_INT pretend_bytes = 0;
2453 rtx entry_parm;
2454 bool in_regs;
2455
2456 if (data->promoted_mode == VOIDmode)
2457 {
2458 data->entry_parm = data->stack_parm = const0_rtx;
2459 return;
2460 }
7e8dfb30 2461
39cba157 2462 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
f387af4f 2463 data->promoted_mode,
2464 data->passed_type,
2465 data->named_arg);
897b77d6 2466
35a569c6 2467 if (entry_parm == 0)
2468 data->promoted_mode = data->passed_mode;
897b77d6 2469
35a569c6 2470 /* Determine parm's home in the stack, in case it arrives in the stack
2471 or we should pretend it did. Compute the stack position and rtx where
2472 the argument arrives and its size.
897b77d6 2473
35a569c6 2474 There is one complexity here: If this was a parameter that would
2475 have been passed in registers, but wasn't only because it is
2476 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2477 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2478 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2479 as it was the previous time. */
058a1b7a 2480 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
897b77d6 2481#ifdef STACK_PARMS_IN_REG_PARM_AREA
35a569c6 2482 in_regs = true;
241399f6 2483#endif
35a569c6 2484 if (!in_regs && !data->named_arg)
2485 {
39cba157 2486 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
241399f6 2487 {
35a569c6 2488 rtx tem;
39cba157 2489 tem = targetm.calls.function_incoming_arg (all->args_so_far,
f387af4f 2490 data->promoted_mode,
2491 data->passed_type, true);
35a569c6 2492 in_regs = tem != NULL;
241399f6 2493 }
35a569c6 2494 }
241399f6 2495
35a569c6 2496 /* If this parameter was passed both in registers and in the stack, use
2497 the copy on the stack. */
0336f0f0 2498 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2499 data->passed_type))
35a569c6 2500 entry_parm = 0;
241399f6 2501
35a569c6 2502 if (entry_parm)
2503 {
2504 int partial;
2505
39cba157 2506 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
f054eb3c 2507 data->promoted_mode,
2508 data->passed_type,
2509 data->named_arg);
35a569c6 2510 data->partial = partial;
2511
2512 /* The caller might already have allocated stack space for the
2513 register parameters. */
2514 if (partial != 0 && all->reg_parm_stack_space == 0)
1cd50c9a 2515 {
35a569c6 2516 /* Part of this argument is passed in registers and part
2517 is passed on the stack. Ask the prologue code to extend
2518 the stack part so that we can recreate the full value.
2519
2520 PRETEND_BYTES is the size of the registers we need to store.
2521 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2522 stack space that the prologue should allocate.
2523
2524 Internally, gcc assumes that the argument pointer is aligned
2525 to STACK_BOUNDARY bits. This is used both for alignment
2526 optimizations (see init_emit) and to locate arguments that are
2527 aligned to more than PARM_BOUNDARY bits. We must preserve this
2528 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2529 a stack boundary. */
2530
2531 /* We assume at most one partial arg, and it must be the first
2532 argument on the stack. */
fdada98f 2533 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
35a569c6 2534
f054eb3c 2535 pretend_bytes = partial;
35a569c6 2536 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
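	  /* E.g. (arithmetic only): with STACK_BYTES == 16 and 12 bytes of
	     register content to spill, pretend_args_size becomes 16; the
	     extra 4 bytes preserve the alignment invariant described above.  */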
2537
2538 /* We want to align relative to the actual stack pointer, so
2539 don't include this in the stack size until later. */
2540 all->extra_pretend_bytes = all->pretend_args_size;
1cd50c9a 2541 }
35a569c6 2542 }
241399f6 2543
35a569c6 2544 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2e090bf6 2545 all->reg_parm_stack_space,
35a569c6 2546 entry_parm ? data->partial : 0, current_function_decl,
2547 &all->stack_args_size, &data->locate);
897b77d6 2548
c6586120 2549 /* Update parm_stack_boundary if this parameter is passed in the
2550 stack. */
2551 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2552 crtl->parm_stack_boundary = data->locate.boundary;
2553
35a569c6 2554 /* Adjust offsets to include the pretend args. */
2555 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2556 data->locate.slot_offset.constant += pretend_bytes;
2557 data->locate.offset.constant += pretend_bytes;
27664a4b 2558
35a569c6 2559 data->entry_parm = entry_parm;
2560}
897b77d6 2561
35a569c6 2562/* A subroutine of assign_parms. If there is actually space on the stack
2563 for this parm, count it in stack_args_size and return true. */
897b77d6 2564
35a569c6 2565static bool
2566assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2567 struct assign_parm_data_one *data)
2568{
058a1b7a 2569 /* Bounds are never passed on the stack to keep compatibility
2570 with not instrumented code. */
2571 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2572 return false;
a133d57d 2573 /* Trivially true if we've no incoming register. */
058a1b7a 2574 else if (data->entry_parm == NULL)
35a569c6 2575 ;
2576 /* Also true if we're partially in registers and partially not,
2577 since we've arranged to drop the entire argument on the stack. */
2578 else if (data->partial != 0)
2579 ;
2580 /* Also true if the target says that it's passed in both registers
2581 and on the stack. */
2582 else if (GET_CODE (data->entry_parm) == PARALLEL
2583 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2584 ;
2585 /* Also true if the target says that there's stack allocated for
2586 all register parameters. */
2587 else if (all->reg_parm_stack_space > 0)
2588 ;
2589 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2590 else
2591 return false;
897b77d6 2592
35a569c6 2593 all->stack_args_size.constant += data->locate.size.constant;
2594 if (data->locate.size.var)
2595 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
06ebc183 2596
35a569c6 2597 return true;
2598}
bffcf014 2599
35a569c6 2600/* A subroutine of assign_parms. Given that this parameter is allocated
2601 stack space by the ABI, find it. */
897b77d6 2602
35a569c6 2603static void
2604assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2605{
2606 rtx offset_rtx, stack_parm;
2607 unsigned int align, boundary;
897b77d6 2608
35a569c6 2609 /* If we're passing this arg using a reg, make its stack home the
2610 aligned stack slot. */
2611 if (data->entry_parm)
2612 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2613 else
2614 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2615
abe32cce 2616 stack_parm = crtl->args.internal_arg_pointer;
35a569c6 2617 if (offset_rtx != const0_rtx)
2618 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2619 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2620
d92e3973 2621 if (!data->passed_pointer)
7aeb4db5 2622 {
d92e3973 2623 set_mem_attributes (stack_parm, parm, 1);
2624 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2625 while promoted mode's size is needed. */
2626 if (data->promoted_mode != BLKmode
2627 && data->promoted_mode != DECL_MODE (parm))
7aeb4db5 2628 {
5b2a69fa 2629 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
da443c27 2630 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
d92e3973 2631 {
2632 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2633 data->promoted_mode);
2634 if (offset)
da443c27 2635 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
d92e3973 2636 }
7aeb4db5 2637 }
2638 }
35a569c6 2639
c5dc0c32 2640 boundary = data->locate.boundary;
2641 align = BITS_PER_UNIT;
35a569c6 2642
2643 /* If we're padding upward, we know that the alignment of the slot
bd99ba64 2644 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
35a569c6 2645 intentionally forcing upward padding. Otherwise we have to come
2646 up with a guess at the alignment based on OFFSET_RTX. */
c5dc0c32 2647 if (data->locate.where_pad != downward || data->entry_parm)
35a569c6 2648 align = boundary;
971ba038 2649 else if (CONST_INT_P (offset_rtx))
35a569c6 2650 {
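      /* ORing in BOUNDARY and then taking (align & -align) isolates the
	 lowest set bit: the largest power of two that divides both the
	 slot boundary and the byte offset expressed in bits.  */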
2651 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2652 align = align & -align;
2653 }
c5dc0c32 2654 set_mem_align (stack_parm, align);
35a569c6 2655
2656 if (data->entry_parm)
2657 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2658
2659 data->stack_parm = stack_parm;
2660}
2661
 2662/* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
2663 always valid and contiguous. */
2664
2665static void
2666assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2667{
2668 rtx entry_parm = data->entry_parm;
2669 rtx stack_parm = data->stack_parm;
2670
2671 /* If this parm was passed part in regs and part in memory, pretend it
2672 arrived entirely in memory by pushing the register-part onto the stack.
2673 In the special case of a DImode or DFmode that is split, we could put
2674 it together in a pseudoreg directly, but for now that's not worth
2675 bothering with. */
2676 if (data->partial != 0)
2677 {
2678 /* Handle calls that pass values in multiple non-contiguous
2679 locations. The Irix 6 ABI has examples of this. */
2680 if (GET_CODE (entry_parm) == PARALLEL)
d2b9158b 2681 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
48e1416a 2682 data->passed_type,
35a569c6 2683 int_size_in_bytes (data->passed_type));
897b77d6 2684 else
f054eb3c 2685 {
2686 gcc_assert (data->partial % UNITS_PER_WORD == 0);
d2b9158b 2687 move_block_from_reg (REGNO (entry_parm),
2688 validize_mem (copy_rtx (stack_parm)),
f054eb3c 2689 data->partial / UNITS_PER_WORD);
2690 }
897b77d6 2691
35a569c6 2692 entry_parm = stack_parm;
2693 }
897b77d6 2694
35a569c6 2695 /* If we didn't decide this parm came in a register, by default it came
2696 on the stack. */
2697 else if (entry_parm == NULL)
2698 entry_parm = stack_parm;
2699
2700 /* When an argument is passed in multiple locations, we can't make use
2701 of this information, but we can save some copying if the whole argument
2702 is passed in a single register. */
2703 else if (GET_CODE (entry_parm) == PARALLEL
2704 && data->nominal_mode != BLKmode
2705 && data->passed_mode != BLKmode)
2706 {
2707 size_t i, len = XVECLEN (entry_parm, 0);
2708
2709 for (i = 0; i < len; i++)
2710 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2711 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2712 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2713 == data->passed_mode)
2714 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2715 {
2716 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2717 break;
2718 }
2719 }
4d6c855d 2720
35a569c6 2721 data->entry_parm = entry_parm;
2722}
897b77d6 2723
77c0eeb4 2724/* A subroutine of assign_parms. Reconstitute any values which were
2725 passed in multiple registers and would fit in a single register. */
2726
2727static void
2728assign_parm_remove_parallels (struct assign_parm_data_one *data)
2729{
2730 rtx entry_parm = data->entry_parm;
2731
2732 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2733 This can be done with register operations rather than on the
2734 stack, even if we will store the reconstituted parameter on the
2735 stack later. */
1cf0636a 2736 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
77c0eeb4 2737 {
2738 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
77b80ffd 2739 emit_group_store (parmreg, entry_parm, data->passed_type,
77c0eeb4 2740 GET_MODE_SIZE (GET_MODE (entry_parm)));
2741 entry_parm = parmreg;
2742 }
2743
2744 data->entry_parm = entry_parm;
2745}
2746
35a569c6 2747/* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
2748 always valid and properly aligned. */
897b77d6 2749
35a569c6 2750static void
1382992b 2751assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
35a569c6 2752{
2753 rtx stack_parm = data->stack_parm;
2754
2755 /* If we can't trust the parm stack slot to be aligned enough for its
2756 ultimate type, don't use that slot after entry. We'll make another
2757 stack slot, if we need one. */
1382992b 2758 if (stack_parm
2759 && ((STRICT_ALIGNMENT
2760 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2761 || (data->nominal_type
2762 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2763 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
35a569c6 2764 stack_parm = NULL;
2765
2766 /* If parm was passed in memory, and we need to convert it on entry,
2767 don't store it back in that same slot. */
2768 else if (data->entry_parm == stack_parm
2769 && data->nominal_mode != BLKmode
2770 && data->nominal_mode != data->passed_mode)
2771 stack_parm = NULL;
2772
f1a0edff 2773 /* If stack protection is in effect for this function, don't leave any
2774 pointers in their passed stack slots. */
edb7afe8 2775 else if (crtl->stack_protect_guard
f1a0edff 2776 && (flag_stack_protect == 2
2777 || data->passed_pointer
2778 || POINTER_TYPE_P (data->nominal_type)))
2779 stack_parm = NULL;
2780
35a569c6 2781 data->stack_parm = stack_parm;
2782}
90b076ea 2783
35a569c6 2784/* A subroutine of assign_parms. Return true if the current parameter
2785 should be stored as a BLKmode in the current frame. */
2786
2787static bool
2788assign_parm_setup_block_p (struct assign_parm_data_one *data)
2789{
2790 if (data->nominal_mode == BLKmode)
2791 return true;
1cf0636a 2792 if (GET_MODE (data->entry_parm) == BLKmode)
2793 return true;
a2509aaa 2794
5f4cd670 2795#ifdef BLOCK_REG_PADDING
ed4b0b75 2796 /* Only assign_parm_setup_block knows how to deal with register arguments
2797 that are padded at the least significant end. */
2798 if (REG_P (data->entry_parm)
2799 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2800 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2801 == (BYTES_BIG_ENDIAN ? upward : downward)))
35a569c6 2802 return true;
5f4cd670 2803#endif
35a569c6 2804
2805 return false;
2806}
2807
48e1416a 2808/* A subroutine of assign_parms. Arrange for the parameter to be
35a569c6 2809   present and valid in DATA->STACK_PARM.  */
2810
2811static void
e2ff5c1b 2812assign_parm_setup_block (struct assign_parm_data_all *all,
2813 tree parm, struct assign_parm_data_one *data)
35a569c6 2814{
2815 rtx entry_parm = data->entry_parm;
2816 rtx stack_parm = data->stack_parm;
c5dc0c32 2817 HOST_WIDE_INT size;
2818 HOST_WIDE_INT size_stored;
35a569c6 2819
e2ff5c1b 2820 if (GET_CODE (entry_parm) == PARALLEL)
2821 entry_parm = emit_group_move_into_temps (entry_parm);
2822
c5dc0c32 2823 size = int_size_in_bytes (data->passed_type);
2824 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2825 if (stack_parm == 0)
2826 {
3a1c59da 2827 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
1382992b 2828 stack_parm = assign_stack_local (BLKmode, size_stored,
2829 DECL_ALIGN (parm));
c5dc0c32 2830 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2831 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2832 set_mem_attributes (stack_parm, parm, 1);
2833 }
2834
35a569c6 2835 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2836 calls that pass values in multiple non-contiguous locations. */
2837 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2838 {
35a569c6 2839 rtx mem;
2840
2841 /* Note that we will be storing an integral number of words.
2842 So we have to be careful to ensure that we allocate an
c5dc0c32 2843 integral number of words. We do this above when we call
35a569c6 2844 assign_stack_local if space was not allocated in the argument
2845 list. If it was, this will not work if PARM_BOUNDARY is not
2846 a multiple of BITS_PER_WORD. It isn't clear how to fix this
 2847	 if it becomes a problem.  The exception is when BLKmode arrives
2848 with arguments not conforming to word_mode. */
2849
c5dc0c32 2850 if (data->stack_parm == 0)
2851 ;
35a569c6 2852 else if (GET_CODE (entry_parm) == PARALLEL)
2853 ;
fdada98f 2854 else
2855 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
897b77d6 2856
d2b9158b 2857 mem = validize_mem (copy_rtx (stack_parm));
530178a9 2858
35a569c6 2859 /* Handle values in multiple non-contiguous locations. */
2860 if (GET_CODE (entry_parm) == PARALLEL)
e2ff5c1b 2861 {
28bf151d 2862 push_to_sequence2 (all->first_conversion_insn,
2863 all->last_conversion_insn);
e2ff5c1b 2864 emit_group_store (mem, entry_parm, data->passed_type, size);
28bf151d 2865 all->first_conversion_insn = get_insns ();
2866 all->last_conversion_insn = get_last_insn ();
e2ff5c1b 2867 end_sequence ();
2868 }
530178a9 2869
35a569c6 2870 else if (size == 0)
2871 ;
dd6fed02 2872
35a569c6 2873 /* If SIZE is that of a mode no bigger than a word, just use
2874 that mode's store operation. */
2875 else if (size <= UNITS_PER_WORD)
2876 {
3754d046 2877 machine_mode mode
35a569c6 2878 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
530178a9 2879
35a569c6 2880 if (mode != BLKmode
5f4cd670 2881#ifdef BLOCK_REG_PADDING
35a569c6 2882 && (size == UNITS_PER_WORD
2883 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2884 != (BYTES_BIG_ENDIAN ? upward : downward)))
5f4cd670 2885#endif
35a569c6 2886 )
2887 {
2973927c 2888 rtx reg;
2889
2890 /* We are really truncating a word_mode value containing
2891 SIZE bytes into a value of mode MODE. If such an
2892 operation requires no actual instructions, we can refer
2893 to the value directly in mode MODE, otherwise we must
2894 start with the register in word_mode and explicitly
2895 convert it. */
2896 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2897 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2898 else
2899 {
2900 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2901 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2902 }
35a569c6 2903 emit_move_insn (change_address (mem, mode, 0), reg);
2904 }
530178a9 2905
35a569c6 2906 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2907 machine must be aligned to the left before storing
2908 to memory. Note that the previous test doesn't
2909 handle all cases (e.g. SIZE == 3). */
2910 else if (size != UNITS_PER_WORD
5f4cd670 2911#ifdef BLOCK_REG_PADDING
35a569c6 2912 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2913 == downward)
5f4cd670 2914#else
35a569c6 2915 && BYTES_BIG_ENDIAN
5f4cd670 2916#endif
35a569c6 2917 )
2918 {
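	      /* E.g. (sketch): with UNITS_PER_WORD == 4 and size == 3, BY is
		 8, so the value is shifted left by one byte and its three
		 significant bytes end up at the most significant (leftmost)
		 end of the word before the word_mode store below.  */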
2919 rtx tem, x;
2920 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
e1b9bbec 2921 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
35a569c6 2922
f5ff0b21 2923 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
35a569c6 2924 tem = change_address (mem, word_mode, 0);
2925 emit_move_insn (tem, x);
897b77d6 2926 }
35a569c6 2927 else
e2ff5c1b 2928 move_block_from_reg (REGNO (entry_parm), mem,
35a569c6 2929 size_stored / UNITS_PER_WORD);
897b77d6 2930 }
35a569c6 2931 else
e2ff5c1b 2932 move_block_from_reg (REGNO (entry_parm), mem,
35a569c6 2933 size_stored / UNITS_PER_WORD);
2934 }
c5dc0c32 2935 else if (data->stack_parm == 0)
2936 {
28bf151d 2937 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
c5dc0c32 2938 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2939 BLOCK_OP_NORMAL);
28bf151d 2940 all->first_conversion_insn = get_insns ();
2941 all->last_conversion_insn = get_last_insn ();
c5dc0c32 2942 end_sequence ();
2943 }
35a569c6 2944
c5dc0c32 2945 data->stack_parm = stack_parm;
35a569c6 2946 SET_DECL_RTL (parm, stack_parm);
2947}
2948
2949/* A subroutine of assign_parms. Allocate a pseudo to hold the current
2950 parameter. Get it there. Perform all ABI specified conversions. */
2951
2952static void
2953assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2954 struct assign_parm_data_one *data)
2955{
f3e93fd1 2956 rtx parmreg, validated_mem;
2957 rtx equiv_stack_parm;
3754d046 2958 machine_mode promoted_nominal_mode;
35a569c6 2959 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2960 bool did_conversion = false;
f3e93fd1 2961 bool need_conversion, moved;
35a569c6 2962
2963 /* Store the parm in a pseudoregister during the function, but we may
c879dbcf 2964 need to do it in a wider mode. Using 2 here makes the result
2965 consistent with promote_decl_mode and thus expand_expr_real_1. */
35a569c6 2966 promoted_nominal_mode
3b2411a8 2967 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
c879dbcf 2968 TREE_TYPE (current_function_decl), 2);
35a569c6 2969
1382992b 2970 parmreg = gen_reg_rtx (promoted_nominal_mode);
35a569c6 2971
1382992b 2972 if (!DECL_ARTIFICIAL (parm))
2973 mark_user_reg (parmreg);
35a569c6 2974
2975 /* If this was an item that we received a pointer to,
2976 set DECL_RTL appropriately. */
2977 if (data->passed_pointer)
2978 {
2979 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2980 set_mem_attributes (x, parm, 1);
2981 SET_DECL_RTL (parm, x);
2982 }
2983 else
b04fab2a 2984 SET_DECL_RTL (parm, parmreg);
35a569c6 2985
77c0eeb4 2986 assign_parm_remove_parallels (data);
2987
c879dbcf 2988 /* Copy the value into the register, thus bridging between
2989 assign_parm_find_data_types and expand_expr_real_1. */
35a569c6 2990
f3e93fd1 2991 equiv_stack_parm = data->stack_parm;
d2b9158b 2992 validated_mem = validize_mem (copy_rtx (data->entry_parm));
f3e93fd1 2993
2994 need_conversion = (data->nominal_mode != data->passed_mode
2995 || promoted_nominal_mode != data->promoted_mode);
2996 moved = false;
2997
3939ef08 2998 if (need_conversion
2999 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3000 && data->nominal_mode == data->passed_mode
3001 && data->nominal_mode == GET_MODE (data->entry_parm))
f3e93fd1 3002 {
35a569c6 3003 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3004 mode, by the caller. We now have to convert it to
3005 NOMINAL_MODE, if different. However, PARMREG may be in
3006 a different mode than NOMINAL_MODE if it is being stored
3007 promoted.
3008
3009 If ENTRY_PARM is a hard register, it might be in a register
3010 not valid for operating in its mode (e.g., an odd-numbered
3011 register for a DFmode). In that case, moves are the only
3012 thing valid, so we can't do a convert from there. This
 3013	 occurs when the calling sequence allows such misaligned
3014 usages.
3015
3016 In addition, the conversion may involve a call, which could
3017 clobber parameters which haven't been copied to pseudo
f3e93fd1 3018 registers yet.
3019
3020 First, we try to emit an insn which performs the necessary
3021 conversion. We verify that this insn does not clobber any
3022 hard registers. */
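      /* E.g. (sketch): for a short parm promoted to SImode this would be a
	 single (set (reg:SI parmreg) (sign_extend:SI (mem:HI ...))); the
	 sequence is accepted only if scanning it finds no hard register
	 being set.  */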
3023
3024 enum insn_code icode;
3025 rtx op0, op1;
3026
3027 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3028 unsignedp);
3029
3030 op0 = parmreg;
3031 op1 = validated_mem;
3032 if (icode != CODE_FOR_nothing
39c56a89 3033 && insn_operand_matches (icode, 0, op0)
3034 && insn_operand_matches (icode, 1, op1))
f3e93fd1 3035 {
3036 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
91a55c11 3037 rtx_insn *insn, *insns;
3038 rtx t = op1;
f3e93fd1 3039 HARD_REG_SET hardregs;
3040
3041 start_sequence ();
30790040 3042 /* If op1 is a hard register that is likely spilled, first
3043 force it into a pseudo, otherwise combiner might extend
3044 its lifetime too much. */
3045 if (GET_CODE (t) == SUBREG)
3046 t = SUBREG_REG (t);
3047 if (REG_P (t)
3048 && HARD_REGISTER_P (t)
3049 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3050 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3051 {
3052 t = gen_reg_rtx (GET_MODE (op1));
3053 emit_move_insn (t, op1);
3054 }
3055 else
3056 t = op1;
9ed997be 3057 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3058 data->passed_mode, unsignedp);
eb10ade7 3059 emit_insn (pat);
f3e93fd1 3060 insns = get_insns ();
3061
3062 moved = true;
3063 CLEAR_HARD_REG_SET (hardregs);
3064 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3065 {
3066 if (INSN_P (insn))
3067 note_stores (PATTERN (insn), record_hard_reg_sets,
3068 &hardregs);
3069 if (!hard_reg_set_empty_p (hardregs))
3070 moved = false;
3071 }
3072
3073 end_sequence ();
3074
3075 if (moved)
3076 {
3077 emit_insn (insns);
3939ef08 3078 if (equiv_stack_parm != NULL_RTX)
3079 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3080 equiv_stack_parm);
f3e93fd1 3081 }
3082 }
3083 }
3084
3085 if (moved)
3086 /* Nothing to do. */
3087 ;
3088 else if (need_conversion)
3089 {
3090 /* We did not have an insn to convert directly, or the sequence
3091 generated appeared unsafe. We must first copy the parm to a
3092 pseudo reg, and save the conversion until after all
35a569c6 3093 parameters have been moved. */
3094
f3e93fd1 3095 int save_tree_used;
35a569c6 3096 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3097
f3e93fd1 3098 emit_move_insn (tempreg, validated_mem);
35a569c6 3099
28bf151d 3100 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
35a569c6 3101 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3102
3103 if (GET_CODE (tempreg) == SUBREG
3104 && GET_MODE (tempreg) == data->nominal_mode
3105 && REG_P (SUBREG_REG (tempreg))
3106 && data->nominal_mode == data->passed_mode
3107 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3108 && GET_MODE_SIZE (GET_MODE (tempreg))
3109 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
897b77d6 3110 {
35a569c6 3111 /* The argument is already sign/zero extended, so note it
3112 into the subreg. */
3113 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
e8629f9e 3114 SUBREG_PROMOTED_SET (tempreg, unsignedp);
35a569c6 3115 }
19e03a68 3116
35a569c6 3117 /* TREE_USED gets set erroneously during expand_assignment. */
3118 save_tree_used = TREE_USED (parm);
5b5037b3 3119 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
35a569c6 3120 TREE_USED (parm) = save_tree_used;
28bf151d 3121 all->first_conversion_insn = get_insns ();
3122 all->last_conversion_insn = get_last_insn ();
35a569c6 3123 end_sequence ();
19e03a68 3124
35a569c6 3125 did_conversion = true;
3126 }
3127 else
f3e93fd1 3128 emit_move_insn (parmreg, validated_mem);
35a569c6 3129
3130 /* If we were passed a pointer but the actual value can safely live
cad0d474 3131 in a register, retrieve it and use it directly. */
1382992b 3132 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
35a569c6 3133 {
3134 /* We can't use nominal_mode, because it will have been set to
3135 Pmode above. We must use the actual mode of the parm. */
1382992b 3136 if (use_register_for_decl (parm))
cad0d474 3137 {
3138 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3139 mark_user_reg (parmreg);
3140 }
3141 else
3142 {
3143 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3144 TYPE_MODE (TREE_TYPE (parm)),
3145 TYPE_ALIGN (TREE_TYPE (parm)));
3146 parmreg
3147 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3148 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3149 align);
3150 set_mem_attributes (parmreg, parm, 1);
3151 }
8815f4da 3152
35a569c6 3153 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3154 {
3155 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3156 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3157
28bf151d 3158 push_to_sequence2 (all->first_conversion_insn,
3159 all->last_conversion_insn);
35a569c6 3160 emit_move_insn (tempreg, DECL_RTL (parm));
3161 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3162 emit_move_insn (parmreg, tempreg);
28bf151d 3163 all->first_conversion_insn = get_insns ();
3164 all->last_conversion_insn = get_last_insn ();
35a569c6 3165 end_sequence ();
897b77d6 3166
35a569c6 3167 did_conversion = true;
3168 }
3169 else
3170 emit_move_insn (parmreg, DECL_RTL (parm));
897b77d6 3171
35a569c6 3172 SET_DECL_RTL (parm, parmreg);
60d903f5 3173
35a569c6 3174 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3175 now the parm. */
1382992b 3176 data->stack_parm = NULL;
35a569c6 3177 }
701e46d0 3178
35a569c6 3179 /* Mark the register as eliminable if we did no conversion and it was
3180 copied from memory at a fixed offset, and the arg pointer was not
3181 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3182 offset formed an invalid address, such memory-equivalences as we
3183 make here would screw up life analysis for it. */
3184 if (data->nominal_mode == data->passed_mode
3185 && !did_conversion
1382992b 3186 && data->stack_parm != 0
3187 && MEM_P (data->stack_parm)
35a569c6 3188 && data->locate.offset.var == 0
3189 && reg_mentioned_p (virtual_incoming_args_rtx,
1382992b 3190 XEXP (data->stack_parm, 0)))
35a569c6 3191 {
8bb2625b 3192 rtx_insn *linsn = get_last_insn ();
3193 rtx_insn *sinsn;
3194 rtx set;
5f85a240 3195
35a569c6 3196 /* Mark complex types separately. */
3197 if (GET_CODE (parmreg) == CONCAT)
3198 {
3754d046 3199 machine_mode submode
35a569c6 3200 = GET_MODE_INNER (GET_MODE (parmreg));
de17a47b 3201 int regnor = REGNO (XEXP (parmreg, 0));
3202 int regnoi = REGNO (XEXP (parmreg, 1));
1382992b 3203 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3204 rtx stacki = adjust_address_nv (data->stack_parm, submode,
de17a47b 3205 GET_MODE_SIZE (submode));
35a569c6 3206
3207 /* Scan backwards for the set of the real and
3208 imaginary parts. */
3209 for (sinsn = linsn; sinsn != 0;
3210 sinsn = prev_nonnote_insn (sinsn))
3211 {
3212 set = single_set (sinsn);
3213 if (set == 0)
3214 continue;
3215
3216 if (SET_DEST (set) == regno_reg_rtx [regnoi])
750a330e 3217 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
35a569c6 3218 else if (SET_DEST (set) == regno_reg_rtx [regnor])
750a330e 3219 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
5f85a240 3220 }
35a569c6 3221 }
41cf444a 3222 else
3223 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
35a569c6 3224 }
3225
3226 /* For a pointer data type, suggest a pointer register. */
3227 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3228 mark_reg_pointer (parmreg,
3229 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3230}
3231
3232/* A subroutine of assign_parms. Allocate stack space to hold the current
3233 parameter. Get it there. Perform all ABI specified conversions. */
3234
3235static void
3236assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3237 struct assign_parm_data_one *data)
3238{
3239 /* Value must be stored in the stack slot STACK_PARM during function
3240 execution. */
c5dc0c32 3241 bool to_conversion = false;
35a569c6 3242
77c0eeb4 3243 assign_parm_remove_parallels (data);
3244
35a569c6 3245 if (data->promoted_mode != data->nominal_mode)
3246 {
3247 /* Conversion is required. */
3248 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
897b77d6 3249
d2b9158b 3250 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
35a569c6 3251
28bf151d 3252 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
c5dc0c32 3253 to_conversion = true;
3254
35a569c6 3255 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3256 TYPE_UNSIGNED (TREE_TYPE (parm)));
3257
3258 if (data->stack_parm)
738ab6f5 3259 {
3260 int offset = subreg_lowpart_offset (data->nominal_mode,
3261 GET_MODE (data->stack_parm));
3262 /* ??? This may need a big-endian conversion on sparc64. */
3263 data->stack_parm
3264 = adjust_address (data->stack_parm, data->nominal_mode, 0);
da443c27 3265 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
738ab6f5 3266 set_mem_offset (data->stack_parm,
da443c27 3267 MEM_OFFSET (data->stack_parm) + offset);
738ab6f5 3268 }
35a569c6 3269 }
3270
3271 if (data->entry_parm != data->stack_parm)
3272 {
c5dc0c32 3273 rtx src, dest;
3274
35a569c6 3275 if (data->stack_parm == 0)
3276 {
c9b50df7 3277 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3278 GET_MODE (data->entry_parm),
3279 TYPE_ALIGN (data->passed_type));
35a569c6 3280 data->stack_parm
3281 = assign_stack_local (GET_MODE (data->entry_parm),
3282 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
c9b50df7 3283 align);
35a569c6 3284 set_mem_attributes (data->stack_parm, parm, 1);
897b77d6 3285 }
35a569c6 3286
d2b9158b 3287 dest = validize_mem (copy_rtx (data->stack_parm));
3288 src = validize_mem (copy_rtx (data->entry_parm));
c5dc0c32 3289
3290 if (MEM_P (src))
897b77d6 3291 {
c5dc0c32 3292 /* Use a block move to handle potentially misaligned entry_parm. */
3293 if (!to_conversion)
28bf151d 3294 push_to_sequence2 (all->first_conversion_insn,
3295 all->last_conversion_insn);
c5dc0c32 3296 to_conversion = true;
3297
3298 emit_block_move (dest, src,
3299 GEN_INT (int_size_in_bytes (data->passed_type)),
3300 BLOCK_OP_NORMAL);
35a569c6 3301 }
3302 else
c5dc0c32 3303 emit_move_insn (dest, src);
3304 }
3305
3306 if (to_conversion)
3307 {
28bf151d 3308 all->first_conversion_insn = get_insns ();
3309 all->last_conversion_insn = get_last_insn ();
c5dc0c32 3310 end_sequence ();
35a569c6 3311 }
897b77d6 3312
35a569c6 3313 SET_DECL_RTL (parm, data->stack_parm);
3314}
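
/* Illustrative sketch (not part of function.c): the deferred-conversion
   idiom used by the assign_parm_* routines above.  Conversion insns are not
   emitted into the main insn stream right away; they are collected on the
   detached sequence recorded in ALL->first/last_conversion_insn and emitted
   later, once every parameter has been copied out of its incoming hard
   register.  The helper name below is hypothetical.  */

static void
record_deferred_conversion_example (struct assign_parm_data_all *all,
				    rtx dest, rtx src)
{
  push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
  emit_move_insn (dest, src);
  all->first_conversion_insn = get_insns ();
  all->last_conversion_insn = get_last_insn ();
  end_sequence ();
}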
b8f621ce 3315
35a569c6 3316/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3317 undo the frobbing that we did in assign_parms_augmented_arg_list. */
006be676 3318
35a569c6 3319static void
3e992c41 3320assign_parms_unsplit_complex (struct assign_parm_data_all *all,
f1f41a6c 3321 vec<tree> fnargs)
35a569c6 3322{
3323 tree parm;
e6427ef0 3324 tree orig_fnargs = all->orig_fnargs;
3e992c41 3325 unsigned i = 0;
e513d163 3326
3e992c41 3327 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
35a569c6 3328 {
3329 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3330 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3331 {
3332 rtx tmp, real, imag;
3754d046 3333 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
897b77d6 3334
f1f41a6c 3335 real = DECL_RTL (fnargs[i]);
3336 imag = DECL_RTL (fnargs[i + 1]);
35a569c6 3337 if (inner != GET_MODE (real))
897b77d6 3338 {
35a569c6 3339 real = gen_lowpart_SUBREG (inner, real);
3340 imag = gen_lowpart_SUBREG (inner, imag);
3341 }
e6427ef0 3342
3343 if (TREE_ADDRESSABLE (parm))
3344 {
3345 rtx rmem, imem;
3346 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
c9b50df7 3347 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3348 DECL_MODE (parm),
3349 TYPE_ALIGN (TREE_TYPE (parm)));
e6427ef0 3350
3351 /* split_complex_arg put the real and imag parts in
3352 pseudos. Move them to memory. */
c9b50df7 3353 tmp = assign_stack_local (DECL_MODE (parm), size, align);
e6427ef0 3354 set_mem_attributes (tmp, parm, 1);
3355 rmem = adjust_address_nv (tmp, inner, 0);
3356 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
28bf151d 3357 push_to_sequence2 (all->first_conversion_insn,
3358 all->last_conversion_insn);
e6427ef0 3359 emit_move_insn (rmem, real);
3360 emit_move_insn (imem, imag);
28bf151d 3361 all->first_conversion_insn = get_insns ();
3362 all->last_conversion_insn = get_last_insn ();
e6427ef0 3363 end_sequence ();
3364 }
3365 else
3366 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
35a569c6 3367 SET_DECL_RTL (parm, tmp);
08531d36 3368
f1f41a6c 3369 real = DECL_INCOMING_RTL (fnargs[i]);
3370 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
35a569c6 3371 if (inner != GET_MODE (real))
3372 {
3373 real = gen_lowpart_SUBREG (inner, real);
3374 imag = gen_lowpart_SUBREG (inner, imag);
897b77d6 3375 }
35a569c6 3376 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
d91cf567 3377 set_decl_incoming_rtl (parm, tmp, false);
3e992c41 3378 i++;
897b77d6 3379 }
897b77d6 3380 }
35a569c6 3381}
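
/* Illustrative sketch (not part of GCC): the kind of source-level parameter
   the routine above reassembles.  On targets whose split_complex_arg hook is
   set, an argument such as Z below arrives as two scalar halves; the code
   above stitches DECL_RTL (and DECL_INCOMING_RTL) back into a CONCAT, or
   into a stack temporary when the parameter is addressable.  */

double
cabs2_example (double _Complex z)
{
  return __real__ z * __real__ z + __imag__ z * __imag__ z;
}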
3382
058a1b7a 3383/* Load bounds of PARM from bounds table. */
3384static void
3385assign_parm_load_bounds (struct assign_parm_data_one *data,
3386 tree parm,
3387 rtx entry,
3388 unsigned bound_no)
3389{
3390 bitmap_iterator bi;
3391 unsigned i, offs = 0;
3392 int bnd_no = -1;
3393 rtx slot = NULL, ptr = NULL;
3394
3395 if (parm)
3396 {
3397 bitmap slots;
3398 bitmap_obstack_initialize (NULL);
3399 slots = BITMAP_ALLOC (NULL);
3400 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3401 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3402 {
3403 if (bound_no)
3404 bound_no--;
3405 else
3406 {
3407 bnd_no = i;
3408 break;
3409 }
3410 }
3411 BITMAP_FREE (slots);
3412 bitmap_obstack_release (NULL);
3413 }
3414
3415 /* We may have bounds not associated with any pointer. */
3416 if (bnd_no != -1)
3417 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3418
3419 /* Find associated pointer. */
3420 if (bnd_no == -1)
3421 {
3422 /* If bounds are not associated with any pointer,
3423 then they are passed in a register or special slot. */
3424 gcc_assert (data->entry_parm);
3425 ptr = const0_rtx;
3426 }
3427 else if (MEM_P (entry))
3428 slot = adjust_address (entry, Pmode, offs);
3429 else if (REG_P (entry))
3430 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3431 else if (GET_CODE (entry) == PARALLEL)
3432 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3433 else
3434 gcc_unreachable ();
3435 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3436 data->entry_parm);
3437}
3438
3439/* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3440
3441static void
3442assign_bounds (vec<bounds_parm_data> &bndargs,
3cc70dc3 3443 struct assign_parm_data_all &all,
3444 bool assign_regs, bool assign_special,
3445 bool assign_bt)
058a1b7a 3446{
3cc70dc3 3447 unsigned i, pass;
058a1b7a 3448 bounds_parm_data *pbdata;
3449
3450 if (!bndargs.exists ())
3451 return;
3452
3453 /* We make several passes to store input bounds. First we handle bounds
3454 passed in registers. After that we load bounds passed in special
3455 slots. Finally we load bounds from the Bounds Table. */
3456 for (pass = 0; pass < 3; pass++)
3457 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3458 {
3459 /* Pass 0 => regs only. */
3460 if (pass == 0
3cc70dc3 3461 && (!assign_regs
3462 || (!pbdata->parm_data.entry_parm
3463 || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
058a1b7a 3464 continue;
3465 /* Pass 1 => slots only. */
3466 else if (pass == 1
3cc70dc3 3467 && (!assign_special
3468 || (!pbdata->parm_data.entry_parm
3469 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
058a1b7a 3470 continue;
3471 /* Pass 2 => BT only. */
3472 else if (pass == 2
3cc70dc3 3473 && (!assign_bt
3474 || pbdata->parm_data.entry_parm))
058a1b7a 3475 continue;
3476
3477 if (!pbdata->parm_data.entry_parm
3478 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3479 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3480 pbdata->ptr_entry, pbdata->bound_no);
3481
3482 set_decl_incoming_rtl (pbdata->bounds_parm,
3483 pbdata->parm_data.entry_parm, false);
3484
3485 if (assign_parm_setup_block_p (&pbdata->parm_data))
3486 assign_parm_setup_block (&all, pbdata->bounds_parm,
3487 &pbdata->parm_data);
3488 else if (pbdata->parm_data.passed_pointer
3489 || use_register_for_decl (pbdata->bounds_parm))
3490 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3491 &pbdata->parm_data);
3492 else
3493 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3494 &pbdata->parm_data);
058a1b7a 3495 }
058a1b7a 3496}
3497
35a569c6 3498/* Assign RTL expressions to the function's parameters. This may involve
3499 copying them into registers and using those registers as the DECL_RTL. */
3500
3f0895d3 3501static void
35a569c6 3502assign_parms (tree fndecl)
3503{
3504 struct assign_parm_data_all all;
3e992c41 3505 tree parm;
f1f41a6c 3506 vec<tree> fnargs;
058a1b7a 3507 unsigned i, bound_no = 0;
3508 tree last_arg = NULL;
3509 rtx last_arg_entry = NULL;
3510 vec<bounds_parm_data> bndargs = vNULL;
3511 bounds_parm_data bdata;
897b77d6 3512
abe32cce 3513 crtl->args.internal_arg_pointer
567925e3 3514 = targetm.calls.internal_arg_pointer ();
35a569c6 3515
3516 assign_parms_initialize_all (&all);
3517 fnargs = assign_parms_augmented_arg_list (&all);
3518
f1f41a6c 3519 FOR_EACH_VEC_ELT (fnargs, i, parm)
915e81b8 3520 {
35a569c6 3521 struct assign_parm_data_one data;
3522
3523 /* Extract the type of PARM; adjust it according to ABI. */
3524 assign_parm_find_data_types (&all, parm, &data);
3525
3526 /* Early out for errors and void parameters. */
3527 if (data.passed_mode == VOIDmode)
915e81b8 3528 {
35a569c6 3529 SET_DECL_RTL (parm, const0_rtx);
3530 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3531 continue;
3532 }
1b4f3c7d 3533
27a7a23a 3534 /* Estimate stack alignment from parameter alignment. */
3535 if (SUPPORTS_STACK_ALIGNMENT)
3536 {
bd99ba64 3537 unsigned int align
3538 = targetm.calls.function_arg_boundary (data.promoted_mode,
3539 data.passed_type);
8645d3e7 3540 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3541 align);
27a7a23a 3542 if (TYPE_ALIGN (data.nominal_type) > align)
8645d3e7 3543 align = MINIMUM_ALIGNMENT (data.nominal_type,
3544 TYPE_MODE (data.nominal_type),
3545 TYPE_ALIGN (data.nominal_type));
27a7a23a 3546 if (crtl->stack_alignment_estimated < align)
3547 {
3548 gcc_assert (!crtl->stack_realign_processed);
3549 crtl->stack_alignment_estimated = align;
3550 }
3551 }
48e1416a 3552
35a569c6 3553 /* Find out where the parameter arrives in this function. */
3554 assign_parm_find_entry_rtl (&all, &data);
3555
3556 /* Find out where stack space for this parameter might be. */
3557 if (assign_parm_is_stack_parm (&all, &data))
3558 {
3559 assign_parm_find_stack_rtl (parm, &data);
3560 assign_parm_adjust_entry_rtl (&data);
915e81b8 3561 }
058a1b7a 3562 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3563 {
3564 /* Remember where the last non-bounds arg was passed in case
3565 we have to load associated bounds for it from Bounds
3566 Table. */
3567 last_arg = parm;
3568 last_arg_entry = data.entry_parm;
3569 bound_no = 0;
3570 }
35a569c6 3571 /* Record permanently how this parm was passed. */
56fe7223 3572 if (data.passed_pointer)
3573 {
3574 rtx incoming_rtl
3575 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3576 data.entry_parm);
3577 set_decl_incoming_rtl (parm, incoming_rtl, true);
3578 }
3579 else
3580 set_decl_incoming_rtl (parm, data.entry_parm, false);
35a569c6 3581
1382992b 3582 /* Bounds should be loaded in a particular order to
058a1b7a 3583 have registers allocated correctly. Collect info about
3584 input bounds and load them later. */
3585 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3586 {
3587 /* Expect bounds in instrumented functions only. */
3588 gcc_assert (chkp_function_instrumented_p (fndecl));
3589
3590 bdata.parm_data = data;
3591 bdata.bounds_parm = parm;
3592 bdata.ptr_parm = last_arg;
3593 bdata.ptr_entry = last_arg_entry;
3594 bdata.bound_no = bound_no;
3595 bndargs.safe_push (bdata);
3596 }
3597 else
3598 {
1382992b 3599 assign_parm_adjust_stack_rtl (&data);
3600
058a1b7a 3601 if (assign_parm_setup_block_p (&data))
3602 assign_parm_setup_block (&all, parm, &data);
1382992b 3603 else if (data.passed_pointer || use_register_for_decl (parm))
058a1b7a 3604 assign_parm_setup_reg (&all, parm, &data);
3605 else
3606 assign_parm_setup_stack (&all, parm, &data);
3607 }
3608
3609 if (cfun->stdarg && !DECL_CHAIN (parm))
3610 {
3611 int pretend_bytes = 0;
3612
3613 assign_parms_setup_varargs (&all, &data, false);
3614
3615 if (chkp_function_instrumented_p (fndecl))
3616 {
3617 /* We expect this is the last parm. Otherwise it is wrong
3618 to assign bounds right now. */
3619 gcc_assert (i == (fnargs.length () - 1));
3cc70dc3 3620 assign_bounds (bndargs, all, true, false, false);
058a1b7a 3621 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3622 data.promoted_mode,
3623 data.passed_type,
3624 &pretend_bytes,
3625 false);
3cc70dc3 3626 assign_bounds (bndargs, all, false, true, true);
3627 bndargs.release ();
058a1b7a 3628 }
3629 }
3630
35a569c6 3631 /* Update info on where next arg arrives in registers. */
39cba157 3632 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
f387af4f 3633 data.passed_type, data.named_arg);
35a569c6 3634
058a1b7a 3635 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3636 bound_no++;
915e81b8 3637 }
3638
3cc70dc3 3639 assign_bounds (bndargs, all, true, true, true);
3640 bndargs.release ();
058a1b7a 3641
3e992c41 3642 if (targetm.calls.split_complex_arg)
e6427ef0 3643 assign_parms_unsplit_complex (&all, fnargs);
35a569c6 3644
f1f41a6c 3645 fnargs.release ();
3e992c41 3646
b8f621ce 3647 /* Output all parameter conversion instructions (possibly including calls)
3648 now that all parameters have been copied out of hard registers. */
28bf151d 3649 emit_insn (all.first_conversion_insn);
b8f621ce 3650
27a7a23a 3651 /* Estimate reload stack alignment from scalar return mode. */
3652 if (SUPPORTS_STACK_ALIGNMENT)
3653 {
3654 if (DECL_RESULT (fndecl))
3655 {
3656 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3754d046 3657 machine_mode mode = TYPE_MODE (type);
27a7a23a 3658
3659 if (mode != BLKmode
3660 && mode != VOIDmode
3661 && !AGGREGATE_TYPE_P (type))
3662 {
3663 unsigned int align = GET_MODE_ALIGNMENT (mode);
3664 if (crtl->stack_alignment_estimated < align)
3665 {
3666 gcc_assert (!crtl->stack_realign_processed);
3667 crtl->stack_alignment_estimated = align;
3668 }
3669 }
48e1416a 3670 }
27a7a23a 3671 }
3672
ba133423 3673 /* If we are receiving a struct value address as the first argument, set up
3674 the RTL for the function result. As this might require code to convert
3675 the transmitted address to Pmode, we do this here to ensure that possible
3676 preliminary conversions of the address have been emitted already. */
35a569c6 3677 if (all.function_result_decl)
ba133423 3678 {
35a569c6 3679 tree result = DECL_RESULT (current_function_decl);
3680 rtx addr = DECL_RTL (all.function_result_decl);
ba133423 3681 rtx x;
de1b648b 3682
806e4c12 3683 if (DECL_BY_REFERENCE (result))
4d5b4e6a 3684 {
3685 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3686 x = addr;
3687 }
806e4c12 3688 else
3689 {
4d5b4e6a 3690 SET_DECL_VALUE_EXPR (result,
3691 build1 (INDIRECT_REF, TREE_TYPE (result),
3692 all.function_result_decl));
806e4c12 3693 addr = convert_memory_address (Pmode, addr);
3694 x = gen_rtx_MEM (DECL_MODE (result), addr);
3695 set_mem_attributes (x, result, 1);
3696 }
4d5b4e6a 3697
3698 DECL_HAS_VALUE_EXPR_P (result) = 1;
3699
ba133423 3700 SET_DECL_RTL (result, x);
3701 }
3702
b0cdd2bb 3703 /* We have aligned all the args, so add space for the pretend args. */
abe32cce 3704 crtl->args.pretend_args_size = all.pretend_args_size;
35a569c6 3705 all.stack_args_size.constant += all.extra_pretend_bytes;
abe32cce 3706 crtl->args.size = all.stack_args_size.constant;
897b77d6 3707
3708 /* Adjust function incoming argument size for alignment and
3709 minimum length. */
3710
2e090bf6 3711 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
abe32cce 3712 crtl->args.size = CEIL_ROUND (crtl->args.size,
26be63dd 3713 PARM_BOUNDARY / BITS_PER_UNIT);
8967ddf7 3714
ccccd62c 3715 if (ARGS_GROW_DOWNWARD)
3716 {
3717 crtl->args.arg_offset_rtx
3718 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3719 : expand_expr (size_diffop (all.stack_args_size.var,
3720 size_int (-all.stack_args_size.constant)),
3721 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3722 }
3723 else
3724 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
897b77d6 3725
3726 /* See how many bytes, if any, of its args a function should try to pop
3727 on return. */
3728
f5bc28da 3729 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3730 TREE_TYPE (fndecl),
3731 crtl->args.size);
897b77d6 3732
ec195bc4 3733 /* For a stdarg.h function, save info about
3734 the regs and stack space used by the named args. */
897b77d6 3735
39cba157 3736 crtl->args.info = all.args_so_far_v;
897b77d6 3737
3738 /* Set the rtx used for the function return value. Put this in its
3739 own variable so any optimizers that need this information don't have
3740 to include tree.h. Do this here so it gets done when an inlined
3741 function gets output. */
3742
abe32cce 3743 crtl->return_rtx
0e8e37b2 3744 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3745 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
8839b7f1 3746
3747 /* If scalar return value was computed in a pseudo-reg, or was a named
3748 return value that got dumped to the stack, copy that to the hard
3749 return register. */
3750 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3751 {
3752 tree decl_result = DECL_RESULT (fndecl);
3753 rtx decl_rtl = DECL_RTL (decl_result);
3754
3755 if (REG_P (decl_rtl)
3756 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3757 : DECL_REGISTER (decl_result))
3758 {
3759 rtx real_decl_rtl;
3760
46b3ff29 3761 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3762 fndecl, true);
058a1b7a 3763 if (chkp_function_instrumented_p (fndecl))
3764 crtl->return_bnd
3765 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3766 fndecl, true);
8839b7f1 3767 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
abe32cce 3768 /* The delay slot scheduler assumes that crtl->return_rtx
8839b7f1 3769 holds the hard register containing the return value, not a
3770 temporary pseudo. */
abe32cce 3771 crtl->return_rtx = real_decl_rtl;
8839b7f1 3772 }
3773 }
897b77d6 3774}
6b275368 3775
3776/* A subroutine of gimplify_parameters, invoked via walk_tree.
3777 For all seen types, gimplify their sizes. */
3778
3779static tree
3780gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3781{
3782 tree t = *tp;
3783
3784 *walk_subtrees = 0;
3785 if (TYPE_P (t))
3786 {
3787 if (POINTER_TYPE_P (t))
3788 *walk_subtrees = 1;
bc97b18f 3789 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3790 && !TYPE_SIZES_GIMPLIFIED (t))
6b275368 3791 {
75a70cf9 3792 gimplify_type_sizes (t, (gimple_seq *) data);
6b275368 3793 *walk_subtrees = 1;
3794 }
3795 }
3796
3797 return NULL;
3798}
3799
3800/* Gimplify the parameter list for current_function_decl. This involves
3801 evaluating SAVE_EXPRs of variable sized parameters and generating code
75a70cf9 3802 to implement callee-copied reference parameters. Returns a sequence of
3803 statements to add to the beginning of the function. */
6b275368 3804
75a70cf9 3805gimple_seq
6b275368 3806gimplify_parameters (void)
3807{
3808 struct assign_parm_data_all all;
3e992c41 3809 tree parm;
75a70cf9 3810 gimple_seq stmts = NULL;
f1f41a6c 3811 vec<tree> fnargs;
3e992c41 3812 unsigned i;
6b275368 3813
3814 assign_parms_initialize_all (&all);
3815 fnargs = assign_parms_augmented_arg_list (&all);
3816
f1f41a6c 3817 FOR_EACH_VEC_ELT (fnargs, i, parm)
6b275368 3818 {
3819 struct assign_parm_data_one data;
3820
3821 /* Extract the type of PARM; adjust it according to ABI. */
3822 assign_parm_find_data_types (&all, parm, &data);
3823
3824 /* Early out for errors and void parameters. */
3825 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3826 continue;
3827
3828 /* Update info on where next arg arrives in registers. */
39cba157 3829 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
f387af4f 3830 data.passed_type, data.named_arg);
6b275368 3831
3832 /* ??? Once upon a time variable_size stuffed parameter list
3833 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3834 turned out to be less than manageable in the gimple world.
3835 Now we have to hunt them down ourselves. */
3836 walk_tree_without_duplicates (&data.passed_type,
3837 gimplify_parm_type, &stmts);
3838
4852b829 3839 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
6b275368 3840 {
3841 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3842 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3843 }
3844
3845 if (data.passed_pointer)
3846 {
3847 tree type = TREE_TYPE (data.passed_type);
39cba157 3848 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
6b275368 3849 type, data.named_arg))
3850 {
3851 tree local, t;
3852
4852b829 3853 /* For constant-sized objects, this is trivial; for
6b275368 3854 variable-sized objects, we have to play games. */
4852b829 3855 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3856 && !(flag_stack_check == GENERIC_STACK_CHECK
3857 && compare_tree_int (DECL_SIZE_UNIT (parm),
3858 STACK_CHECK_MAX_VAR_SIZE) > 0))
6b275368 3859 {
63e6b59a 3860 local = create_tmp_var (type, get_name (parm));
6b275368 3861 DECL_IGNORED_P (local) = 0;
ab349ddd 3862 /* If PARM was addressable, move that flag over
3863 to the local copy, as its address will be taken,
3864 not the PARM's. Keep the PARM's address-taken flag too,
5a715a82 3865 as we'll query that flag during gimplification. */
ab349ddd 3866 if (TREE_ADDRESSABLE (parm))
5a715a82 3867 TREE_ADDRESSABLE (local) = 1;
63e6b59a 3868 else if (TREE_CODE (type) == COMPLEX_TYPE
3869 || TREE_CODE (type) == VECTOR_TYPE)
3870 DECL_GIMPLE_REG_P (local) = 1;
6b275368 3871 }
3872 else
3873 {
c2f47e15 3874 tree ptr_type, addr;
6b275368 3875
3876 ptr_type = build_pointer_type (type);
599548a7 3877 addr = create_tmp_reg (ptr_type, get_name (parm));
6b275368 3878 DECL_IGNORED_P (addr) = 0;
3879 local = build_fold_indirect_ref (addr);
3880
b9a16870 3881 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4f986f8b 3882 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
581bf1c2 3883 size_int (DECL_ALIGN (parm)));
3884
990495a7 3885 /* The call has been built for a variable-sized object. */
a882d754 3886 CALL_ALLOCA_FOR_VAR_P (t) = 1;
6b275368 3887 t = fold_convert (ptr_type, t);
75a70cf9 3888 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
6b275368 3889 gimplify_and_add (t, &stmts);
3890 }
3891
75a70cf9 3892 gimplify_assign (local, parm, &stmts);
6b275368 3893
75fa4f82 3894 SET_DECL_VALUE_EXPR (parm, local);
3895 DECL_HAS_VALUE_EXPR_P (parm) = 1;
6b275368 3896 }
3897 }
3898 }
3899
f1f41a6c 3900 fnargs.release ();
3e992c41 3901
6b275368 3902 return stmts;
3903}
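
/* Illustrative sketch (not part of GCC): two kinds of parameters the routine
   above has to work on.  For the variable-length array A, the size
   expression N * M must be gimplified so it is evaluated before the body
   runs; and on a target where reference_callee_copied is true for a struct
   passed by invisible reference, a local copy of the argument is created at
   function entry.  The names below are hypothetical.  */

struct s_example { int v[16]; };

void
vla_parm_example (int n, int m, double a[n][m], struct s_example s)
{
  a[0][0] = s.v[0];
}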
96b1130a 3904\f
897b77d6 3905/* Compute the size and offset from the start of the stacked arguments for a
3906 parm passed in mode PASSED_MODE and with type TYPE.
3907
3908 INITIAL_OFFSET_PTR points to the current offset into the stacked
3909 arguments.
3910
241399f6 3911 The starting offset and size for this parm are returned in
3912 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3913 nonzero, the offset is that of the stack slot, which is returned in
3914 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3915 padding required from the initial offset ptr to the stack slot.
897b77d6 3916
6ef828f9 3917 IN_REGS is nonzero if the argument will be passed in registers. It will
897b77d6 3918 never be set if REG_PARM_STACK_SPACE is not defined.
3919
2e090bf6 3920 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3921 for arguments which are passed in registers.
3922
897b77d6 3923 FNDECL is the function in which the argument was defined.
3924
3925 There are two types of rounding that are done. The first, controlled by
bd99ba64 3926 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3927 argument list to be aligned to the specific boundary (in bits). This
3928 rounding affects the initial and starting offsets, but not the argument
3929 size.
897b77d6 3930
3931 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3932 optionally rounds the size of the parm to PARM_BOUNDARY. The
3933 initial offset is not affected by this rounding, while the size always
3934 is and the starting offset may be. */
3935
241399f6 3936/* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3937 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
897b77d6 3938 callers pass in the total size of args so far as
241399f6 3939 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
897b77d6 3940
897b77d6 3941void
3754d046 3942locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
2e090bf6 3943 int reg_parm_stack_space, int partial,
3944 tree fndecl ATTRIBUTE_UNUSED,
de1b648b 3945 struct args_size *initial_offset_ptr,
3946 struct locate_and_pad_arg_data *locate)
897b77d6 3947{
241399f6 3948 tree sizetree;
3949 enum direction where_pad;
17bfc2bc 3950 unsigned int boundary, round_boundary;
241399f6 3951 int part_size_in_regs;
897b77d6 3952
897b77d6 3953 /* If we have found a stack parm before we reach the end of the
3954 area reserved for registers, skip that area. */
3955 if (! in_regs)
3956 {
897b77d6 3957 if (reg_parm_stack_space > 0)
3958 {
3959 if (initial_offset_ptr->var)
3960 {
3961 initial_offset_ptr->var
3962 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
902de8ed 3963 ssize_int (reg_parm_stack_space));
897b77d6 3964 initial_offset_ptr->constant = 0;
3965 }
3966 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3967 initial_offset_ptr->constant = reg_parm_stack_space;
3968 }
3969 }
897b77d6 3970
f054eb3c 3971 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
241399f6 3972
3973 sizetree
3974 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3975 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
bd99ba64 3976 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
17bfc2bc 3977 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3978 type);
5f4cd670 3979 locate->where_pad = where_pad;
27a7a23a 3980
3981 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3982 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3983 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3984
c5dc0c32 3985 locate->boundary = boundary;
897b77d6 3986
27a7a23a 3987 if (SUPPORTS_STACK_ALIGNMENT)
3988 {
3989 /* stack_alignment_estimated can't change after stack has been
3990 realigned. */
3991 if (crtl->stack_alignment_estimated < boundary)
3992 {
3993 if (!crtl->stack_realign_processed)
3994 crtl->stack_alignment_estimated = boundary;
3995 else
3996 {
3997 /* If stack is realigned and stack alignment value
3998 hasn't been finalized, it is OK not to increase
3999 stack_alignment_estimated. The bigger alignment
4000 requirement is recorded in stack_alignment_needed
4001 below. */
4002 gcc_assert (!crtl->stack_realign_finalized
4003 && crtl->stack_realign_needed);
4004 }
4005 }
4006 }
4007
90ab54b2 4008 /* Remember if the outgoing parameter requires extra alignment on the
4009 calling function side. */
edb7afe8 4010 if (crtl->stack_alignment_needed < boundary)
4011 crtl->stack_alignment_needed = boundary;
27a7a23a 4012 if (crtl->preferred_stack_boundary < boundary)
4013 crtl->preferred_stack_boundary = boundary;
90ab54b2 4014
ccccd62c 4015 if (ARGS_GROW_DOWNWARD)
4016 {
4017 locate->slot_offset.constant = -initial_offset_ptr->constant;
4018 if (initial_offset_ptr->var)
4019 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4020 initial_offset_ptr->var);
4021
4022 {
4023 tree s2 = sizetree;
4024 if (where_pad != none
4025 && (!tree_fits_uhwi_p (sizetree)
4026 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4027 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4028 SUB_PARM_SIZE (locate->slot_offset, s2);
4029 }
4030
4031 locate->slot_offset.constant += part_size_in_regs;
4032
4033 if (!in_regs || reg_parm_stack_space > 0)
4034 pad_to_arg_alignment (&locate->slot_offset, boundary,
4035 &locate->alignment_pad);
4036
4037 locate->size.constant = (-initial_offset_ptr->constant
4038 - locate->slot_offset.constant);
4039 if (initial_offset_ptr->var)
4040 locate->size.var = size_binop (MINUS_EXPR,
4041 size_binop (MINUS_EXPR,
4042 ssize_int (0),
4043 initial_offset_ptr->var),
4044 locate->slot_offset.var);
4045
4046 /* Pad_below needs the pre-rounded size to know how much to pad
4047 below. */
4048 locate->offset = locate->slot_offset;
4049 if (where_pad == downward)
4050 pad_below (&locate->offset, passed_mode, sizetree);
4051
4052 }
4053 else
4054 {
4055 if (!in_regs || reg_parm_stack_space > 0)
4056 pad_to_arg_alignment (initial_offset_ptr, boundary,
4057 &locate->alignment_pad);
4058 locate->slot_offset = *initial_offset_ptr;
897b77d6 4059
4060#ifdef PUSH_ROUNDING
ccccd62c 4061 if (passed_mode != BLKmode)
4062 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
897b77d6 4063#endif
4064
ccccd62c 4065 /* Pad_below needs the pre-rounded size to know how much to pad below
4066 so this must be done before rounding up. */
4067 locate->offset = locate->slot_offset;
4068 if (where_pad == downward)
4069 pad_below (&locate->offset, passed_mode, sizetree);
82f48b55 4070
ccccd62c 4071 if (where_pad != none
4072 && (!tree_fits_uhwi_p (sizetree)
4073 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4074 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
897b77d6 4075
ccccd62c 4076 ADD_PARM_SIZE (locate->size, sizetree);
241399f6 4077
ccccd62c 4078 locate->size.constant -= part_size_in_regs;
4079 }
b704e80f 4080
4081#ifdef FUNCTION_ARG_OFFSET
4082 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4083#endif
897b77d6 4084}
4085
ba585215 4086/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4087 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4088
897b77d6 4089static void
de1b648b 4090pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4091 struct args_size *alignment_pad)
897b77d6 4092{
ef2c4a29 4093 tree save_var = NULL_TREE;
4094 HOST_WIDE_INT save_constant = 0;
5cf5baa2 4095 int boundary_in_bytes = boundary / BITS_PER_UNIT;
891a1732 4096 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4097
4098#ifdef SPARC_STACK_BOUNDARY_HACK
1aecae7f 4099 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4100 the real alignment of %sp. However, when it does this, the
4101 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
891a1732 4102 if (SPARC_STACK_BOUNDARY_HACK)
4103 sp_offset = 0;
4104#endif
9d855d2f 4105
b3f75873 4106 if (boundary > PARM_BOUNDARY)
9d855d2f 4107 {
4108 save_var = offset_ptr->var;
4109 save_constant = offset_ptr->constant;
4110 }
4111
4112 alignment_pad->var = NULL_TREE;
4113 alignment_pad->constant = 0;
9d855d2f 4114
897b77d6 4115 if (boundary > BITS_PER_UNIT)
4116 {
4117 if (offset_ptr->var)
4118 {
891a1732 4119 tree sp_offset_tree = ssize_int (sp_offset);
4120 tree offset = size_binop (PLUS_EXPR,
4121 ARGS_SIZE_TREE (*offset_ptr),
4122 sp_offset_tree);
ccccd62c 4123 tree rounded;
4124 if (ARGS_GROW_DOWNWARD)
4125 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4126 else
4127 rounded = round_up (offset, boundary / BITS_PER_UNIT);
891a1732 4128
4129 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
241399f6 4130 /* ARGS_SIZE_TREE includes constant term. */
4131 offset_ptr->constant = 0;
b3f75873 4132 if (boundary > PARM_BOUNDARY)
d3371fcd 4133 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
902de8ed 4134 save_var);
897b77d6 4135 }
4136 else
06ebc183 4137 {
891a1732 4138 offset_ptr->constant = -sp_offset +
9e37e96e 4139 (ARGS_GROW_DOWNWARD
4140 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4141 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
ccccd62c 4142
b3f75873 4143 if (boundary > PARM_BOUNDARY)
06ebc183 4144 alignment_pad->constant = offset_ptr->constant - save_constant;
4145 }
897b77d6 4146 }
4147}
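
/* Illustrative sketch (plain C, not part of GCC): the arithmetic performed
   on the constant part of the offset above.  The offset is biased by the
   stack pointer offset, rounded up to the boundary in bytes (rounded down
   when arguments grow downward), and then un-biased again.  The macro below
   is a local stand-in for CEIL_ROUND, valid for power-of-two alignments.  */

#define CEIL_ROUND_EXAMPLE(VALUE, ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

static long
pad_constant_offset_example (long offset, long sp_offset, long boundary_in_bytes)
{
  return -sp_offset
	 + CEIL_ROUND_EXAMPLE (offset + sp_offset, boundary_in_bytes);
}

/* For example, with offset 3, sp_offset 0 and an 8-byte boundary the
   padded offset is 8.  */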
4148
4149static void
3754d046 4150pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
897b77d6 4151{
4152 if (passed_mode != BLKmode)
4153 {
4154 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4155 offset_ptr->constant
4156 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4157 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4158 - GET_MODE_SIZE (passed_mode));
4159 }
4160 else
4161 {
4162 if (TREE_CODE (sizetree) != INTEGER_CST
4163 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4164 {
4165 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4166 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4167 /* Add it in. */
4168 ADD_PARM_SIZE (*offset_ptr, s2);
4169 SUB_PARM_SIZE (*offset_ptr, sizetree);
4170 }
4171 }
4172}
897b77d6 4173\f
897b77d6 4174
3072d30e 4175/* True if register REGNO was alive at a place where `setjmp' was
4176 called and was set more than once or is an argument. Such regs may
4177 be clobbered by `longjmp'. */
4178
4179static bool
4180regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4181{
4182 /* There appear to be cases where some local vars never reach the
4183 backend but have bogus regnos. */
4184 if (regno >= max_reg_num ())
4185 return false;
4186
4187 return ((REG_N_SETS (regno) > 1
34154e27 4188 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4189 regno))
3072d30e 4190 && REGNO_REG_SET_P (setjmp_crosses, regno));
4191}
4192
4193/* Walk the tree of blocks describing the binding levels within a
4194 function and warn about variables that might be killed by setjmp or
4195 vfork. This is done after flow analysis and before register
4196 allocation, since register allocation will replace the pseudo-regs
4197 with hard regs. */
4198
4199static void
4200setjmp_vars_warning (bitmap setjmp_crosses, tree block)
897b77d6 4201{
19cb6b50 4202 tree decl, sub;
4ee9c684 4203
1767a056 4204 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
897b77d6 4205 {
4ee9c684 4206 if (TREE_CODE (decl) == VAR_DECL
49bf95f0 4207 && DECL_RTL_SET_P (decl)
8ad4c111 4208 && REG_P (DECL_RTL (decl))
3072d30e 4209 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
48e1416a 4210 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
0d438110 4211 " %<longjmp%> or %<vfork%>", decl);
897b77d6 4212 }
4ee9c684 4213
93110716 4214 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3072d30e 4215 setjmp_vars_warning (setjmp_crosses, sub);
897b77d6 4216}
4217
4ee9c684 4218/* Do the appropriate part of setjmp_vars_warning
897b77d6 4219 but for arguments instead of local variables. */
4220
3072d30e 4221static void
4222setjmp_args_warning (bitmap setjmp_crosses)
897b77d6 4223{
19cb6b50 4224 tree decl;
897b77d6 4225 for (decl = DECL_ARGUMENTS (current_function_decl);
1767a056 4226 decl; decl = DECL_CHAIN (decl))
897b77d6 4227 if (DECL_RTL (decl) != 0
8ad4c111 4228 && REG_P (DECL_RTL (decl))
3072d30e 4229 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
48e1416a 4230 warning (OPT_Wclobbered,
0d438110 4231 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3cf8b391 4232 decl);
897b77d6 4233}
4234
3072d30e 4235/* Generate warning messages for variables live across setjmp. */
4236
48e1416a 4237void
3072d30e 4238generate_setjmp_warnings (void)
4239{
4240 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4241
a28770e1 4242 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
3072d30e 4243 || bitmap_empty_p (setjmp_crosses))
4244 return;
4245
4246 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4247 setjmp_args_warning (setjmp_crosses);
4248}
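
/* Illustrative sketch (user-level C, not part of GCC): the situation the
   warnings above diagnose.  A local that lives in a call-saved register and
   is modified between setjmp and longjmp has an indeterminate value after
   the jump unless it is declared volatile.  */

#include <setjmp.h>

static jmp_buf env_example;

static int
setjmp_clobber_example (void)
{
  int clobbered = 0;          /* may be kept in a register across setjmp */
  volatile int safe = 0;      /* forced to memory; survives the longjmp */
  if (setjmp (env_example) == 0)
    {
      clobbered = 1;
      safe = 1;
      longjmp (env_example, 1);
    }
  return clobbered + safe;    /* "safe" is 1; "clobbered" is unspecified */
}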
4249
897b77d6 4250\f
d6263c49 4251/* Reverse the order of elements in the fragment chain T of blocks,
665611e7 4252 and return the new head of the chain (old last element).
4253 In addition to that clear BLOCK_SAME_RANGE flags when needed
4254 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4255 its super fragment origin. */
d6263c49 4256
4257static tree
4258block_fragments_nreverse (tree t)
4259{
665611e7 4260 tree prev = 0, block, next, prev_super = 0;
4261 tree super = BLOCK_SUPERCONTEXT (t);
4262 if (BLOCK_FRAGMENT_ORIGIN (super))
4263 super = BLOCK_FRAGMENT_ORIGIN (super);
d6263c49 4264 for (block = t; block; block = next)
4265 {
4266 next = BLOCK_FRAGMENT_CHAIN (block);
4267 BLOCK_FRAGMENT_CHAIN (block) = prev;
665611e7 4268 if ((prev && !BLOCK_SAME_RANGE (prev))
4269 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4270 != prev_super))
4271 BLOCK_SAME_RANGE (block) = 0;
4272 prev_super = BLOCK_SUPERCONTEXT (block);
4273 BLOCK_SUPERCONTEXT (block) = super;
d6263c49 4274 prev = block;
4275 }
665611e7 4276 t = BLOCK_FRAGMENT_ORIGIN (t);
4277 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4278 != prev_super)
4279 BLOCK_SAME_RANGE (t) = 0;
4280 BLOCK_SUPERCONTEXT (t) = super;
d6263c49 4281 return prev;
4282}
4283
4284/* Reverse the order of elements in the chain T of blocks,
4285 and return the new head of the chain (old last element).
4286 Also do the same on subblocks and reverse the order of elements
4287 in BLOCK_FRAGMENT_CHAIN as well. */
4288
4289static tree
4290blocks_nreverse_all (tree t)
4291{
4292 tree prev = 0, block, next;
4293 for (block = t; block; block = next)
4294 {
4295 next = BLOCK_CHAIN (block);
4296 BLOCK_CHAIN (block) = prev;
d6263c49 4297 if (BLOCK_FRAGMENT_CHAIN (block)
4298 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
665611e7 4299 {
4300 BLOCK_FRAGMENT_CHAIN (block)
4301 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4302 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4303 BLOCK_SAME_RANGE (block) = 0;
4304 }
4305 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
d6263c49 4306 prev = block;
4307 }
4308 return prev;
4309}
4310
4311
a36145ca 4312/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4313 and create duplicate blocks. */
4314/* ??? Need an option to either create block fragments or to create
4315 abstract origin duplicates of a source block. It really depends
4316 on what optimization has been performed. */
11b373ff 4317
f1ab82be 4318void
de1b648b 4319reorder_blocks (void)
11b373ff 4320{
f1ab82be 4321 tree block = DECL_INITIAL (current_function_decl);
11b373ff 4322
0c45344e 4323 if (block == NULL_TREE)
f1ab82be 4324 return;
9d819987 4325
4997014d 4326 auto_vec<tree, 10> block_stack;
5846cb0f 4327
a36145ca 4328 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4ee9c684 4329 clear_block_marks (block);
a36145ca 4330
f1ab82be 4331 /* Prune the old trees away, so that they don't get in the way. */
4332 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4333 BLOCK_CHAIN (block) = NULL_TREE;
9d819987 4334
a36145ca 4335 /* Recreate the block tree from the note nesting. */
f1ab82be 4336 reorder_blocks_1 (get_insns (), block, &block_stack);
d6263c49 4337 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
11b373ff 4338}
4339
a36145ca 4340/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
60ecc450 4341
4ee9c684 4342void
4343clear_block_marks (tree block)
5e960ca9 4344{
a36145ca 4345 while (block)
5e960ca9 4346 {
a36145ca 4347 TREE_ASM_WRITTEN (block) = 0;
4ee9c684 4348 clear_block_marks (BLOCK_SUBBLOCKS (block));
a36145ca 4349 block = BLOCK_CHAIN (block);
5e960ca9 4350 }
4351}
4352
60ecc450 4353static void
8bb2625b 4354reorder_blocks_1 (rtx_insn *insns, tree current_block,
4355 vec<tree> *p_block_stack)
60ecc450 4356{
8bb2625b 4357 rtx_insn *insn;
665611e7 4358 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
60ecc450 4359
4360 for (insn = insns; insn; insn = NEXT_INSN (insn))
4361 {
6d7dc5b9 4362 if (NOTE_P (insn))
60ecc450 4363 {
ad4583d9 4364 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
60ecc450 4365 {
4366 tree block = NOTE_BLOCK (insn);
70392493 4367 tree origin;
4368
d6263c49 4369 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4370 origin = block;
a36145ca 4371
665611e7 4372 if (prev_end)
4373 BLOCK_SAME_RANGE (prev_end) = 0;
4374 prev_end = NULL_TREE;
4375
a36145ca 4376 /* If we have seen this block before, that means it now
4377 spans multiple address regions. Create a new fragment. */
60ecc450 4378 if (TREE_ASM_WRITTEN (block))
4379 {
a36145ca 4380 tree new_block = copy_node (block);
a36145ca 4381
665611e7 4382 BLOCK_SAME_RANGE (new_block) = 0;
a36145ca 4383 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4384 BLOCK_FRAGMENT_CHAIN (new_block)
4385 = BLOCK_FRAGMENT_CHAIN (origin);
4386 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4387
4388 NOTE_BLOCK (insn) = new_block;
4389 block = new_block;
60ecc450 4390 }
a36145ca 4391
665611e7 4392 if (prev_beg == current_block && prev_beg)
4393 BLOCK_SAME_RANGE (block) = 1;
4394
4395 prev_beg = origin;
4396
60ecc450 4397 BLOCK_SUBBLOCKS (block) = 0;
4398 TREE_ASM_WRITTEN (block) = 1;
31ddae9f 4399 /* When there's only one block for the entire function,
4400 current_block == block and we mustn't do this, it
4401 will cause infinite recursion. */
4402 if (block != current_block)
4403 {
665611e7 4404 tree super;
70392493 4405 if (block != origin)
665611e7 4406 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4407 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4408 (origin))
4409 == current_block);
f1f41a6c 4410 if (p_block_stack->is_empty ())
665611e7 4411 super = current_block;
4412 else
4413 {
f1f41a6c 4414 super = p_block_stack->last ();
665611e7 4415 gcc_assert (super == current_block
4416 || BLOCK_FRAGMENT_ORIGIN (super)
4417 == current_block);
4418 }
4419 BLOCK_SUPERCONTEXT (block) = super;
31ddae9f 4420 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4421 BLOCK_SUBBLOCKS (current_block) = block;
70392493 4422 current_block = origin;
31ddae9f 4423 }
f1f41a6c 4424 p_block_stack->safe_push (block);
60ecc450 4425 }
ad4583d9 4426 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
60ecc450 4427 {
f1f41a6c 4428 NOTE_BLOCK (insn) = p_block_stack->pop ();
60ecc450 4429 current_block = BLOCK_SUPERCONTEXT (current_block);
665611e7 4430 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4431 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4432 prev_beg = NULL_TREE;
4433 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4434 ? NOTE_BLOCK (insn) : NULL_TREE;
60ecc450 4435 }
4436 }
665611e7 4437 else
4438 {
4439 prev_beg = NULL_TREE;
4440 if (prev_end)
4441 BLOCK_SAME_RANGE (prev_end) = 0;
4442 prev_end = NULL_TREE;
4443 }
60ecc450 4444 }
4445}
4446
11b373ff 4447/* Reverse the order of elements in the chain T of blocks,
4448 and return the new head of the chain (old last element). */
4449
4ee9c684 4450tree
de1b648b 4451blocks_nreverse (tree t)
11b373ff 4452{
d6263c49 4453 tree prev = 0, block, next;
4454 for (block = t; block; block = next)
11b373ff 4455 {
d6263c49 4456 next = BLOCK_CHAIN (block);
4457 BLOCK_CHAIN (block) = prev;
4458 prev = block;
11b373ff 4459 }
4460 return prev;
4461}
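
/* Illustrative sketch (plain C, not part of GCC): the pointer-reversal idiom
   shared by block_fragments_nreverse, blocks_nreverse_all and blocks_nreverse
   above.  Walk the chain once, repointing each node's chain field at its
   predecessor, and return the old tail as the new head.  The node type is
   hypothetical.  */

struct chain_node_example { struct chain_node_example *chain; };

static struct chain_node_example *
nreverse_example (struct chain_node_example *t)
{
  struct chain_node_example *prev = 0, *node, *next;
  for (node = t; node; node = next)
    {
      next = node->chain;
      node->chain = prev;
      prev = node;
    }
  return prev;
}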
4462
2149d019 4463/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4464 by modifying the last node in chain 1 to point to chain 2. */
4465
4466tree
4467block_chainon (tree op1, tree op2)
4468{
4469 tree t1;
4470
4471 if (!op1)
4472 return op2;
4473 if (!op2)
4474 return op1;
4475
4476 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4477 continue;
4478 BLOCK_CHAIN (t1) = op2;
4479
4480#ifdef ENABLE_TREE_CHECKING
4481 {
4482 tree t2;
4483 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4484 gcc_assert (t2 != t1);
4485 }
4486#endif
4487
4488 return op1;
4489}
4490
5846cb0f 4491/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4492 non-NULL, list them all into VECTOR, in a depth-first preorder
4493 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
396bfb69 4494 blocks. */
11b373ff 4495
4496static int
de1b648b 4497all_blocks (tree block, tree *vector)
11b373ff 4498{
396bfb69 4499 int n_blocks = 0;
4500
874a9b8d 4501 while (block)
4502 {
4503 TREE_ASM_WRITTEN (block) = 0;
396bfb69 4504
874a9b8d 4505 /* Record this block. */
4506 if (vector)
4507 vector[n_blocks] = block;
396bfb69 4508
874a9b8d 4509 ++n_blocks;
06ebc183 4510
874a9b8d 4511 /* Record the subblocks, and their subblocks... */
4512 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4513 vector ? vector + n_blocks : 0);
4514 block = BLOCK_CHAIN (block);
4515 }
11b373ff 4516
4517 return n_blocks;
4518}
5846cb0f 4519
4520/* Return a vector containing all the blocks rooted at BLOCK. The
4521 number of elements in the vector is stored in N_BLOCKS_P. The
4522 vector is dynamically allocated; it is the caller's responsibility
4523 to call `free' on the pointer returned. */
06ebc183 4524
5846cb0f 4525static tree *
de1b648b 4526get_block_vector (tree block, int *n_blocks_p)
5846cb0f 4527{
4528 tree *block_vector;
4529
4530 *n_blocks_p = all_blocks (block, NULL);
4c36ffe6 4531 block_vector = XNEWVEC (tree, *n_blocks_p);
5846cb0f 4532 all_blocks (block, block_vector);
4533
4534 return block_vector;
4535}
4536
177c2ebc 4537static GTY(()) int next_block_index = 2;
5846cb0f 4538
4539/* Set BLOCK_NUMBER for all the blocks in FN. */
4540
4541void
de1b648b 4542number_blocks (tree fn)
5846cb0f 4543{
4544 int i;
4545 int n_blocks;
4546 tree *block_vector;
4547
4548 /* For SDB and XCOFF debugging output, we start numbering the blocks
4549 from 1 within each function, rather than keeping a running
4550 count. */
4551#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
0eb76379 4552 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4553 next_block_index = 1;
5846cb0f 4554#endif
4555
4556 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4557
4558 /* The top-level BLOCK isn't numbered at all. */
4559 for (i = 1; i < n_blocks; ++i)
4560 /* We number the blocks from two. */
4561 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4562
4563 free (block_vector);
4564
4565 return;
4566}
baa8dec7 4567
4568/* If VAR is present in a subblock of BLOCK, return the subblock. */
4569
4b987fac 4570DEBUG_FUNCTION tree
de1b648b 4571debug_find_var_in_block_tree (tree var, tree block)
baa8dec7 4572{
4573 tree t;
4574
4575 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4576 if (t == var)
4577 return block;
4578
4579 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4580 {
4581 tree ret = debug_find_var_in_block_tree (var, t);
4582 if (ret)
4583 return ret;
4584 }
4585
4586 return NULL_TREE;
4587}
11b373ff 4588\f
87d4aa85 4589/* Keep track of whether we're in a dummy function context. If we are,
4590 we don't want to invoke the set_current_function hook, because we'll
4591 get into trouble if the hook calls target_reinit () recursively or
4592 when the initial initialization is not yet complete. */
4593
4594static bool in_dummy_function;
4595
46f8e3b0 4596/* Invoke the target hook when setting cfun. Update the optimization options
4597 if the function uses different options than the default. */
87d4aa85 4598
4599static void
4600invoke_set_current_function_hook (tree fndecl)
4601{
4602 if (!in_dummy_function)
46f8e3b0 4603 {
4604 tree opts = ((fndecl)
4605 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4606 : optimization_default_node);
4607
4608 if (!opts)
4609 opts = optimization_default_node;
4610
4611 /* Change optimization options if needed. */
4612 if (optimization_current_node != opts)
4613 {
4614 optimization_current_node = opts;
2c5d2e39 4615 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
46f8e3b0 4616 }
4617
6eaab580 4618 targetm.set_current_function (fndecl);
9d3fa937 4619 this_fn_optabs = this_target_optabs;
08c7d04b 4620
9d3fa937 4621 if (opts != optimization_default_node)
08c7d04b 4622 {
9d3fa937 4623 init_tree_optimization_optabs (opts);
4624 if (TREE_OPTIMIZATION_OPTABS (opts))
4625 this_fn_optabs = (struct target_optabs *)
4626 TREE_OPTIMIZATION_OPTABS (opts);
08c7d04b 4627 }
46f8e3b0 4628 }
87d4aa85 4629}
4630
4631/* cfun should never be set directly; use this function. */
4632
4633void
4634set_cfun (struct function *new_cfun)
4635{
4636 if (cfun != new_cfun)
4637 {
4638 cfun = new_cfun;
4639 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4640 }
4641}
4642
87d4aa85 4643/* Initialized with NOGC, making this poisonous to the garbage collector. */
4644
f1f41a6c 4645static vec<function_p> cfun_stack;
87d4aa85 4646
9078126c 4647/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4648 current_function_decl accordingly. */
87d4aa85 4649
4650void
4651push_cfun (struct function *new_cfun)
4652{
9078126c 4653 gcc_assert ((!cfun && !current_function_decl)
4654 || (cfun && current_function_decl == cfun->decl));
f1f41a6c 4655 cfun_stack.safe_push (cfun);
9078126c 4656 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
87d4aa85 4657 set_cfun (new_cfun);
4658}
4659
9078126c 4660/* Pop cfun from the stack. Also set current_function_decl accordingly. */
87d4aa85 4661
4662void
4663pop_cfun (void)
4664{
f1f41a6c 4665 struct function *new_cfun = cfun_stack.pop ();
9078126c 4666 /* When in_dummy_function, we do have a cfun but current_function_decl is
4667 NULL. We also allow pushing NULL cfun and subsequently changing
4668 current_function_decl to something else and have both restored by
4669 pop_cfun. */
4670 gcc_checking_assert (in_dummy_function
4671 || !cfun
4672 || current_function_decl == cfun->decl);
3c9dcda1 4673 set_cfun (new_cfun);
9078126c 4674 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
87d4aa85 4675}
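
/* Illustrative sketch (not part of GCC): the usual save/restore pattern
   built on push_cfun and pop_cfun above.  The surrounding helper name is
   hypothetical.  */

static void
with_function_context_example (struct function *fn)
{
  push_cfun (fn);
  /* ... inspect or transform FN with cfun and current_function_decl set ... */
  pop_cfun ();
}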
a3adcd4a 4676
4677/* Return the current value of funcdef_no and increment it. */
4678int
48e1416a 4679get_next_funcdef_no (void)
a3adcd4a 4680{
4681 return funcdef_no++;
4682}
4683
1ad3e14c 4684/* Return the current value of funcdef_no. */
4685int
4686get_last_funcdef_no (void)
4687{
4688 return funcdef_no;
4689}
4690
ecc82929 4691/* Allocate a function structure for FNDECL and set its contents
87d4aa85 4692 to the defaults. Set cfun to the newly-allocated object.
4693 Some of the helper functions invoked during initialization assume
4694 that cfun has already been set. Therefore, assign the new object
4695 directly into cfun and invoke the back end hook explicitly at the
4696 very end, rather than initializing a temporary and calling set_cfun
4697 on it.
80f2ef47 4698
4699 ABSTRACT_P is true if this is a function that will never be seen by
4700 the middle-end. Such functions are front-end concepts (like C++
4701 function templates) that do not correspond directly to functions
4702 placed in object files. */
942cc45f 4703
ecc82929 4704void
80f2ef47 4705allocate_struct_function (tree fndecl, bool abstract_p)
897b77d6 4706{
4ee9c684 4707 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
897b77d6 4708
25a27413 4709 cfun = ggc_cleared_alloc<function> ();
304c5bf1 4710
ecc82929 4711 init_eh_for_function ();
897b77d6 4712
ecc82929 4713 if (init_machine_status)
4714 cfun->machine = (*init_machine_status) ();
26df1c5e 4715
d3feb168 4716#ifdef OVERRIDE_ABI_FORMAT
4717 OVERRIDE_ABI_FORMAT (fndecl);
4718#endif
4719
22c61100 4720 if (fndecl != NULL_TREE)
ecc82929 4721 {
87d4aa85 4722 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4723 cfun->decl = fndecl;
285aabd1 4724 current_function_funcdef_no = get_next_funcdef_no ();
a956a7a6 4725 }
4726
4727 invoke_set_current_function_hook (fndecl);
87d4aa85 4728
a956a7a6 4729 if (fndecl != NULL_TREE)
4730 {
4731 tree result = DECL_RESULT (fndecl);
80f2ef47 4732 if (!abstract_p && aggregate_value_p (result, fndecl))
87d4aa85 4733 {
ecc82929 4734#ifdef PCC_STATIC_STRUCT_RETURN
18d50ae6 4735 cfun->returns_pcc_struct = 1;
ecc82929 4736#endif
18d50ae6 4737 cfun->returns_struct = 1;
87d4aa85 4738 }
4739
257d99c3 4740 cfun->stdarg = stdarg_p (fntype);
48e1416a 4741
87d4aa85 4742 /* Assume all registers in stdarg functions need to be saved. */
4743 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4744 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
cbeb677e 4745
4746 /* ??? This could be set on a per-function basis by the front-end
4747 but is this worth the hassle? */
4748 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
c4c3cd53 4749 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4f6f9d05 4750
4751 if (!profile_flag && !flag_instrument_function_entry_exit)
4752 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
ecc82929 4753 }
87d4aa85 4754}
4755
4756/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4757 instead of just setting it. */
a6c787e5 4758
87d4aa85 4759void
4760push_struct_function (tree fndecl)
4761{
9078126c 4762 /* When in_dummy_function we might be in the middle of a pop_cfun and
4763 current_function_decl and cfun may not match. */
4764 gcc_assert (in_dummy_function
4765 || (!cfun && !current_function_decl)
4766 || (cfun && current_function_decl == cfun->decl));
f1f41a6c 4767 cfun_stack.safe_push (cfun);
9078126c 4768 current_function_decl = fndecl;
80f2ef47 4769 allocate_struct_function (fndecl, false);
ecc82929 4770}
897b77d6 4771
cbeb677e 4772/* Reset crtl and other non-struct-function variables to defaults as
f024691d 4773 appropriate for emitting rtl at the start of a function. */
897b77d6 4774
ecc82929 4775static void
87d4aa85 4776prepare_function_start (void)
ecc82929 4777{
c36aa54b 4778 gcc_assert (!get_last_insn ());
fef299ce 4779 init_temp_slots ();
957211e4 4780 init_emit ();
b079a207 4781 init_varasm_status ();
957211e4 4782 init_expr ();
7dfb44a0 4783 default_rtl_profile ();
897b77d6 4784
8c0dd614 4785 if (flag_stack_usage_info)
990495a7 4786 {
25a27413 4787 cfun->su = ggc_cleared_alloc<stack_usage> ();
990495a7 4788 cfun->su->static_stack_size = -1;
4789 }
4790
ecc82929 4791 cse_not_expected = ! optimize;
897b77d6 4792
ecc82929 4793 /* Caller save not needed yet. */
4794 caller_save_needed = 0;
897b77d6 4795
ecc82929 4796 /* We haven't done register allocation yet. */
4797 reg_renumber = 0;
897b77d6 4798
304c5bf1 4799 /* Indicate that we have not instantiated virtual registers yet. */
4800 virtuals_instantiated = 0;
4801
316bc009 4802 /* Indicate that we want CONCATs now. */
4803 generating_concat_p = 1;
4804
304c5bf1 4805 /* Indicate we have no need of a frame pointer yet. */
4806 frame_pointer_needed = 0;
304c5bf1 4807}
4808
20dc3373 4809void
4810push_dummy_function (bool with_decl)
4811{
4812 tree fn_decl, fn_type, fn_result_decl;
4813
4814 gcc_assert (!in_dummy_function);
4815 in_dummy_function = true;
4816
4817 if (with_decl)
4818 {
4819 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4820 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4821 fn_type);
4822 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4823 NULL_TREE, void_type_node);
4824 DECL_RESULT (fn_decl) = fn_result_decl;
4825 }
4826 else
4827 fn_decl = NULL_TREE;
4828
4829 push_struct_function (fn_decl);
4830}
4831
304c5bf1 4832/* Initialize the rtl expansion mechanism so that we can do simple things
4833 like generate sequences. This is used to provide a context during global
87d4aa85 4834 initialization of some passes. You must call expand_dummy_function_end
4835 to exit this context. */
4836
304c5bf1 4837void
de1b648b 4838init_dummy_function_start (void)
304c5bf1 4839{
20dc3373 4840 push_dummy_function (false);
87d4aa85 4841 prepare_function_start ();
304c5bf1 4842}
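
/* Illustrative sketch only: a pass or backend-initialization routine that
   needs a temporary RTL context pairs the calls as

       init_dummy_function_start ();
       ... generate throw-away insns with start_sequence / emit_insn /
           end_sequence ...
       expand_dummy_function_end ();

   matching the requirement stated in the comment above.  */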
4843
4844/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4845 and initialize static variables for generating RTL for the statements
4846 of the function. */
4847
4848void
de1b648b 4849init_function_start (tree subr)
304c5bf1 4850{
87d4aa85 4851 if (subr && DECL_STRUCT_FUNCTION (subr))
4852 set_cfun (DECL_STRUCT_FUNCTION (subr));
4853 else
80f2ef47 4854 allocate_struct_function (subr, false);
e0ff5636 4855
4856 /* Initialize backend, if needed. */
4857 initialize_rtl ();
4858
87d4aa85 4859 prepare_function_start ();
756dcd13 4860 decide_function_section (subr);
304c5bf1 4861
897b77d6 4862 /* Warn if this value is an aggregate type,
4863 regardless of which calling convention we are using for it. */
efb9d9ee 4864 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4865 warning (OPT_Waggregate_return, "function returns an aggregate");
0a893c29 4866}
a590d94d 4867
f1a0edff 4868/* Expand code to verify the stack_protect_guard. This is invoked at
4869 the end of a function to be protected. */
4870
71d89928 4871void
f1a0edff 4872stack_protect_epilogue (void)
4873{
4874 tree guard_decl = targetm.stack_protect_guard ();
79f6a8ed 4875 rtx_code_label *label = gen_label_rtx ();
f1a0edff 4876 rtx x, y, tmp;
4877
d2a99f05 4878 x = expand_normal (crtl->stack_protect_guard);
4879 y = expand_normal (guard_decl);
f1a0edff 4880
4881 /* Allow the target to compare Y with X without leaking either into
4882 a register. */
e9b06442 4883 switch (targetm.have_stack_protect_test ())
f1a0edff 4884 {
4885 case 1:
e9b06442 4886 if (rtx_insn *seq = targetm.gen_stack_protect_test (x, y, label))
f1a0edff 4887 {
e9b06442 4888 emit_insn (seq);
f1a0edff 4889 break;
4890 }
4891 /* FALLTHRU */
4892
4893 default:
4894 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4895 break;
4896 }
4897
4898 /* The noreturn predictor has been moved to the tree level. The rtl-level
 4899     predictors estimate this branch at about 20%, which isn't enough to get
4900 things moved out of line. Since this is the only extant case of adding
 4901     a noreturn function at the rtl level, it doesn't seem worth doing anything
4902 except adding the prediction by hand. */
4903 tmp = get_last_insn ();
4904 if (JUMP_P (tmp))
ee5f6585 4905 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
f1a0edff 4906
5a13cc45 4907 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4908 free_temp_slots ();
f1a0edff 4909 emit_label (label);
4910}
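
/* Conceptually (illustration only; the symbol names are assumed from the
   usual libc-based configuration), the sequence emitted above behaves like

       if (frame_copy_of_guard != __stack_chk_guard)
         __stack_chk_fail ();

   using the target's stack_protect_test pattern for the comparison when one
   is available, and emit_cmp_and_jump_insns otherwise.  */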
4911\f
897b77d6 4912/* Start the RTL for a new function, and set variables used for
4913 emitting RTL.
 4914   SUBR is the FUNCTION_DECL node.  */
4917
4918void
82aa4bd5 4919expand_function_start (tree subr)
897b77d6 4920{
897b77d6 4921 /* Make sure volatile mem refs aren't considered
4922 valid operands of arithmetic insns. */
4923 init_recog_no_volatile ();
4924
18d50ae6 4925 crtl->profile
7811c823 4926 = (profile_flag
4927 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4928
18d50ae6 4929 crtl->limit_stack
8f8ac140 4930 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4931
df4b504c 4932 /* Make the label for return statements to jump to. Do not special
4933 case machines with special return instructions -- they will be
4934 handled later during jump, ifcvt, or epilogue creation. */
897b77d6 4935 return_label = gen_label_rtx ();
897b77d6 4936
4937 /* Initialize rtx used to return the value. */
4938 /* Do this before assign_parms so that we copy the struct value address
4939 before any library calls that assign parms might generate. */
4940
4941 /* Decide whether to return the value in memory or in a register. */
1382992b 4942 if (aggregate_value_p (DECL_RESULT (subr), subr))
897b77d6 4943 {
4944 /* Returning something that won't go in a register. */
19cb6b50 4945 rtx value_address = 0;
897b77d6 4946
4947#ifdef PCC_STATIC_STRUCT_RETURN
18d50ae6 4948 if (cfun->returns_pcc_struct)
897b77d6 4949 {
1382992b 4950 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
897b77d6 4951 value_address = assemble_static_space (size);
4952 }
4953 else
4954#endif
4955 {
d8c09ceb 4956 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
897b77d6 4957 /* Expect to be passed the address of a place to store the value.
4958 If it is passed as an argument, assign_parms will take care of
4959 it. */
45550790 4960 if (sv)
897b77d6 4961 {
1382992b 4962 value_address = gen_reg_rtx (Pmode);
45550790 4963 emit_move_insn (value_address, sv);
897b77d6 4964 }
4965 }
4966 if (value_address)
ce88c7f0 4967 {
648c102e 4968 rtx x = value_address;
1382992b 4969 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
648c102e 4970 {
1382992b 4971 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4972 set_mem_attributes (x, DECL_RESULT (subr), 1);
648c102e 4973 }
1382992b 4974 SET_DECL_RTL (DECL_RESULT (subr), x);
ce88c7f0 4975 }
897b77d6 4976 }
1382992b 4977 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
897b77d6 4978 /* If return mode is void, this decl rtl should not be used. */
1382992b 4979 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
7ab29b28 4980 else
7e8dfb30 4981 {
7ab29b28 4982 /* Compute the return values into a pseudo reg, which we will copy
4983 into the true return register after the cleanups are done. */
1382992b 4984 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4985 if (TYPE_MODE (return_type) != BLKmode
4986 && targetm.calls.return_in_msb (return_type))
05d18e8b 4987 /* expand_function_end will insert the appropriate padding in
4988 this case. Use the return value's natural (unpadded) mode
4989 within the function proper. */
1382992b 4990 SET_DECL_RTL (DECL_RESULT (subr),
4991 gen_reg_rtx (TYPE_MODE (return_type)));
92f708ec 4992 else
fdada98f 4993 {
05d18e8b 4994 /* In order to figure out what mode to use for the pseudo, we
4995 figure out what the mode of the eventual return register will
4996 actually be, and use that. */
46b3ff29 4997 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
05d18e8b 4998
4999 /* Structures that are returned in registers are not
5000 aggregate_value_p, so we may see a PARALLEL or a REG. */
5001 if (REG_P (hard_reg))
1382992b 5002 SET_DECL_RTL (DECL_RESULT (subr),
5003 gen_reg_rtx (GET_MODE (hard_reg)));
05d18e8b 5004 else
5005 {
5006 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
1382992b 5007 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
05d18e8b 5008 }
fdada98f 5009 }
7e8dfb30 5010
b566e2e5 5011 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5012 result to the real return register(s). */
1382992b 5013 DECL_REGISTER (DECL_RESULT (subr)) = 1;
058a1b7a 5014
5015 if (chkp_function_instrumented_p (current_function_decl))
5016 {
1382992b 5017 tree return_type = TREE_TYPE (DECL_RESULT (subr));
058a1b7a 5018 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5019 subr, 1);
1382992b 5020 SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
058a1b7a 5021 }
7e8dfb30 5022 }
897b77d6 5023
5024 /* Initialize rtx for parameters and local variables.
5025 In some cases this requires emitting insns. */
bffcf014 5026 assign_parms (subr);
897b77d6 5027
4ee9c684 5028 /* If function gets a static chain arg, store it. */
5029 if (cfun->static_chain_decl)
5030 {
3efaa21f 5031 tree parm = cfun->static_chain_decl;
bf79ca12 5032 rtx local, chain;
5033 rtx_insn *insn;
3efaa21f 5034
1382992b 5035 local = gen_reg_rtx (Pmode);
82c7907c 5036 chain = targetm.calls.static_chain (current_function_decl, true);
5037
5038 set_decl_incoming_rtl (parm, chain, false);
3efaa21f 5039 SET_DECL_RTL (parm, local);
3efaa21f 5040 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4ee9c684 5041
82c7907c 5042 insn = emit_move_insn (local, chain);
5043
5044 /* Mark the register as eliminable, similar to parameters. */
5045 if (MEM_P (chain)
5046 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
41cf444a 5047 set_dst_reg_note (insn, REG_EQUIV, chain, local);
eac967db 5048
5049 /* If we aren't optimizing, save the static chain onto the stack. */
5050 if (!optimize)
5051 {
5052 tree saved_static_chain_decl
5053 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5054 DECL_NAME (parm), TREE_TYPE (parm));
5055 rtx saved_static_chain_rtx
5056 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5057 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5058 emit_move_insn (saved_static_chain_rtx, chain);
5059 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5060 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5061 }
4ee9c684 5062 }
5063
5064 /* If the function receives a non-local goto, then store the
5065 bits we need to restore the frame pointer. */
5066 if (cfun->nonlocal_goto_save_area)
5067 {
5068 tree t_save;
5069 rtx r_save;
5070
1a105fae 5071 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
7843e4bc 5072 gcc_assert (DECL_RTL_SET_P (var));
4ee9c684 5073
21dc8b2b 5074 t_save = build4 (ARRAY_REF,
5075 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
b55f9493 5076 cfun->nonlocal_goto_save_area,
5077 integer_zero_node, NULL_TREE, NULL_TREE);
4ee9c684 5078 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
21dc8b2b 5079 gcc_assert (GET_MODE (r_save) == Pmode);
50c48f9b 5080
6a5dfe57 5081 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4ee9c684 5082 update_nonlocal_goto_save_area ();
5083 }
50c48f9b 5084
897b77d6 5085 /* The following was moved from init_function_start.
5086 The move is supposed to make sdb output more accurate. */
5087 /* Indicate the beginning of the function body,
5088 as opposed to parm setup. */
31b97e8f 5089 emit_note (NOTE_INSN_FUNCTION_BEG);
897b77d6 5090
1edb3690 5091 gcc_assert (NOTE_P (get_last_insn ()));
5092
897b77d6 5093 parm_birth_insn = get_last_insn ();
5094
18d50ae6 5095 if (crtl->profile)
b8a21949 5096 {
b8a21949 5097#ifdef PROFILE_HOOK
4781f9b9 5098 PROFILE_HOOK (current_function_funcdef_no);
104d9861 5099#endif
b8a21949 5100 }
104d9861 5101
f8c438a1 5102 /* If we are doing generic stack checking, the probe should go here. */
5103 if (flag_stack_check == GENERIC_STACK_CHECK)
1edb3690 5104 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
897b77d6 5105}
5106\f
20dc3373 5107void
5108pop_dummy_function (void)
5109{
5110 pop_cfun ();
5111 in_dummy_function = false;
5112}
5113
0a893c29 5114/* Undo the effects of init_dummy_function_start. */
5115void
de1b648b 5116expand_dummy_function_end (void)
0a893c29 5117{
87d4aa85 5118 gcc_assert (in_dummy_function);
5119
0a893c29 5120 /* End any sequences that failed to be closed due to syntax errors. */
5121 while (in_sequence_p ())
5122 end_sequence ();
5123
5124 /* Outside function body, can't compute type's actual size
5125 until next function's body starts. */
3c3bb268 5126
08513b52 5127 free_after_parsing (cfun);
5128 free_after_compilation (cfun);
20dc3373 5129 pop_dummy_function ();
0a893c29 5130}
5131
058a1b7a 5132/* Helper for diddle_return_value. */
631ef7ce 5133
5134void
058a1b7a 5135diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
631ef7ce 5136{
2766437e 5137 if (! outgoing)
5138 return;
631ef7ce 5139
8ad4c111 5140 if (REG_P (outgoing))
2766437e 5141 (*doit) (outgoing, arg);
5142 else if (GET_CODE (outgoing) == PARALLEL)
5143 {
5144 int i;
631ef7ce 5145
2766437e 5146 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5147 {
5148 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5149
8ad4c111 5150 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
2766437e 5151 (*doit) (x, arg);
631ef7ce 5152 }
5153 }
5154}
5155
058a1b7a 5156/* Call DOIT for each hard register used as a return value from
5157 the current function. */
5158
5159void
5160diddle_return_value (void (*doit) (rtx, void *), void *arg)
5161{
058a1b7a 5162 diddle_return_value_1 (doit, arg, crtl->return_bnd);
1b172b45 5163 diddle_return_value_1 (doit, arg, crtl->return_rtx);
058a1b7a 5164}
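
/* Illustration only (rtx shapes assumed, not taken from a particular target):
   a value returned partly in an integer and partly in a floating register
   might have a return rtx of roughly the form

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DF 21) (const_int 8))])

   in which case DOIT is invoked once for each constituent hard register,
   whereas a plain (reg:SI 0) return value invokes DOIT exactly once.  */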
5165
2766437e 5166static void
de1b648b 5167do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
2766437e 5168{
18b42941 5169 emit_clobber (reg);
2766437e 5170}
5171
5172void
de1b648b 5173clobber_return_register (void)
2766437e 5174{
5175 diddle_return_value (do_clobber_return_reg, NULL);
1b2c7cbd 5176
5177 /* In case we do use pseudo to return value, clobber it too. */
5178 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5179 {
5180 tree decl_result = DECL_RESULT (current_function_decl);
5181 rtx decl_rtl = DECL_RTL (decl_result);
5182 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5183 {
5184 do_clobber_return_reg (decl_rtl, NULL);
5185 }
5186 }
2766437e 5187}
5188
5189static void
de1b648b 5190do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
2766437e 5191{
18b42941 5192 emit_use (reg);
2766437e 5193}
5194
ab4605bf 5195static void
de1b648b 5196use_return_register (void)
2766437e 5197{
5198 diddle_return_value (do_use_return_reg, NULL);
5199}
5200
0e80b01d 5201/* Set the location of the insn chain starting at INSN to LOC. */
5202
5203static void
4cd001d5 5204set_insn_locations (rtx_insn *insn, int loc)
0e80b01d 5205{
4cd001d5 5206 while (insn != NULL)
0e80b01d 5207 {
5208 if (INSN_P (insn))
5209 INSN_LOCATION (insn) = loc;
5210 insn = NEXT_INSN (insn);
5211 }
5212}
5213
6473f3f4 5214/* Generate RTL for the end of the current function. */
897b77d6 5215
5216void
de1b648b 5217expand_function_end (void)
897b77d6 5218{
2032b31d 5219 /* If arg_pointer_save_area was referenced only from a nested
5220 function, we will not have initialized it yet. Do that now. */
18d50ae6 5221 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
b079a207 5222 get_arg_pointer_save_area ();
2032b31d 5223
4852b829 5224 /* If we are doing generic stack checking and this function makes calls,
b22178d2 5225 do a stack probe at the start of the function to ensure we have enough
5226 space for another stack frame. */
4852b829 5227 if (flag_stack_check == GENERIC_STACK_CHECK)
b22178d2 5228 {
8bb2625b 5229 rtx_insn *insn, *seq;
b22178d2 5230
5231 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6d7dc5b9 5232 if (CALL_P (insn))
b22178d2 5233 {
d1b92264 5234 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
b22178d2 5235 start_sequence ();
d1b92264 5236 if (STACK_CHECK_MOVING_SP)
5237 anti_adjust_stack_and_probe (max_frame_size, true);
5238 else
5239 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
b22178d2 5240 seq = get_insns ();
5241 end_sequence ();
5169661d 5242 set_insn_locations (seq, prologue_location);
1edb3690 5243 emit_insn_before (seq, stack_check_probe_note);
b22178d2 5244 break;
5245 }
5246 }
5247
897b77d6 5248 /* End any sequences that failed to be closed due to syntax errors. */
5249 while (in_sequence_p ())
1bb04728 5250 end_sequence ();
897b77d6 5251
897b77d6 5252 clear_pending_stack_adjust ();
5253 do_pending_stack_adjust ();
5254
897b77d6 5255 /* Output a linenumber for the end of the function.
5256 SDB depends on this. */
5169661d 5257 set_curr_insn_location (input_location);
897b77d6 5258
b41180f5 5259 /* Before the return label (if any), clobber the return
3fb1e43b 5260 registers so that they are not propagated live to the rest of
b41180f5 5261 the function. This can only happen with functions that drop
5262 through; if there had been a return statement, there would
9b56368f 5263 have either been a return rtx, or a jump to the return label.
5264
 5265     We delay actual code generation until after the current_function_value_rtx
5266 is computed. */
9ed997be 5267 rtx_insn *clobber_after = get_last_insn ();
b41180f5 5268
7861133f 5269 /* Output the label for the actual return from the function. */
5270 emit_label (return_label);
897b77d6 5271
218e3e4e 5272 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
a7e05170 5273 {
5274 /* Let except.c know where it should emit the call to unregister
5275 the function context for sjlj exceptions. */
5276 if (flag_exceptions)
5277 sjlj_emit_function_exit_after (get_last_insn ());
5278 }
3072d30e 5279 else
5280 {
5281 /* We want to ensure that instructions that may trap are not
5282 moved into the epilogue by scheduling, because we don't
5283 always emit unwind information for the epilogue. */
cbeb677e 5284 if (cfun->can_throw_non_call_exceptions)
3072d30e 5285 emit_insn (gen_blockage ());
5286 }
855f1e85 5287
80e467e2 5288 /* If this is an implementation of throw, do what's necessary to
5289 communicate between __builtin_eh_return and the epilogue. */
5290 expand_eh_return ();
5291
ae39498f 5292 /* If scalar return value was computed in a pseudo-reg, or was a named
5293 return value that got dumped to the stack, copy that to the hard
5294 return register. */
0e8e37b2 5295 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
897b77d6 5296 {
ae39498f 5297 tree decl_result = DECL_RESULT (current_function_decl);
5298 rtx decl_rtl = DECL_RTL (decl_result);
5299
5300 if (REG_P (decl_rtl)
5301 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5302 : DECL_REGISTER (decl_result))
5303 {
abe32cce 5304 rtx real_decl_rtl = crtl->return_rtx;
897b77d6 5305
8839b7f1 5306 /* This should be set in assign_parms. */
fdada98f 5307 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
ae39498f 5308
5309 /* If this is a BLKmode structure being returned in registers,
5310 then use the mode computed in expand_return. Note that if
60d903f5 5311 decl_rtl is memory, then its mode may have been changed,
abe32cce 5312	     decl_rtl is memory, then its mode may have been changed,  but that of
ae39498f 5313 if (GET_MODE (real_decl_rtl) == BLKmode)
8839b7f1 5314 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
ae39498f 5315
05d18e8b 5316 /* If a non-BLKmode return value should be padded at the least
5317 significant end of the register, shift it left by the appropriate
5318 amount. BLKmode results are handled using the group load/store
5319 machinery. */
5320 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
d8ef55fc 5321 && REG_P (real_decl_rtl)
05d18e8b 5322 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5323 {
5324 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5325 REGNO (real_decl_rtl)),
5326 decl_rtl);
5327 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5328 }
ae39498f 5329	  /* If a named return value dumped decl_result to memory, then
60d903f5 5330 we may need to re-do the PROMOTE_MODE signed/unsigned
ae39498f 5331 extension. */
05d18e8b 5332 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
ae39498f 5333 {
78a8ed03 5334 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
3b2411a8 5335 promote_function_mode (TREE_TYPE (decl_result),
5336 GET_MODE (decl_rtl), &unsignedp,
5337 TREE_TYPE (current_function_decl), 1);
ae39498f 5338
5339 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5340 }
3395ec76 5341 else if (GET_CODE (real_decl_rtl) == PARALLEL)
b566e2e5 5342 {
5343 /* If expand_function_start has created a PARALLEL for decl_rtl,
5344 move the result to the real return registers. Otherwise, do
5345 a group load from decl_rtl for a named return. */
5346 if (GET_CODE (decl_rtl) == PARALLEL)
5347 emit_group_move (real_decl_rtl, decl_rtl);
5348 else
5349 emit_group_load (real_decl_rtl, decl_rtl,
5f4cd670 5350 TREE_TYPE (decl_result),
b566e2e5 5351 int_size_in_bytes (TREE_TYPE (decl_result)));
5352 }
80e467e2 5353 /* In the case of complex integer modes smaller than a word, we'll
5354 need to generate some non-trivial bitfield insertions. Do that
5355 on a pseudo and not the hard register. */
5356 else if (GET_CODE (decl_rtl) == CONCAT
5357 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5358 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5359 {
5360 int old_generating_concat_p;
5361 rtx tmp;
5362
5363 old_generating_concat_p = generating_concat_p;
5364 generating_concat_p = 0;
5365 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5366 generating_concat_p = old_generating_concat_p;
5367
5368 emit_move_insn (tmp, decl_rtl);
5369 emit_move_insn (real_decl_rtl, tmp);
5370 }
ae39498f 5371 else
5372 emit_move_insn (real_decl_rtl, decl_rtl);
ae39498f 5373 }
897b77d6 5374 }
5375
5376 /* If returning a structure, arrange to return the address of the value
5377 in a place where debuggers expect to find it.
5378
5379 If returning a structure PCC style,
5380 the caller also depends on this value.
18d50ae6 5381 And cfun->returns_pcc_struct is not necessarily set. */
809140f3 5382 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5383 && !targetm.calls.omit_struct_return_reg)
897b77d6 5384 {
806e4c12 5385 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
897b77d6 5386 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
806e4c12 5387 rtx outgoing;
5388
5389 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5390 type = TREE_TYPE (type);
5391 else
5392 value_address = XEXP (value_address, 0);
5393
46b3ff29 5394 outgoing = targetm.calls.function_value (build_pointer_type (type),
5395 current_function_decl, true);
897b77d6 5396
5397 /* Mark this as a function return value so integrate will delete the
5398 assignment and USE below when inlining this function. */
5399 REG_FUNCTION_VALUE_P (outgoing) = 1;
5400
c54c9422 5401 /* The address may be ptr_mode and OUTGOING may be Pmode. */
85d654dd 5402 value_address = convert_memory_address (GET_MODE (outgoing),
5403 value_address);
c54c9422 5404
897b77d6 5405 emit_move_insn (outgoing, value_address);
c54c9422 5406
5407 /* Show return register used to hold result (in this case the address
 5408	 of the result).  */
abe32cce 5409 crtl->return_rtx = outgoing;
897b77d6 5410 }
5411
04e7d9cb 5412 /* Emit the actual code to clobber return register. Don't emit
5413 it if clobber_after is a barrier, then the previous basic block
5414 certainly doesn't fall thru into the exit block. */
5415 if (!BARRIER_P (clobber_after))
5416 {
04e7d9cb 5417 start_sequence ();
5418 clobber_return_register ();
9ed997be 5419 rtx_insn *seq = get_insns ();
04e7d9cb 5420 end_sequence ();
9b56368f 5421
04e7d9cb 5422 emit_insn_after (seq, clobber_after);
5423 }
9b56368f 5424
01628e06 5425 /* Output the label for the naked return from the function. */
b2ee26d5 5426 if (naked_return_label)
5427 emit_label (naked_return_label);
62380d2d 5428
1b7fd1d9 5429 /* @@@ This is a kludge. We want to ensure that instructions that
5430 may trap are not moved into the epilogue by scheduling, because
d86df71c 5431 we don't always emit unwind information for the epilogue. */
cc7d6aed 5432 if (cfun->can_throw_non_call_exceptions
218e3e4e 5433 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
d86df71c 5434 emit_insn (gen_blockage ());
1b7fd1d9 5435
f1a0edff 5436 /* If stack protection is enabled for this function, check the guard. */
edb7afe8 5437 if (crtl->stack_protect_guard)
f1a0edff 5438 stack_protect_epilogue ();
5439
6a7492e8 5440 /* If we had calls to alloca, and this machine needs
5441 an accurate stack pointer to exit the function,
5442 insert some code to save and restore the stack pointer. */
5443 if (! EXIT_IGNORE_STACK
18d50ae6 5444 && cfun->calls_alloca)
6a7492e8 5445 {
9ed997be 5446 rtx tem = 0;
6a7492e8 5447
e9c97615 5448 start_sequence ();
5449 emit_stack_save (SAVE_FUNCTION, &tem);
9ed997be 5450 rtx_insn *seq = get_insns ();
e9c97615 5451 end_sequence ();
5452 emit_insn_before (seq, parm_birth_insn);
5453
5454 emit_stack_restore (SAVE_FUNCTION, tem);
6a7492e8 5455 }
5456
2766437e 5457 /* ??? This should no longer be necessary since stupid is no longer with
 5458     us, but there are some parts of the compiler (e.g. reload_combine and
 5459     sh mach_dep_reorg) that still try to compute their own lifetime info
5460 instead of using the general framework. */
5461 use_return_register ();
897b77d6 5462}
05927e40 5463
5464rtx
b079a207 5465get_arg_pointer_save_area (void)
05927e40 5466{
b079a207 5467 rtx ret = arg_pointer_save_area;
05927e40 5468
5469 if (! ret)
5470 {
b079a207 5471 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5472 arg_pointer_save_area = ret;
2032b31d 5473 }
5474
18d50ae6 5475 if (! crtl->arg_pointer_save_area_init)
2032b31d 5476 {
60d903f5 5477 /* Save the arg pointer at the beginning of the function. The
2032b31d 5478 generated stack slot may not be a valid memory address, so we
05927e40 5479 have to check it and fix it if necessary. */
5480 start_sequence ();
d2b9158b 5481 emit_move_insn (validize_mem (copy_rtx (ret)),
27a7a23a 5482 crtl->args.internal_arg_pointer);
9ed997be 5483 rtx_insn *seq = get_insns ();
05927e40 5484 end_sequence ();
5485
2032b31d 5486 push_topmost_sequence ();
c838448c 5487 emit_insn_after (seq, entry_of_function ());
2032b31d 5488 pop_topmost_sequence ();
050f9ef1 5489
5490 crtl->arg_pointer_save_area_init = true;
05927e40 5491 }
5492
5493 return ret;
5494}
b2c5602e 5495\f
25e880b1 5496/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5497 for the first time. */
b2c5602e 5498
60ecc450 5499static void
d1023d12 5500record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
b2c5602e 5501{
4cd001d5 5502 rtx_insn *tmp;
d1023d12 5503 hash_table<insn_cache_hasher> *hash = *hashp;
60ecc450 5504
25e880b1 5505 if (hash == NULL)
d1023d12 5506 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
25e880b1 5507
5508 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5509 {
d1023d12 5510 rtx *slot = hash->find_slot (tmp, INSERT);
25e880b1 5511 gcc_assert (*slot == NULL);
5512 *slot = tmp;
5513 }
5514}
5515
1eefcaee 5516/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5517 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5518 insn, then record COPY as well. */
25e880b1 5519
5520void
1eefcaee 5521maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
25e880b1 5522{
d1023d12 5523 hash_table<insn_cache_hasher> *hash;
5524 rtx *slot;
25e880b1 5525
1eefcaee 5526 hash = epilogue_insn_hash;
d1023d12 5527 if (!hash || !hash->find (insn))
1eefcaee 5528 {
5529 hash = prologue_insn_hash;
d1023d12 5530 if (!hash || !hash->find (insn))
1eefcaee 5531 return;
5532 }
25e880b1 5533
d1023d12 5534 slot = hash->find_slot (copy, INSERT);
25e880b1 5535 gcc_assert (*slot == NULL);
5536 *slot = copy;
b2c5602e 5537}
5538
25e880b1 5539/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5540 we can be running after reorg, SEQUENCE rtl is possible. */
b2c5602e 5541
25e880b1 5542static bool
d1023d12 5543contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
b2c5602e 5544{
25e880b1 5545 if (hash == NULL)
5546 return false;
b2c5602e 5547
25e880b1 5548 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
b2c5602e 5549 {
9e21f364 5550 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
25e880b1 5551 int i;
9e21f364 5552 for (i = seq->len () - 1; i >= 0; i--)
d1023d12 5553 if (hash->find (seq->element (i)))
25e880b1 5554 return true;
5555 return false;
b2c5602e 5556 }
25e880b1 5557
d1023d12 5558 return hash->find (const_cast<rtx> (insn)) != NULL;
b2c5602e 5559}
a590d94d 5560
5561int
52d07779 5562prologue_epilogue_contains (const_rtx insn)
a590d94d 5563{
25e880b1 5564 if (contains (insn, prologue_insn_hash))
a590d94d 5565 return 1;
25e880b1 5566 if (contains (insn, epilogue_insn_hash))
a590d94d 5567 return 1;
5568 return 0;
5569}
b2c5602e 5570
f2c8a251 5571/* Insert use of return register before the end of BB. */
5572
5573static void
5574emit_use_return_register_into_block (basic_block bb)
5575{
f2c8a251 5576 start_sequence ();
5577 use_return_register ();
9ed997be 5578 rtx_insn *seq = get_insns ();
f2c8a251 5579 end_sequence ();
9ed997be 5580 rtx_insn *insn = BB_END (bb);
ff900b8e 5581 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
96129913 5582 insn = prev_cc0_setter (insn);
ff900b8e 5583
96129913 5584 emit_insn_before (seq, insn);
f2c8a251 5585}
5586
1f021f97 5587
5588/* Create a return pattern, either simple_return or return, depending on
5589 simple_p. */
5590
5da5e283 5591static rtx_insn *
1f021f97 5592gen_return_pattern (bool simple_p)
5593{
5da5e283 5594 return (simple_p
5595 ? targetm.gen_simple_return ()
5596 : targetm.gen_return ());
1f021f97 5597}
5598
5599/* Insert an appropriate return pattern at the end of block BB. This
5600 also means updating block_for_insn appropriately. SIMPLE_P is
5601 the same as in gen_return_pattern and passed to it. */
2215ca0d 5602
c562205f 5603void
1f021f97 5604emit_return_into_block (bool simple_p, basic_block bb)
2215ca0d 5605{
9ed997be 5606 rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
5607 BB_END (bb));
5608 rtx pat = PATTERN (jump);
9cb2517e 5609 if (GET_CODE (pat) == PARALLEL)
5610 pat = XVECEXP (pat, 0, 0);
5611 gcc_assert (ANY_RETURN_P (pat));
5612 JUMP_LABEL (jump) = pat;
2215ca0d 5613}
5614
31a53363 5615/* Set JUMP_LABEL for a return insn. */
5616
5617void
a9634f6a 5618set_return_jump_label (rtx_insn *returnjump)
31a53363 5619{
5620 rtx pat = PATTERN (returnjump);
5621 if (GET_CODE (pat) == PARALLEL)
5622 pat = XVECEXP (pat, 0, 0);
5623 if (ANY_RETURN_P (pat))
5624 JUMP_LABEL (returnjump) = pat;
5625 else
5626 JUMP_LABEL (returnjump) = ret_rtx;
5627}
5628
0a55d497 5629/* Return true if there are any active insns between HEAD and TAIL. */
c562205f 5630bool
64e72baf 5631active_insn_between (rtx_insn *head, rtx_insn *tail)
cde48de6 5632{
0a55d497 5633 while (tail)
5634 {
5635 if (active_insn_p (tail))
5636 return true;
5637 if (tail == head)
5638 return false;
5639 tail = PREV_INSN (tail);
5640 }
5641 return false;
5642}
5643
 5644/* LAST_BB is a block that exits and is empty of active instructions.
5645 Examine its predecessors for jumps that can be converted to
5646 (conditional) returns. */
c562205f 5647vec<edge>
0a55d497 5648convert_jumps_to_returns (basic_block last_bb, bool simple_p,
f1f41a6c 5649 vec<edge> unconverted ATTRIBUTE_UNUSED)
0a55d497 5650{
5651 int i;
5652 basic_block bb;
0a55d497 5653 edge_iterator ei;
5654 edge e;
c2078b80 5655 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
cde48de6 5656
0a55d497 5657 FOR_EACH_EDGE (e, ei, last_bb->preds)
34154e27 5658 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
f1f41a6c 5659 src_bbs.quick_push (e->src);
0a55d497 5660
9ed997be 5661 rtx_insn *label = BB_HEAD (last_bb);
0a55d497 5662
f1f41a6c 5663 FOR_EACH_VEC_ELT (src_bbs, i, bb)
cde48de6 5664 {
93ee8dfb 5665 rtx_insn *jump = BB_END (bb);
0a55d497 5666
5667 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5668 continue;
5669
5670 e = find_edge (bb, last_bb);
5671
5672 /* If we have an unconditional jump, we can replace that
5673 with a simple return instruction. */
5674 if (simplejump_p (jump))
5675 {
5676 /* The use of the return register might be present in the exit
5677 fallthru block. Either:
5678 - removing the use is safe, and we should remove the use in
5679 the exit fallthru block, or
5680 - removing the use is not safe, and we should add it here.
5681 For now, we conservatively choose the latter. Either of the
5682 2 helps in crossjumping. */
5683 emit_use_return_register_into_block (bb);
5684
5685 emit_return_into_block (simple_p, bb);
5686 delete_insn (jump);
5687 }
5688
5689 /* If we have a conditional jump branching to the last
5690 block, we can try to replace that with a conditional
5691 return instruction. */
5692 else if (condjump_p (jump))
5693 {
5694 rtx dest;
5695
5696 if (simple_p)
5697 dest = simple_return_rtx;
5698 else
5699 dest = ret_rtx;
f9a00e9e 5700 if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
0a55d497 5701 {
5da5e283 5702 if (targetm.have_simple_return () && simple_p)
0a55d497 5703 {
5704 if (dump_file)
5705 fprintf (dump_file,
5706 "Failed to redirect bb %d branch.\n", bb->index);
f1f41a6c 5707 unconverted.safe_push (e);
0a55d497 5708 }
0a55d497 5709 continue;
5710 }
5711
5712 /* See comment in simplejump_p case above. */
5713 emit_use_return_register_into_block (bb);
5714
5715 /* If this block has only one successor, it both jumps
5716 and falls through to the fallthru block, so we can't
5717 delete the edge. */
5718 if (single_succ_p (bb))
5719 continue;
5720 }
5721 else
5722 {
5da5e283 5723 if (targetm.have_simple_return () && simple_p)
0a55d497 5724 {
5725 if (dump_file)
5726 fprintf (dump_file,
5727 "Failed to redirect bb %d branch.\n", bb->index);
f1f41a6c 5728 unconverted.safe_push (e);
0a55d497 5729 }
0a55d497 5730 continue;
5731 }
5732
5733 /* Fix up the CFG for the successful change we just made. */
34154e27 5734 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
ebb58a45 5735 e->flags &= ~EDGE_CROSSING;
cde48de6 5736 }
f1f41a6c 5737 src_bbs.release ();
0a55d497 5738 return unconverted;
cde48de6 5739}
5740
0a55d497 5741/* Emit a return insn for the exit fallthru block. */
c562205f 5742basic_block
0a55d497 5743emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5744{
5745 basic_block last_bb = exit_fallthru_edge->src;
5746
5747 if (JUMP_P (BB_END (last_bb)))
5748 {
5749 last_bb = split_edge (exit_fallthru_edge);
5750 exit_fallthru_edge = single_succ_edge (last_bb);
5751 }
5752 emit_barrier_after (BB_END (last_bb));
5753 emit_return_into_block (simple_p, last_bb);
5754 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5755 return last_bb;
5756}
0a55d497 5757
5758
c3418f42 5759/* Generate the prologue and epilogue RTL if the machine supports it. Thread
b2c5602e 5760 this into place with notes indicating where the prologue ends and where
1f021f97 5761 the epilogue begins. Update the basic block information when possible.
5762
5763 Notes on epilogue placement:
5764 There are several kinds of edges to the exit block:
5765 * a single fallthru edge from LAST_BB
5766 * possibly, edges from blocks containing sibcalls
5767 * possibly, fake edges from infinite loops
5768
5769 The epilogue is always emitted on the fallthru edge from the last basic
5770 block in the function, LAST_BB, into the exit block.
5771
5772 If LAST_BB is empty except for a label, it is the target of every
5773 other basic block in the function that ends in a return. If a
5774 target has a return or simple_return pattern (possibly with
5775 conditional variants), these basic blocks can be changed so that a
5776 return insn is emitted into them, and their target is adjusted to
5777 the real exit block.
5778
5779 Notes on shrink wrapping: We implement a fairly conservative
5780 version of shrink-wrapping rather than the textbook one. We only
5781 generate a single prologue and a single epilogue. This is
5782 sufficient to catch a number of interesting cases involving early
5783 exits.
5784
5785 First, we identify the blocks that require the prologue to occur before
5786 them. These are the ones that modify a call-saved register, or reference
5787 any of the stack or frame pointer registers. To simplify things, we then
5788 mark everything reachable from these blocks as also requiring a prologue.
5789 This takes care of loops automatically, and avoids the need to examine
5790 whether MEMs reference the frame, since it is sufficient to check for
5791 occurrences of the stack or frame pointer.
5792
5793 We then compute the set of blocks for which the need for a prologue
5794 is anticipatable (borrowing terminology from the shrink-wrapping
5795 description in Muchnick's book). These are the blocks which either
5796 require a prologue themselves, or those that have only successors
5797 where the prologue is anticipatable. The prologue needs to be
5798 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5799 is not. For the moment, we ensure that only one such edge exists.
5800
5801 The epilogue is placed as described above, but we make a
5802 distinction between inserting return and simple_return patterns
5803 when modifying other blocks that end in a return. Blocks that end
5804 in a sibcall omit the sibcall_epilogue if the block is not in
5805 ANTIC. */
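
/* For intuition only (example assumed, not derived from a particular target):
   in a function such as

       int f (int x) { if (x == 0) return 0;  return g (x) + 1; }

   only the path that calls g needs a stack frame, so the conservative
   shrink-wrapping described above tries to sink the prologue past the early
   return; the "return 0" path can then finish with a simple_return that never
   sets up the frame.  */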
b2c5602e 5806
7ed9df76 5807void
3072d30e 5808thread_prologue_and_epilogue_insns (void)
b2c5602e 5809{
48b14f50 5810 bool inserted;
1e094109 5811 vec<edge> unconverted_simple_returns = vNULL;
0a55d497 5812 bitmap_head bb_flags;
5a7c3c87 5813 rtx_insn *returnjump;
5a7c3c87 5814 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
4cd001d5 5815 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
1f021f97 5816 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
cd665a06 5817 edge_iterator ei;
1f021f97 5818
5819 df_analyze ();
71caadc0 5820
34154e27 5821 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
48b14f50 5822
5823 inserted = false;
5a7c3c87 5824 epilogue_end = NULL;
5825 returnjump = NULL;
48b14f50 5826
5827 /* Can't deal with multiple successors of the entry block at the
5828 moment. Function should always have at least one entry
5829 point. */
34154e27 5830 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5831 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
1f021f97 5832 orig_entry_edge = entry_edge;
5833
4cd001d5 5834 split_prologue_seq = NULL;
48b14f50 5835 if (flag_split_stack
5836 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5837 == NULL))
5838 {
48b14f50 5839 start_sequence ();
a558802e 5840 emit_insn (targetm.gen_split_stack_prologue ());
1f021f97 5841 split_prologue_seq = get_insns ();
48b14f50 5842 end_sequence ();
5843
1f021f97 5844 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5169661d 5845 set_insn_locations (split_prologue_seq, prologue_location);
48b14f50 5846 }
5847
4cd001d5 5848 prologue_seq = NULL;
cf3a33c8 5849 if (targetm.have_prologue ())
b2c5602e 5850 {
71caadc0 5851 start_sequence ();
cf3a33c8 5852 rtx_insn *seq = targetm.gen_prologue ();
71caadc0 5853 emit_insn (seq);
b2c5602e 5854
48e1416a 5855 /* Insert an explicit USE for the frame pointer
3072d30e 5856	 if profiling is on and the frame pointer is required.  */
18d50ae6 5857 if (crtl->profile && frame_pointer_needed)
18b42941 5858 emit_use (hard_frame_pointer_rtx);
3072d30e 5859
b2c5602e 5860 /* Retain a map of the prologue insns. */
25e880b1 5861 record_insns (seq, NULL, &prologue_insn_hash);
d86df71c 5862 emit_note (NOTE_INSN_PROLOGUE_END);
48e1416a 5863
d86df71c 5864 /* Ensure that instructions are not moved into the prologue when
5865 profiling is on. The call to the profiling routine can be
5866 emitted within the live range of a call-clobbered register. */
8637d6a2 5867 if (!targetm.profile_before_prologue () && crtl->profile)
d86df71c 5868 emit_insn (gen_blockage ());
3b934b09 5869
1f021f97 5870 prologue_seq = get_insns ();
71caadc0 5871 end_sequence ();
5169661d 5872 set_insn_locations (prologue_seq, prologue_location);
1f021f97 5873 }
71caadc0 5874
1f021f97 5875 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5876
1f021f97 5877 /* Try to perform a kind of shrink-wrapping, making sure the
5878 prologue/epilogue is emitted only around those parts of the
5879 function that require it. */
5880
c562205f 5881 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
b2c5602e 5882
1f021f97 5883 if (split_prologue_seq != NULL_RTX)
5884 {
4db91b33 5885 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
1f021f97 5886 inserted = true;
5887 }
5888 if (prologue_seq != NULL_RTX)
5889 {
5890 insert_insn_on_edge (prologue_seq, entry_edge);
5891 inserted = true;
5892 }
5893
777e249a 5894 /* If the exit block has no non-fake predecessors, we don't need
5895 an epilogue. */
34154e27 5896 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
777e249a 5897 if ((e->flags & EDGE_FAKE) == 0)
5898 break;
5899 if (e == NULL)
5900 goto epilogue_done;
5901
34154e27 5902 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
1f021f97 5903
34154e27 5904 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
0a55d497 5905
5da5e283 5906 if (targetm.have_simple_return () && entry_edge != orig_entry_edge)
c562205f 5907 exit_fallthru_edge
5908 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5909 &unconverted_simple_returns,
5910 &returnjump);
5da5e283 5911 if (targetm.have_return ())
0a55d497 5912 {
5913 if (exit_fallthru_edge == NULL)
5914 goto epilogue_done;
2215ca0d 5915
0a55d497 5916 if (optimize)
5917 {
5918 basic_block last_bb = exit_fallthru_edge->src;
1f021f97 5919
0a55d497 5920 if (LABEL_P (BB_HEAD (last_bb))
5921 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
1e094109 5922 convert_jumps_to_returns (last_bb, false, vNULL);
0a55d497 5923
3c4ca362 5924 if (EDGE_COUNT (last_bb->preds) != 0
5925 && single_succ_p (last_bb))
1f021f97 5926 {
0a55d497 5927 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5928 epilogue_end = returnjump = BB_END (last_bb);
ae6fd0a8 5929
0a55d497 5930 /* Emitting the return may add a basic block.
5931 Fix bb_flags for the added block. */
5da5e283 5932 if (targetm.have_simple_return ()
5933 && last_bb != exit_fallthru_edge->src)
0a55d497 5934 bitmap_set_bit (&bb_flags, last_bb->index);
ae6fd0a8 5935
0a55d497 5936 goto epilogue_done;
2215ca0d 5937 }
ffb61627 5938 }
2215ca0d 5939 }
25e880b1 5940
5941 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5942 this marker for the splits of EH_RETURN patterns, and nothing else
5943 uses the flag in the meantime. */
5944 epilogue_completed = 1;
5945
5946#ifdef HAVE_eh_return
5947 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5948 some targets, these get split to a special version of the epilogue
5949 code. In order to be able to properly annotate these with unwind
5950 info, try to split them now. If we get a valid split, drop an
5951 EPILOGUE_BEG note and mark the insns as epilogue insns. */
34154e27 5952 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
25e880b1 5953 {
8bb2625b 5954 rtx_insn *prev, *last, *trial;
25e880b1 5955
5956 if (e->flags & EDGE_FALLTHRU)
5957 continue;
5958 last = BB_END (e->src);
5959 if (!eh_returnjump_p (last))
5960 continue;
5961
5962 prev = PREV_INSN (last);
5963 trial = try_split (PATTERN (last), last, 1);
5964 if (trial == last)
5965 continue;
5966
5967 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5968 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5969 }
5970#endif
5971
1f021f97 5972 /* If nothing falls through into the exit block, we don't need an
5973 epilogue. */
9bb8a4af 5974
1f021f97 5975 if (exit_fallthru_edge == NULL)
9bb8a4af 5976 goto epilogue_done;
5977
cf3a33c8 5978 if (targetm.have_epilogue ())
b2c5602e 5979 {
777e249a 5980 start_sequence ();
31b97e8f 5981 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
cf3a33c8 5982 rtx_insn *seq = targetm.gen_epilogue ();
11efe736 5983 if (seq)
5984 emit_jump_insn (seq);
b2c5602e 5985
777e249a 5986 /* Retain a map of the epilogue insns. */
25e880b1 5987 record_insns (seq, NULL, &epilogue_insn_hash);
5169661d 5988 set_insn_locations (seq, epilogue_location);
b2c5602e 5989
31d3e01c 5990 seq = get_insns ();
1f021f97 5991 returnjump = get_last_insn ();
06ebc183 5992 end_sequence ();
71caadc0 5993
1f021f97 5994 insert_insn_on_edge (seq, exit_fallthru_edge);
48b14f50 5995 inserted = true;
4115ac36 5996
5997 if (JUMP_P (returnjump))
31a53363 5998 set_return_jump_label (returnjump);
b2c5602e 5999 }
9bb8a4af 6000 else
9bb8a4af 6001 {
6002 basic_block cur_bb;
6003
1f021f97 6004 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
9bb8a4af 6005 goto epilogue_done;
6006 /* We have a fall-through edge to the exit block, the source is not
6007 at the end of the function, and there will be an assembler epilogue
6008 at the end of the function.
6009 We can't use force_nonfallthru here, because that would try to
1f021f97 6010 use return. Inserting a jump 'by hand' is extremely messy, so
9bb8a4af 6011 we take advantage of cfg_layout_finalize using
1f021f97 6012 fixup_fallthru_exit_predecessor. */
d2ed6106 6013 cfg_layout_initialize (0);
fc00614f 6014 FOR_EACH_BB_FN (cur_bb, cfun)
4d2e5d52 6015 if (cur_bb->index >= NUM_FIXED_BLOCKS
6016 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
bc5f266a 6017 cur_bb->aux = cur_bb->next_bb;
9bb8a4af 6018 cfg_layout_finalize ();
6019 }
202bbc06 6020
777e249a 6021epilogue_done:
1f021f97 6022
c107ab96 6023 default_rtl_profile ();
71caadc0 6024
58d5b39c 6025 if (inserted)
e08b2eb8 6026 {
202bbc06 6027 sbitmap blocks;
6028
e08b2eb8 6029 commit_edge_insertions ();
6030
202bbc06 6031 /* Look for basic blocks within the prologue insns. */
fe672ac0 6032 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
53c5d9d4 6033 bitmap_clear (blocks);
08b7917c 6034 bitmap_set_bit (blocks, entry_edge->dest->index);
6035 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
202bbc06 6036 find_many_sub_basic_blocks (blocks);
6037 sbitmap_free (blocks);
6038
e08b2eb8 6039 /* The epilogue insns we inserted may cause the exit edge to no longer
6040 be fallthru. */
34154e27 6041 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
e08b2eb8 6042 {
6043 if (((e->flags & EDGE_FALLTHRU) != 0)
6044 && returnjump_p (BB_END (e->src)))
6045 e->flags &= ~EDGE_FALLTHRU;
6046 }
6047 }
60ecc450 6048
5da5e283 6049 if (targetm.have_simple_return ())
ae6fd0a8 6050 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
6051 returnjump, unconverted_simple_returns);
1f021f97 6052
60ecc450 6053 /* Emit sibling epilogues before any sibling call sites. */
34154e27 6054 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6055 ei_safe_edge (ei));
6056 )
60ecc450 6057 {
6058 basic_block bb = e->src;
8bb2625b 6059 rtx_insn *insn = BB_END (bb);
60ecc450 6060
6d7dc5b9 6061 if (!CALL_P (insn)
1f021f97 6062 || ! SIBLING_CALL_P (insn)
5da5e283 6063 || (targetm.have_simple_return ()
6064 && entry_edge != orig_entry_edge
6065 && !bitmap_bit_p (&bb_flags, bb->index)))
cd665a06 6066 {
6067 ei_next (&ei);
6068 continue;
6069 }
60ecc450 6070
cf3a33c8 6071 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
1f021f97 6072 {
6073 start_sequence ();
6074 emit_note (NOTE_INSN_EPILOGUE_BEG);
6075 emit_insn (ep_seq);
4cd001d5 6076 rtx_insn *seq = get_insns ();
1f021f97 6077 end_sequence ();
60ecc450 6078
1f021f97 6079 /* Retain a map of the epilogue insns. Used in life analysis to
6080 avoid getting rid of sibcall epilogue insns. Do this before we
6081 actually emit the sequence. */
6082 record_insns (seq, NULL, &epilogue_insn_hash);
5169661d 6083 set_insn_locations (seq, epilogue_location);
31d3e01c 6084
1f021f97 6085 emit_insn_before (seq, insn);
6086 }
cd665a06 6087 ei_next (&ei);
60ecc450 6088 }
58d5b39c 6089
142e7d22 6090 if (epilogue_end)
6091 {
5a7c3c87 6092 rtx_insn *insn, *next;
142e7d22 6093
6094 /* Similarly, move any line notes that appear after the epilogue.
424da949 6095 There is no need, however, to be quite so anal about the existence
737251e7 6096 of such a note. Also possibly move
dc8def52 6097 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6098 info generation. */
06ebc183 6099 for (insn = epilogue_end; insn; insn = next)
142e7d22 6100 {
6101 next = NEXT_INSN (insn);
48e1416a 6102 if (NOTE_P (insn)
ad4583d9 6103 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
142e7d22 6104 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6105 }
6106 }
3072d30e 6107
1f021f97 6108 bitmap_clear (&bb_flags);
6109
3072d30e 6110 /* Threading the prologue and epilogue changes the artificial refs
6111 in the entry and exit blocks. */
6112 epilogue_completed = 1;
6113 df_update_entry_exit_and_calls ();
b2c5602e 6114}
6115
25e880b1 6116/* Reposition the prologue-end and epilogue-begin notes after
6117 instruction scheduling. */
b2c5602e 6118
6119void
3072d30e 6120reposition_prologue_and_epilogue_notes (void)
b2c5602e 6121{
cf3a33c8 6122 if (!targetm.have_prologue ()
6123 && !targetm.have_epilogue ()
6124 && !targetm.have_sibcall_epilogue ())
317443b3 6125 return;
317443b3 6126
25e880b1 6127 /* Since the hash table is created on demand, the fact that it is
6128 non-null is a signal that it is non-empty. */
6129 if (prologue_insn_hash != NULL)
b2c5602e 6130 {
d1023d12 6131 size_t len = prologue_insn_hash->elements ();
8bb2625b 6132 rtx_insn *insn, *last = NULL, *note = NULL;
b2c5602e 6133
25e880b1 6134 /* Scan from the beginning until we reach the last prologue insn. */
6135 /* ??? While we do have the CFG intact, there are two problems:
6136 (1) The prologue can contain loops (typically probing the stack),
6137 which means that the end of the prologue isn't in the first bb.
6138 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
3072d30e 6139 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
b2c5602e 6140 {
6d7dc5b9 6141 if (NOTE_P (insn))
12d1c03c 6142 {
ad4583d9 6143 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
60ecc450 6144 note = insn;
6145 }
25e880b1 6146 else if (contains (insn, prologue_insn_hash))
60ecc450 6147 {
5c0913b4 6148 last = insn;
6149 if (--len == 0)
6150 break;
6151 }
6152 }
60d903f5 6153
5c0913b4 6154 if (last)
6155 {
25e880b1 6156 if (note == NULL)
5c0913b4 6157 {
25e880b1 6158 /* Scan forward looking for the PROLOGUE_END note. It should
6159 be right at the beginning of the block, possibly with other
6160 insn notes that got moved there. */
6161 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6162 {
6163 if (NOTE_P (note)
6164 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6165 break;
6166 }
5c0913b4 6167 }
2a588794 6168
5c0913b4 6169 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6d7dc5b9 6170 if (LABEL_P (last))
5c0913b4 6171 last = NEXT_INSN (last);
6172 reorder_insns (note, note, last);
b2c5602e 6173 }
60ecc450 6174 }
6175
25e880b1 6176 if (epilogue_insn_hash != NULL)
60ecc450 6177 {
25e880b1 6178 edge_iterator ei;
6179 edge e;
b2c5602e 6180
34154e27 6181 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
b2c5602e 6182 {
8bb2625b 6183 rtx_insn *insn, *first = NULL, *note = NULL;
c009a3ec 6184 basic_block bb = e->src;
2a588794 6185
c009a3ec 6186 /* Scan from the beginning until we reach the first epilogue insn. */
25e880b1 6187 FOR_BB_INSNS (bb, insn)
5c0913b4 6188 {
25e880b1 6189 if (NOTE_P (insn))
6190 {
6191 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6192 {
6193 note = insn;
c009a3ec 6194 if (first != NULL)
25e880b1 6195 break;
6196 }
6197 }
c009a3ec 6198 else if (first == NULL && contains (insn, epilogue_insn_hash))
25e880b1 6199 {
c009a3ec 6200 first = insn;
25e880b1 6201 if (note != NULL)
6202 break;
6203 }
12d1c03c 6204 }
c009a3ec 6205
6206 if (note)
6207 {
6208 /* If the function has a single basic block, and no real
48e1416a 6209 epilogue insns (e.g. sibcall with no cleanup), the
c009a3ec 6210 epilogue note can get scheduled before the prologue
6211 note. If we have frame related prologue insns, having
6212 them scanned during the epilogue will result in a crash.
6213 In this case re-order the epilogue note to just before
6214 the last insn in the block. */
6215 if (first == NULL)
6216 first = BB_END (bb);
6217
6218 if (PREV_INSN (first) != note)
6219 reorder_insns (note, note, PREV_INSN (first));
6220 }
b2c5602e 6221 }
6222 }
b2c5602e 6223}
a7b0c170 6224
9631926a 6225/* Returns the name of function declared by FNDECL. */
6226const char *
6227fndecl_name (tree fndecl)
6228{
6229 if (fndecl == NULL)
6230 return "(nofn)";
6231 return lang_hooks.decl_printable_name (fndecl, 2);
6232}
6233
4a020a8c 6234/* Returns the name of function FN. */
6235const char *
6236function_name (struct function *fn)
6237{
9631926a 6238 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6239 return fndecl_name (fndecl);
4a020a8c 6240}
6241
35901471 6242/* Returns the name of the current function. */
6243const char *
6244current_function_name (void)
6245{
4a020a8c 6246 return function_name (cfun);
35901471 6247}
77fce4cd 6248\f
6249
2a1990e9 6250static unsigned int
77fce4cd 6251rest_of_handle_check_leaf_regs (void)
6252{
6253#ifdef LEAF_REGISTERS
d5bf7b64 6254 crtl->uses_only_leaf_regs
77fce4cd 6255 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6256#endif
2a1990e9 6257 return 0;
77fce4cd 6258}
6259
35df6eb4 6260/* Insert a TYPE into the used types hash table of CFUN. */
1a4c44c5 6261
35df6eb4 6262static void
6263used_types_insert_helper (tree type, struct function *func)
f6e59711 6264{
35df6eb4 6265 if (type != NULL && func != NULL)
f6e59711 6266 {
f6e59711 6267 if (func->used_types_hash == NULL)
8f359205 6268 func->used_types_hash = hash_set<tree>::create_ggc (37);
6269
6270 func->used_types_hash->add (type);
f6e59711 6271 }
6272}
6273
35df6eb4 6274/* Given a type, insert it into the used hash table in cfun. */
6275void
6276used_types_insert (tree t)
6277{
6278 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
bd564c29 6279 if (TYPE_NAME (t))
6280 break;
6281 else
6282 t = TREE_TYPE (t);
26ee9e7a 6283 if (TREE_CODE (t) == ERROR_MARK)
6284 return;
bd564c29 6285 if (TYPE_NAME (t) == NULL_TREE
6286 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6287 t = TYPE_MAIN_VARIANT (t);
35df6eb4 6288 if (debug_info_level > DINFO_LEVEL_NONE)
1a4c44c5 6289 {
6290 if (cfun)
6291 used_types_insert_helper (t, cfun);
6292 else
f1f41a6c 6293 {
6294 /* So this might be a type referenced by a global variable.
6295 Record that type so that we can later decide to emit its
6296 debug information. */
6297 vec_safe_push (types_used_by_cur_var_decl, t);
6298 }
1a4c44c5 6299 }
6300}
6301
6302/* Helper to Hash a struct types_used_by_vars_entry. */
6303
6304static hashval_t
6305hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6306{
6307 gcc_assert (entry && entry->var_decl && entry->type);
6308
6309 return iterative_hash_object (entry->type,
6310 iterative_hash_object (entry->var_decl, 0));
6311}
6312
6313/* Hash function of the types_used_by_vars_entry hash table. */
6314
6315hashval_t
2ef51f0e 6316used_type_hasher::hash (types_used_by_vars_entry *entry)
1a4c44c5 6317{
1a4c44c5 6318 return hash_types_used_by_vars_entry (entry);
6319}
6320
6321/*Equality function of the types_used_by_vars_entry hash table. */
6322
2ef51f0e 6323bool
6324used_type_hasher::equal (types_used_by_vars_entry *e1,
6325 types_used_by_vars_entry *e2)
1a4c44c5 6326{
1a4c44c5 6327 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6328}
6329
6330/* Inserts an entry into the types_used_by_vars_hash hash table. */
6331
6332void
6333types_used_by_var_decl_insert (tree type, tree var_decl)
6334{
6335 if (type != NULL && var_decl != NULL)
6336 {
2ef51f0e 6337 types_used_by_vars_entry **slot;
1a4c44c5 6338 struct types_used_by_vars_entry e;
6339 e.var_decl = var_decl;
6340 e.type = type;
6341 if (types_used_by_vars_hash == NULL)
2ef51f0e 6342 types_used_by_vars_hash
6343 = hash_table<used_type_hasher>::create_ggc (37);
6344
6345 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
1a4c44c5 6346 if (*slot == NULL)
6347 {
6348 struct types_used_by_vars_entry *entry;
25a27413 6349 entry = ggc_alloc<types_used_by_vars_entry> ();
1a4c44c5 6350 entry->type = type;
6351 entry->var_decl = var_decl;
6352 *slot = entry;
6353 }
6354 }
35df6eb4 6355}
6356
cbe8bda8 6357namespace {
6358
6359const pass_data pass_data_leaf_regs =
6360{
6361 RTL_PASS, /* type */
6362 "*leaf_regs", /* name */
6363 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 6364 TV_NONE, /* tv_id */
6365 0, /* properties_required */
6366 0, /* properties_provided */
6367 0, /* properties_destroyed */
6368 0, /* todo_flags_start */
6369 0, /* todo_flags_finish */
77fce4cd 6370};
6371
cbe8bda8 6372class pass_leaf_regs : public rtl_opt_pass
6373{
6374public:
9af5ce0c 6375 pass_leaf_regs (gcc::context *ctxt)
6376 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
cbe8bda8 6377 {}
6378
6379 /* opt_pass methods: */
65b0537f 6380 virtual unsigned int execute (function *)
6381 {
6382 return rest_of_handle_check_leaf_regs ();
6383 }
cbe8bda8 6384
6385}; // class pass_leaf_regs
6386
6387} // anon namespace
6388
6389rtl_opt_pass *
6390make_pass_leaf_regs (gcc::context *ctxt)
6391{
6392 return new pass_leaf_regs (ctxt);
6393}
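
/* Illustration (an assumption about the surrounding pass machinery, not
   original text): factories such as make_pass_leaf_regs are what the pass
   manager calls to instantiate a pass; the declaration normally lives in
   tree-pass.h and the pass is sequenced in passes.def, roughly

     extern rtl_opt_pass *make_pass_leaf_regs (gcc::context *ctxt);
     ...
     NEXT_PASS (pass_leaf_regs);

   so the pass_data/class/make_pass_* triple above is the standard
   boilerplate for registering an RTL pass.  */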
6394
3072d30e 6395static unsigned int
6396rest_of_handle_thread_prologue_and_epilogue (void)
6397{
6398 if (optimize)
6399 cleanup_cfg (CLEANUP_EXPENSIVE);
990495a7 6400
3072d30e 6401 /* On some machines, the prologue and epilogue code, or parts thereof,
6402 can be represented as RTL. Doing so lets us schedule insns between
6403 it and the rest of the code and also allows delayed branch
6404 scheduling to operate in the epilogue. */
3072d30e 6405 thread_prologue_and_epilogue_insns ();
990495a7 6406
6a5f2336 6407 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6408 see PR57320. */
6409 cleanup_cfg (0);
6410
990495a7 6411 /* The stack usage info is finalized during prologue expansion. */
8c0dd614 6412 if (flag_stack_usage_info)
990495a7 6413 output_stack_usage ();
6414
3072d30e 6415 return 0;
6416}
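
/* Illustration (an assumption based on the -fstack-usage documentation,
   not original text): flag_stack_usage_info is set e.g. by compiling

     extern void use (char *);

     void
     foo (void)
     {
       char buf[256];
       use (buf);
     }

   with -fstack-usage, in which case output_stack_usage appends one line
   per function to the auxiliary .su file, giving the function name, the
   number of bytes of stack used, and a qualifier such as "static" or
   "dynamic".  */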
6417
cbe8bda8 6418namespace {
6419
6420const pass_data pass_data_thread_prologue_and_epilogue =
6421{
6422 RTL_PASS, /* type */
6423 "pro_and_epilogue", /* name */
6424 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 6425 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6426 0, /* properties_required */
6427 0, /* properties_provided */
6428 0, /* properties_destroyed */
8b88439e 6429 0, /* todo_flags_start */
6430 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
3072d30e 6431};
cbe8bda8 6432
6433class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6434{
6435public:
9af5ce0c 6436 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6437 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
cbe8bda8 6438 {}
6439
6440 /* opt_pass methods: */
65b0537f 6441 virtual unsigned int execute (function *)
6442 {
6443 return rest_of_handle_thread_prologue_and_epilogue ();
6444 }
cbe8bda8 6445
6446}; // class pass_thread_prologue_and_epilogue
6447
6448} // anon namespace
6449
6450rtl_opt_pass *
6451make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6452{
6453 return new pass_thread_prologue_and_epilogue (ctxt);
6454}
9dc6d5bb 6455\f
6456
6457/* This mini-pass fixes fall-out from SSA in asm statements that have
48e1416a 6458 in-out constraints. Say you start with
9dc6d5bb 6459
6460 orig = inout;
6461 asm ("": "+mr" (inout));
6462 use (orig);
6463
6464 which is transformed very early to use explicit output and match operands:
6465
6466 orig = inout;
6467 asm ("": "=mr" (inout) : "0" (inout));
6468 use (orig);
6469
6470 Or, after SSA and copyprop,
6471
6472 asm ("": "=mr" (inout_2) : "0" (inout_1));
6473 use (inout_1);
6474
6475 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6476 they represent two separate values, so they will get different pseudo
6477 registers during expansion. Then, since the two operands need to match
6478 per the constraints, but use different pseudo registers, reload can
6479 only register a reload for these operands. But reloads can only be
6480 satisfied by hardregs, not by memory, so we need a register for this
6481 reload, just because we are presented with non-matching operands.
6482 So, even though we allow memory for this operand, no memory can be
6483 used for it, just because the two operands don't match. This can
6484 cause reload failures on register-starved targets.
6485
6486 So it's a symptom of reload not being able to use memory for reloads
6487 or, alternatively, a symptom of both operands not coming into
6488 reload as matching (in which case the pseudo could go to memory just
6489 fine, as the alternative allows it, and no reload would be necessary).
6490 We fix the latter problem here by transforming
6491
6492 asm ("": "=mr" (inout_2) : "0" (inout_1));
6493
6494 back to
6495
6496 inout_2 = inout_1;
6497 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6498
6499static void
8bb2625b 6500match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
9dc6d5bb 6501{
6502 int i;
6503 bool changed = false;
6504 rtx op = SET_SRC (p_sets[0]);
6505 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6506 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
2457c754 6507 bool *output_matched = XALLOCAVEC (bool, noutputs);
9dc6d5bb 6508
3f982e5a 6509 memset (output_matched, 0, noutputs * sizeof (bool));
9dc6d5bb 6510 for (i = 0; i < ninputs; i++)
6511 {
8bb2625b 6512 rtx input, output;
6513 rtx_insn *insns;
9dc6d5bb 6514 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6515 char *end;
d069e0d3 6516 int match, j;
9dc6d5bb 6517
fbcb9be4 6518 if (*constraint == '%')
6519 constraint++;
6520
9dc6d5bb 6521 match = strtoul (constraint, &end, 10);
6522 if (end == constraint)
6523 continue;
6524
6525 gcc_assert (match < noutputs);
6526 output = SET_DEST (p_sets[match]);
6527 input = RTVEC_ELT (inputs, i);
d069e0d3 6528 /* Only do the transformation for pseudos. */
6529 if (! REG_P (output)
6530 || rtx_equal_p (output, input)
9dc6d5bb 6531 || (GET_MODE (input) != VOIDmode
6532 && GET_MODE (input) != GET_MODE (output)))
6533 continue;
6534
d069e0d3 6535 /* We can't do anything if the output is also used as input,
6536 as we're going to overwrite it. */
6537 for (j = 0; j < ninputs; j++)
6538 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6539 break;
6540 if (j != ninputs)
6541 continue;
6542
3f982e5a 6543 /* Avoid changing the same input several times. For
6544 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6545 only change 'in' once (to out1), rather than changing it
6546 first to out1 and afterwards to out2. */
6547 if (i > 0)
6548 {
6549 for (j = 0; j < noutputs; j++)
6550 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6551 break;
6552 if (j != noutputs)
6553 continue;
6554 }
6555 output_matched[match] = true;
6556
9dc6d5bb 6557 start_sequence ();
d069e0d3 6558 emit_move_insn (output, input);
9dc6d5bb 6559 insns = get_insns ();
6560 end_sequence ();
9dc6d5bb 6561 emit_insn_before (insns, insn);
d069e0d3 6562
6563 /* Now replace all mentions of the input with output. We can't
f0b5f617 6564 just replace the occurrence in inputs[i], as the register might
d069e0d3 6565 also be used in some other input (or even in an address of an
6566 output), which would mean possibly increasing the number of
6567 inputs by one (namely 'output' in addition), which might pose
6568 too complicated a problem for reload to solve. E.g. this situation:
6569
6570 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6571
c7684b8e 6572 Here 'input' is used in two occurrences as input (once for the
d069e0d3 6573 input operand, once for the address in the second output operand).
f0b5f617 6574 If we replaced only the occurrence of the input operand (to
d069e0d3 6575 make the operands match), we would be left with this:
6576
6577 output = input
6578 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6579
6580 Now we suddenly have two different input values (containing the same
6581 value, but different pseudos) where we formerly had only one.
6582 With more complicated asms this might lead to reload failures
6583 which wouldn't have happened without this pass. So, iterate over
c7684b8e 6584 all operands and replace all occurrences of the register used. */
d069e0d3 6585 for (j = 0; j < noutputs; j++)
f211ad17 6586 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
d069e0d3 6587 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6588 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6589 input, output);
6590 for (j = 0; j < ninputs; j++)
6591 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6592 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6593 input, output);
6594
9dc6d5bb 6595 changed = true;
6596 }
6597
6598 if (changed)
6599 df_insn_rescan (insn);
6600}
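
/* Illustration only (a sketch, not original text): the situation handled
   above arises from ordinary user code such as

     int
     f (int inout)
     {
       int orig = inout;
       asm ("" : "+mr" (inout));
       return orig + inout;
     }

   After SSA and copy propagation the two matched asm operands can end up
   as distinct pseudos; the move emitted above reintroduces the copy so
   that both operands share a pseudo again, the "m" alternative remains
   usable, and no hard register is forced merely to satisfy the matching
   constraint.  */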
6601
bdb8985a 6602/* Add the decl D to the local_decls list of FUN. */
6603
6604void
6605add_local_decl (struct function *fun, tree d)
6606{
6607 gcc_assert (TREE_CODE (d) == VAR_DECL);
6608 vec_safe_push (fun->local_decls, d);
6609}
6610
65b0537f 6611namespace {
6612
6613const pass_data pass_data_match_asm_constraints =
6614{
6615 RTL_PASS, /* type */
6616 "asmcons", /* name */
6617 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 6618 TV_NONE, /* tv_id */
6619 0, /* properties_required */
6620 0, /* properties_provided */
6621 0, /* properties_destroyed */
6622 0, /* todo_flags_start */
6623 0, /* todo_flags_finish */
6624};
6625
6626class pass_match_asm_constraints : public rtl_opt_pass
6627{
6628public:
6629 pass_match_asm_constraints (gcc::context *ctxt)
6630 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6631 {}
6632
6633 /* opt_pass methods: */
6634 virtual unsigned int execute (function *);
6635
6636}; // class pass_match_asm_constraints
6637
6638unsigned
6639pass_match_asm_constraints::execute (function *fun)
9dc6d5bb 6640{
6641 basic_block bb;
8bb2625b 6642 rtx_insn *insn;
6643 rtx pat, *p_sets;
9dc6d5bb 6644 int noutputs;
6645
18d50ae6 6646 if (!crtl->has_asm_statement)
9dc6d5bb 6647 return 0;
6648
6649 df_set_flags (DF_DEFER_INSN_RESCAN);
65b0537f 6650 FOR_EACH_BB_FN (bb, fun)
9dc6d5bb 6651 {
6652 FOR_BB_INSNS (bb, insn)
6653 {
6654 if (!INSN_P (insn))
6655 continue;
6656
6657 pat = PATTERN (insn);
6658 if (GET_CODE (pat) == PARALLEL)
6659 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6660 else if (GET_CODE (pat) == SET)
6661 p_sets = &PATTERN (insn), noutputs = 1;
6662 else
6663 continue;
6664
6665 if (GET_CODE (*p_sets) == SET
6666 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6667 match_asm_constraints_1 (insn, p_sets, noutputs);
6668 }
6669 }
6670
6671 return TODO_df_finish;
6672}
6673
cbe8bda8 6674} // anon namespace
6675
6676rtl_opt_pass *
6677make_pass_match_asm_constraints (gcc::context *ctxt)
6678{
6679 return new pass_match_asm_constraints (ctxt);
6680}
6681
35901471 6682
1f3233d1 6683#include "gt-function.h"