[thirdparty/gcc.git] / gcc / function.c
bccafa26 1/* Expands front end tree to back end RTL for GCC.
aad93da1 2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
897b77d6 3
f12b58b3 4This file is part of GCC.
897b77d6 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
897b77d6 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
897b77d6 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
897b77d6 19
897b77d6 20/* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
e8825bb0 32 not get a hard register. */
897b77d6 33
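/* An illustrative sketch (not part of this file) of how the expansion
   pass drives the entry points described above; the real call sites are
   in the GIMPLE-to-RTL expander:

     expand_function_start (fndecl);
     ... expand each statement, calling assign_stack_local and
         assign_stack_temp as stack slots are needed ...
     expand_function_end ();

   expand_function_start sets up the incoming parameters and the return
   slot; expand_function_end emits the return sequence.  */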
34#include "config.h"
405711de 35#include "system.h"
805e22b2 36#include "coretypes.h"
9ef16211 37#include "backend.h"
7c29e30e 38#include "target.h"
9ef16211 39#include "rtl.h"
7c29e30e 40#include "tree.h"
41#include "gimple-expr.h"
42#include "cfghooks.h"
9ef16211 43#include "df.h"
ad7b10a2 44#include "memmodel.h"
7c29e30e 45#include "tm_p.h"
46#include "stringpool.h"
47#include "expmed.h"
48#include "optabs.h"
49#include "regs.h"
50#include "emit-rtl.h"
51#include "recog.h"
d7091a76 52#include "rtl-error.h"
b20a8bb4 53#include "alias.h"
b20a8bb4 54#include "fold-const.h"
9ed99284 55#include "stor-layout.h"
56#include "varasm.h"
dcabb90e 57#include "except.h"
d53441c8 58#include "dojump.h"
59#include "explow.h"
60#include "calls.h"
897b77d6 61#include "expr.h"
947ed59a 62#include "optabs-tree.h"
897b77d6 63#include "output.h"
96554925 64#include "langhooks.h"
218e3e4e 65#include "common/common-target.h"
a8783bee 66#include "gimplify.h"
77fce4cd 67#include "tree-pass.h"
94ea8568 68#include "cfgrtl.h"
69#include "cfganal.h"
70#include "cfgbuild.h"
71#include "cfgcleanup.h"
94f92c36 72#include "cfgexpand.h"
c562205f 73#include "shrink-wrap.h"
e0ff5636 74#include "toplev.h"
2d184b77 75#include "rtl-iter.h"
058a1b7a 76#include "tree-chkp.h"
77#include "rtl-chkp.h"
b2df3bbf 78#include "tree-dfa.h"
b1090780 79#include "tree-ssa.h"
30a86690 80#include "stringpool.h"
81#include "attribs.h"
f1a0edff 82
c8a152f6 83/* So we can assign to cfun in this file. */
84#undef cfun
85
256f9b65 86#ifndef STACK_ALIGNMENT_NEEDED
87#define STACK_ALIGNMENT_NEEDED 1
88#endif
89
1cd50c9a 90#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
91
897b77d6 92/* Round a value down to the largest multiple of
93 the required alignment. Avoid using division in case the value is
94 negative. Assume the alignment is a power of two. */
95#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
96
97/* Similar, but round to the next highest integer that meets the
98 alignment. */
99#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
100
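/* Illustrative values for the two rounding macros above, assuming an
   8-byte alignment:

     FLOOR_ROUND (13, 8)  ==  8      FLOOR_ROUND (-13, 8) == -16
     CEIL_ROUND  (13, 8)  == 16      CEIL_ROUND  (16, 8)  ==  16

   The masking form rounds toward minus infinity even for negative
   frame offsets, which a plain integer division would not guarantee.  */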
897b77d6 101/* Nonzero once virtual register instantiation has been done.
9c0a756f 102 assign_stack_local uses frame_pointer_rtx when this is nonzero.
103 calls.c:emit_library_call_value_1 uses it to set up
104 post-instantiation libcalls. */
105int virtuals_instantiated;
897b77d6 106
4781f9b9 107/* Assign unique numbers to labels generated for profiling, debugging, etc. */
573aba85 108static GTY(()) int funcdef_no;
b8a21949 109
ab5beff9 110/* These variables hold pointers to functions to create and destroy
111 target specific, per-function data structures. */
de1b648b 112struct machine_function * (*init_machine_status) (void);
adc2961c 113
304c5bf1 114/* The currently compiled function. */
08513b52 115struct function *cfun = 0;
304c5bf1 116
25e880b1 117/* These hashes record the prologue and epilogue insns. */
d1023d12 118
eae1ecb4 119struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
d1023d12 120{
121 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
122 static bool equal (rtx a, rtx b) { return a == b; }
123};
124
125static GTY((cache))
126 hash_table<insn_cache_hasher> *prologue_insn_hash;
127static GTY((cache))
128 hash_table<insn_cache_hasher> *epilogue_insn_hash;
897b77d6 129\f
1a4c44c5 130
2ef51f0e 131hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
f1f41a6c 132vec<tree, va_gc> *types_used_by_cur_var_decl;
1a4c44c5 133
209a68cc 134/* Forward declarations. */
135
de1b648b 136static struct temp_slot *find_temp_slot_from_address (rtx);
de1b648b 137static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
3754d046 138static void pad_below (struct args_size *, machine_mode, tree);
8bb2625b 139static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
de1b648b 140static int all_blocks (tree, tree *);
141static tree *get_block_vector (tree, int *);
142extern tree debug_find_var_in_block_tree (tree, tree);
4885b286 143/* We always define `record_insns' even if it's not used so that we
2dc40d2d 144 can always export `prologue_epilogue_contains'. */
d1023d12 145static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
146 ATTRIBUTE_UNUSED;
e7ea1192 147static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
87d4aa85 148static void prepare_function_start (void);
de1b648b 149static void do_clobber_return_reg (rtx, void *);
150static void do_use_return_reg (rtx, void *);
94f92c36 151
8e4c05da 152\f
5737913a 153/* Stack of nested functions. */
154/* Keep track of the cfun stack. */
997d68fe 155
04009ada 156static vec<function *> function_context_stack;
897b77d6 157
158/* Save the current context for compilation of a nested function.
d2764e2d 159 This is called from language-specific code. */
897b77d6 160
161void
d2764e2d 162push_function_context (void)
897b77d6 163{
08513b52 164 if (cfun == 0)
80f2ef47 165 allocate_struct_function (NULL, false);
304c5bf1 166
f1f41a6c 167 function_context_stack.safe_push (cfun);
87d4aa85 168 set_cfun (NULL);
897b77d6 169}
170
171/* Restore the last saved context, at the end of a nested function.
172 This function is called from language-specific code. */
173
174void
d2764e2d 175pop_function_context (void)
897b77d6 176{
f1f41a6c 177 struct function *p = function_context_stack.pop ();
87d4aa85 178 set_cfun (p);
897b77d6 179 current_function_decl = p->decl;
897b77d6 180
897b77d6 181 /* Reset variables that have known state during rtx generation. */
897b77d6 182 virtuals_instantiated = 0;
316bc009 183 generating_concat_p = 1;
897b77d6 184}
2a228d52 185
3c3bb268 186/* Clear out all parts of the state in F that can safely be discarded
187 after the function has been parsed, but not compiled, to let
188 garbage collection reclaim the memory. */
189
190void
de1b648b 191free_after_parsing (struct function *f)
3c3bb268 192{
b75409ba 193 f->language = 0;
3c3bb268 194}
195
26df1c5e 196/* Clear out all parts of the state in F that can safely be discarded
197 after the function has been compiled, to let garbage collection
a57bcb3b 198 reclaim the memory. */
c788feb1 199
26df1c5e 200void
de1b648b 201free_after_compilation (struct function *f)
26df1c5e 202{
25e880b1 203 prologue_insn_hash = NULL;
204 epilogue_insn_hash = NULL;
205
dd045aee 206 free (crtl->emit.regno_pointer_align);
a4a0e8fd 207
fd6ffb7c 208 memset (crtl, 0, sizeof (struct rtl_data));
1f3233d1 209 f->eh = NULL;
1f3233d1 210 f->machine = NULL;
7a22afab 211 f->cfg = NULL;
789581b6 212 f->curr_properties &= ~PROP_cfg;
3c3bb268 213
a9f6414b 214 regno_reg_rtx = NULL;
26df1c5e 215}
897b77d6 216\f
0a893c29 217/* Return size needed for stack frame based on slots so far allocated.
218 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
219 the caller may have to do that. */
26d04e5f 220
0a893c29 221HOST_WIDE_INT
de1b648b 222get_frame_size (void)
0a893c29 223{
b079a207 224 if (FRAME_GROWS_DOWNWARD)
225 return -frame_offset;
226 else
227 return frame_offset;
0a893c29 228}
229
26d04e5f 230/* Issue an error message and return TRUE if frame OFFSET overflows in
231 the signed target pointer arithmetic for function FUNC. Otherwise
232 return FALSE. */
233
234bool
235frame_offset_overflow (HOST_WIDE_INT offset, tree func)
48e1416a 236{
26d04e5f 237 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
238
edc19fd0 239 if (size > (HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
26d04e5f 240 /* Leave room for the fixed part of the frame. */
241 - 64 * UNITS_PER_WORD)
242 {
712d2297 243 error_at (DECL_SOURCE_LOCATION (func),
244 "total size of local objects too large");
26d04e5f 245 return TRUE;
246 }
247
248 return FALSE;
249}
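/* For concreteness (a sketch): with a 64-bit Pmode and 8-byte words the
   check above rejects any frame larger than 2^63 - 64 * 8
   = 2^63 - 512 bytes, leaving room for the fixed part of the frame in
   signed target pointer arithmetic.  */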
250
c899a840 251/* Return the minimum spill slot alignment for a register of mode MODE. */
252
253unsigned int
254spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
255{
256 return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
257}
258
ad33891d 259/* Return stack slot alignment in bits for TYPE and MODE. */
260
261static unsigned int
3754d046 262get_stack_local_alignment (tree type, machine_mode mode)
ad33891d 263{
264 unsigned int alignment;
265
266 if (mode == BLKmode)
267 alignment = BIGGEST_ALIGNMENT;
268 else
269 alignment = GET_MODE_ALIGNMENT (mode);
270
271 /* Allow the front-end to (possibly) increase the alignment of this
272 stack slot. */
273 if (! type)
274 type = lang_hooks.types.type_for_mode (mode, 0);
275
276 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
277}
278
43165fe4 279/* Determine whether it is possible to fit a stack slot of size SIZE and
280 alignment ALIGNMENT into an area in the stack frame that starts at
281 frame offset START and has a length of LENGTH. If so, store the frame
282 offset to be used for the stack slot in *POFFSET and return true;
283 return false otherwise. This function will extend the frame size when
284 given a start/length pair that lies at the end of the frame. */
285
286static bool
287try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
288 HOST_WIDE_INT size, unsigned int alignment,
289 HOST_WIDE_INT *poffset)
290{
291 HOST_WIDE_INT this_frame_offset;
292 int frame_off, frame_alignment, frame_phase;
293
294 /* Calculate how many bytes the start of local variables is off from
295 stack alignment. */
296 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
297 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
298 frame_phase = frame_off ? frame_alignment - frame_off : 0;
299
300 /* Round the frame offset to the specified alignment. */
301
302 /* We must be careful here, since FRAME_OFFSET might be negative and
303 division with a negative dividend isn't as well defined as we might
304 like. So we instead assume that ALIGNMENT is a power of two and
305 use logical operations which are unambiguous. */
306 if (FRAME_GROWS_DOWNWARD)
307 this_frame_offset
308 = (FLOOR_ROUND (start + length - size - frame_phase,
309 (unsigned HOST_WIDE_INT) alignment)
310 + frame_phase);
311 else
312 this_frame_offset
313 = (CEIL_ROUND (start - frame_phase,
314 (unsigned HOST_WIDE_INT) alignment)
315 + frame_phase);
316
317 /* See if it fits. If this space is at the edge of the frame,
318 consider extending the frame to make it fit. Our caller relies on
319 this when allocating a new slot. */
320 if (frame_offset == start && this_frame_offset < frame_offset)
321 frame_offset = this_frame_offset;
322 else if (this_frame_offset < start)
323 return false;
324 else if (start + length == frame_offset
325 && this_frame_offset + size > start + length)
326 frame_offset = this_frame_offset + size;
327 else if (this_frame_offset + size > start + length)
328 return false;
329
330 *poffset = this_frame_offset;
331 return true;
332}
333
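/* A worked example for try_fit_stack_local (a sketch, assuming
   STARTING_FRAME_OFFSET == 0 and an upward-growing frame): with a free
   area of START == 16 and LENGTH == 16, a request of SIZE == 8 at
   ALIGNMENT == 8 gives frame_phase == 0 and

     this_frame_offset = CEIL_ROUND (16, 8) = 16,

   so 16 + 8 <= 32 fits inside the area; *POFFSET becomes 16 and the
   caller can record the remaining bytes [24, 32) with add_frame_space.  */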
334/* Create a new frame_space structure describing free space in the stack
335 frame beginning at START and ending at END, and chain it into the
336 function's frame_space_list. */
337
338static void
339add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
340{
25a27413 341 struct frame_space *space = ggc_alloc<frame_space> ();
43165fe4 342 space->next = crtl->frame_space_list;
343 crtl->frame_space_list = space;
344 space->start = start;
345 space->length = end - start;
346}
347
897b77d6 348/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
349 with machine mode MODE.
06ebc183 350
897b77d6 351 ALIGN controls the amount of alignment for the address of the slot:
352 0 means according to MODE,
353 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
c20b6803 354 -2 means use BITS_PER_UNIT,
897b77d6 355 positive specifies alignment boundary in bits.
356
943d8723 357 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
358 alignment and ASLK_RECORD_PAD bit set if we should remember
359 extra space we allocated for alignment purposes. When we are
360 called from assign_stack_temp_for_type, it is not set so we don't
361 track the same stack slot in two independent lists.
27a7a23a 362
b079a207 363 We do not round to stack_boundary here. */
897b77d6 364
b079a207 365rtx
3754d046 366assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
943d8723 367 int align, int kind)
897b77d6 368{
19cb6b50 369 rtx x, addr;
897b77d6 370 int bigend_correction = 0;
286887d9 371 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
ad33891d 372 unsigned int alignment, alignment_in_bits;
897b77d6 373
374 if (align == 0)
375 {
ad33891d 376 alignment = get_stack_local_alignment (NULL, mode);
9bd87fd2 377 alignment /= BITS_PER_UNIT;
897b77d6 378 }
379 else if (align == -1)
380 {
381 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
382 size = CEIL_ROUND (size, alignment);
383 }
c20b6803 384 else if (align == -2)
385 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
897b77d6 386 else
387 alignment = align / BITS_PER_UNIT;
388
27a7a23a 389 alignment_in_bits = alignment * BITS_PER_UNIT;
390
27a7a23a 391 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
392 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
393 {
394 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
395 alignment = alignment_in_bits / BITS_PER_UNIT;
396 }
a79cb8e9 397
27a7a23a 398 if (SUPPORTS_STACK_ALIGNMENT)
399 {
400 if (crtl->stack_alignment_estimated < alignment_in_bits)
401 {
402 if (!crtl->stack_realign_processed)
403 crtl->stack_alignment_estimated = alignment_in_bits;
404 else
405 {
406 /* If stack is realigned and stack alignment value
407 hasn't been finalized, it is OK not to increase
408 stack_alignment_estimated. The bigger alignment
409 requirement is recorded in stack_alignment_needed
410 below. */
411 gcc_assert (!crtl->stack_realign_finalized);
412 if (!crtl->stack_realign_needed)
413 {
414 /* It is OK to reduce the alignment as long as the
415 requested size is 0 or the estimated stack
416 alignment >= mode alignment. */
943d8723 417 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
27a7a23a 418 || size == 0
419 || (crtl->stack_alignment_estimated
420 >= GET_MODE_ALIGNMENT (mode)));
421 alignment_in_bits = crtl->stack_alignment_estimated;
422 alignment = alignment_in_bits / BITS_PER_UNIT;
423 }
424 }
425 }
426 }
ad33891d 427
428 if (crtl->stack_alignment_needed < alignment_in_bits)
429 crtl->stack_alignment_needed = alignment_in_bits;
bd9c33a8 430 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
431 crtl->max_used_stack_slot_alignment = alignment_in_bits;
a79cb8e9 432
43165fe4 433 if (mode != BLKmode || size != 0)
434 {
943d8723 435 if (kind & ASLK_RECORD_PAD)
43165fe4 436 {
943d8723 437 struct frame_space **psp;
438
439 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
440 {
441 struct frame_space *space = *psp;
442 if (!try_fit_stack_local (space->start, space->length, size,
443 alignment, &slot_offset))
444 continue;
445 *psp = space->next;
446 if (slot_offset > space->start)
447 add_frame_space (space->start, slot_offset);
448 if (slot_offset + size < space->start + space->length)
449 add_frame_space (slot_offset + size,
450 space->start + space->length);
451 goto found_space;
452 }
43165fe4 453 }
454 }
455 else if (!STACK_ALIGNMENT_NEEDED)
456 {
457 slot_offset = frame_offset;
458 goto found_space;
459 }
460
461 old_frame_offset = frame_offset;
462
463 if (FRAME_GROWS_DOWNWARD)
464 {
465 frame_offset -= size;
466 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
9f843b44 467
943d8723 468 if (kind & ASLK_RECORD_PAD)
469 {
470 if (slot_offset > frame_offset)
471 add_frame_space (frame_offset, slot_offset);
472 if (slot_offset + size < old_frame_offset)
473 add_frame_space (slot_offset + size, old_frame_offset);
474 }
43165fe4 475 }
476 else
256f9b65 477 {
43165fe4 478 frame_offset += size;
479 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
480
943d8723 481 if (kind & ASLK_RECORD_PAD)
482 {
483 if (slot_offset > old_frame_offset)
484 add_frame_space (old_frame_offset, slot_offset);
485 if (slot_offset + size < frame_offset)
486 add_frame_space (slot_offset + size, frame_offset);
487 }
256f9b65 488 }
897b77d6 489
43165fe4 490 found_space:
897b77d6 491 /* On a big-endian machine, if we are allocating more space than we will use,
492 use the least significant bytes of those that are allocated. */
1c088911 493 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
897b77d6 494 bigend_correction = size - GET_MODE_SIZE (mode);
897b77d6 495
897b77d6 496 /* If we have already instantiated virtual registers, return the actual
497 address relative to the frame pointer. */
b079a207 498 if (virtuals_instantiated)
29c05e22 499 addr = plus_constant (Pmode, frame_pointer_rtx,
eb21abb2 500 trunc_int_for_mode
43165fe4 501 (slot_offset + bigend_correction
eb21abb2 502 + STARTING_FRAME_OFFSET, Pmode));
897b77d6 503 else
29c05e22 504 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
eb21abb2 505 trunc_int_for_mode
43165fe4 506 (slot_offset + bigend_correction,
eb21abb2 507 Pmode));
897b77d6 508
941522d6 509 x = gen_rtx_MEM (mode, addr);
ad33891d 510 set_mem_align (x, alignment_in_bits);
43283c91 511 MEM_NOTRAP_P (x) = 1;
897b77d6 512
84f4f7bf 513 vec_safe_push (stack_slot_list, x);
26df1c5e 514
b079a207 515 if (frame_offset_overflow (frame_offset, current_function_decl))
516 frame_offset = 0;
55abba5b 517
897b77d6 518 return x;
519}
27a7a23a 520
521/* Wrap up assign_stack_local_1 with the KIND argument set to ASLK_RECORD_PAD. */
522
523rtx
3754d046 524assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
27a7a23a 525{
943d8723 526 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
27a7a23a 527}
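/* A minimal usage sketch (illustrative only): allocate a 16-byte BLKmode
   slot aligned to 128 bits, recording any alignment padding as reusable
   free space:

     rtx slot = assign_stack_local (BLKmode, 16, 128);

   Passing ALIGN == 0 instead derives the alignment from the mode via
   get_stack_local_alignment, and ALIGN == -1 uses BIGGEST_ALIGNMENT and
   rounds SIZE up to a multiple of it.  */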
a6629703 528\f
fef299ce 529/* In order to evaluate some expressions, such as function calls returning
530 structures in memory, we need to temporarily allocate stack locations.
531 We record each allocated temporary in the following structure.
532
533 Associated with each temporary slot is a nesting level. When we pop up
534 one level, all temporaries associated with the previous level are freed.
535 Normally, all temporaries are freed after the execution of the statement
536 in which they were created. However, if we are inside a ({...}) grouping,
537 the result may be in a temporary and hence must be preserved. If the
538 result could be in a temporary, we preserve it if we can determine which
539 one it is in. If we cannot determine which temporary may contain the
540 result, all temporaries are preserved. A temporary is preserved by
0ab48139 541 pretending it was allocated at the previous nesting level. */
fef299ce 542
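/* A sketch of the nesting discipline described above (illustrative only;
   SImode and the 4-byte size are arbitrary):

     push_temp_slots ();
     rtx t = assign_stack_temp (SImode, 4);
     ... use T while expanding the current statement ...
     preserve_temp_slots (t);
     pop_temp_slots ();

   preserve_temp_slots is only needed when T must outlive the level being
   popped; it moves the slot to the enclosing level, which is how the
   result of a ({...}) statement expression is kept alive.  */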
fb1e4f4a 543struct GTY(()) temp_slot {
fef299ce 544 /* Points to next temporary slot. */
545 struct temp_slot *next;
546 /* Points to previous temporary slot. */
547 struct temp_slot *prev;
548 /* The rtx used to reference the slot. */
549 rtx slot;
fef299ce 550 /* The size, in units, of the slot. */
551 HOST_WIDE_INT size;
552 /* The type of the object in the slot, or zero if it doesn't correspond
553 to a type. We use this to determine whether a slot can be reused.
554 It can be reused if objects of the type of the new slot will always
555 conflict with objects of the type of the old slot. */
556 tree type;
0ac758f7 557 /* The alignment (in bits) of the slot. */
558 unsigned int align;
fef299ce 559 /* Nonzero if this temporary is currently in use. */
560 char in_use;
fef299ce 561 /* Nesting level at which this slot is being used. */
562 int level;
fef299ce 563 /* The offset of the slot from the frame_pointer, including extra space
564 for alignment. This info is for combine_temp_slots. */
565 HOST_WIDE_INT base_offset;
566 /* The size of the slot, including extra space for alignment. This
567 info is for combine_temp_slots. */
568 HOST_WIDE_INT full_size;
569};
570
2ef51f0e 571/* Entry for the below hash table. */
572struct GTY((for_user)) temp_slot_address_entry {
fef299ce 573 hashval_t hash;
574 rtx address;
575 struct temp_slot *temp_slot;
576};
577
b594087e 578struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
2ef51f0e 579{
580 static hashval_t hash (temp_slot_address_entry *);
581 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
582};
583
584/* A table of addresses that represent a stack slot. The table is a mapping
585 from address RTXen to a temp slot. */
586static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
587static size_t n_temp_slots_in_use;
588
a6629703 589/* Removes temporary slot TEMP from LIST. */
590
591static void
592cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
593{
594 if (temp->next)
595 temp->next->prev = temp->prev;
596 if (temp->prev)
597 temp->prev->next = temp->next;
598 else
599 *list = temp->next;
600
601 temp->prev = temp->next = NULL;
602}
603
604/* Inserts temporary slot TEMP to LIST. */
605
606static void
607insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
608{
609 temp->next = *list;
610 if (*list)
611 (*list)->prev = temp;
612 temp->prev = NULL;
613 *list = temp;
614}
615
616/* Returns the list of used temp slots at LEVEL. */
617
618static struct temp_slot **
619temp_slots_at_level (int level)
620{
f1f41a6c 621 if (level >= (int) vec_safe_length (used_temp_slots))
622 vec_safe_grow_cleared (used_temp_slots, level + 1);
a6629703 623
f1f41a6c 624 return &(*used_temp_slots)[level];
a6629703 625}
626
627/* Returns the maximal temporary slot level. */
628
629static int
630max_slot_level (void)
631{
632 if (!used_temp_slots)
633 return -1;
634
f1f41a6c 635 return used_temp_slots->length () - 1;
a6629703 636}
637
638/* Moves temporary slot TEMP to LEVEL. */
639
640static void
641move_slot_to_level (struct temp_slot *temp, int level)
642{
643 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
644 insert_slot_to_list (temp, temp_slots_at_level (level));
645 temp->level = level;
646}
647
648/* Make temporary slot TEMP available. */
649
650static void
651make_slot_available (struct temp_slot *temp)
652{
653 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
654 insert_slot_to_list (temp, &avail_temp_slots);
655 temp->in_use = 0;
656 temp->level = -1;
fc3c948c 657 n_temp_slots_in_use--;
a6629703 658}
fef299ce 659
660/* Compute the hash value for an address -> temp slot mapping.
661 The value is cached on the mapping entry. */
662static hashval_t
663temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
664{
665 int do_not_record = 0;
666 return hash_rtx (t->address, GET_MODE (t->address),
667 &do_not_record, NULL, false);
668}
669
670/* Return the hash value for an address -> temp slot mapping. */
2ef51f0e 671hashval_t
672temp_address_hasher::hash (temp_slot_address_entry *t)
fef299ce 673{
fef299ce 674 return t->hash;
675}
676
677/* Compare two address -> temp slot mapping entries. */
2ef51f0e 678bool
679temp_address_hasher::equal (temp_slot_address_entry *t1,
680 temp_slot_address_entry *t2)
fef299ce 681{
fef299ce 682 return exp_equiv_p (t1->address, t2->address, 0, true);
683}
684
685/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
686static void
687insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
688{
25a27413 689 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
fef299ce 690 t->address = address;
691 t->temp_slot = temp_slot;
692 t->hash = temp_slot_address_compute_hash (t);
2ef51f0e 693 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
fef299ce 694}
695
696/* Remove an address -> temp slot mapping entry if the temp slot is
697 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
2ef51f0e 698int
699remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
fef299ce 700{
2ef51f0e 701 const struct temp_slot_address_entry *t = *slot;
fef299ce 702 if (! t->temp_slot->in_use)
2ef51f0e 703 temp_slot_address_table->clear_slot (slot);
fef299ce 704 return 1;
705}
706
707/* Remove all mappings of addresses to unused temp slots. */
708static void
709remove_unused_temp_slot_addresses (void)
710{
fc3c948c 711 /* Use quicker clearing if there aren't any active temp slots. */
712 if (n_temp_slots_in_use)
2ef51f0e 713 temp_slot_address_table->traverse
714 <void *, remove_unused_temp_slot_addresses_1> (NULL);
fc3c948c 715 else
2ef51f0e 716 temp_slot_address_table->empty ();
fef299ce 717}
718
719/* Find the temp slot corresponding to the object at address X. */
720
721static struct temp_slot *
722find_temp_slot_from_address (rtx x)
723{
724 struct temp_slot *p;
725 struct temp_slot_address_entry tmp, *t;
726
727 /* First try the easy way:
728 See if X exists in the address -> temp slot mapping. */
729 tmp.address = x;
730 tmp.temp_slot = NULL;
731 tmp.hash = temp_slot_address_compute_hash (&tmp);
2ef51f0e 732 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
fef299ce 733 if (t)
734 return t->temp_slot;
735
736 /* If we have a sum involving a register, see if it points to a temp
737 slot. */
738 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
739 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
740 return p;
741 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
742 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
743 return p;
744
745 /* Last resort: Address is a virtual stack var address. */
746 if (GET_CODE (x) == PLUS
747 && XEXP (x, 0) == virtual_stack_vars_rtx
971ba038 748 && CONST_INT_P (XEXP (x, 1)))
fef299ce 749 {
750 int i;
751 for (i = max_slot_level (); i >= 0; i--)
752 for (p = *temp_slots_at_level (i); p; p = p->next)
753 {
754 if (INTVAL (XEXP (x, 1)) >= p->base_offset
755 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
756 return p;
757 }
758 }
759
760 return NULL;
761}
897b77d6 762\f
763/* Allocate a temporary stack slot and record it for possible later
764 reuse.
765
766 MODE is the machine mode to be given to the returned rtx.
767
768 SIZE is the size in units of the space required. We do no rounding here
769 since assign_stack_local will do any required rounding.
770
59241190 771 TYPE is the type that will be used for the stack slot. */
897b77d6 772
2b96c5f6 773rtx
3754d046 774assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
0ab48139 775 tree type)
897b77d6 776{
d3e10bed 777 unsigned int align;
a6629703 778 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
84be287d 779 rtx slot;
897b77d6 780
babc13fa 781 /* If SIZE is -1 it means that somebody tried to allocate a temporary
782 of a variable size. */
fdada98f 783 gcc_assert (size != -1);
babc13fa 784
ad33891d 785 align = get_stack_local_alignment (type, mode);
9bd87fd2 786
787 /* Try to find an available, already-allocated temporary of the proper
788 mode which meets the size and alignment requirements. Choose the
867eb367 789 smallest one with the closest alignment.
48e1416a 790
867eb367 791 If assign_stack_temp is called outside of the tree->rtl expansion,
792 we cannot reuse the stack slots (that may still refer to
793 VIRTUAL_STACK_VARS_REGNUM). */
794 if (!virtuals_instantiated)
a6629703 795 {
867eb367 796 for (p = avail_temp_slots; p; p = p->next)
a6629703 797 {
867eb367 798 if (p->align >= align && p->size >= size
799 && GET_MODE (p->slot) == mode
800 && objects_must_conflict_p (p->type, type)
801 && (best_p == 0 || best_p->size > p->size
802 || (best_p->size == p->size && best_p->align > p->align)))
a6629703 803 {
867eb367 804 if (p->align == align && p->size == size)
805 {
806 selected = p;
807 cut_slot_from_list (selected, &avail_temp_slots);
808 best_p = 0;
809 break;
810 }
811 best_p = p;
a6629703 812 }
a6629703 813 }
814 }
897b77d6 815
816 /* Make our best, if any, the one to use. */
817 if (best_p)
49d3d726 818 {
a6629703 819 selected = best_p;
820 cut_slot_from_list (selected, &avail_temp_slots);
821
49d3d726 822 /* If there are enough aligned bytes left over, make them into a new
823 temp_slot so that the extra bytes don't get wasted. Do this only
824 for BLKmode slots, so that we can be sure of the alignment. */
f7c44134 825 if (GET_MODE (best_p->slot) == BLKmode)
49d3d726 826 {
9bd87fd2 827 int alignment = best_p->align / BITS_PER_UNIT;
997d68fe 828 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
49d3d726 829
830 if (best_p->size - rounded_size >= alignment)
831 {
25a27413 832 p = ggc_alloc<temp_slot> ();
0ab48139 833 p->in_use = 0;
49d3d726 834 p->size = best_p->size - rounded_size;
e8a637a3 835 p->base_offset = best_p->base_offset + rounded_size;
836 p->full_size = best_p->full_size - rounded_size;
43283c91 837 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
9bd87fd2 838 p->align = best_p->align;
387bc205 839 p->type = best_p->type;
a6629703 840 insert_slot_to_list (p, &avail_temp_slots);
49d3d726 841
84f4f7bf 842 vec_safe_push (stack_slot_list, p->slot);
49d3d726 843
844 best_p->size = rounded_size;
5ea3c815 845 best_p->full_size = rounded_size;
49d3d726 846 }
847 }
49d3d726 848 }
06ebc183 849
897b77d6 850 /* If we still didn't find one, make a new temporary. */
a6629703 851 if (selected == 0)
897b77d6 852 {
997d68fe 853 HOST_WIDE_INT frame_offset_old = frame_offset;
854
25a27413 855 p = ggc_alloc<temp_slot> ();
997d68fe 856
d61726bc 857 /* We are passing an explicit alignment request to assign_stack_local.
858 One side effect of that is assign_stack_local will not round SIZE
859 to ensure the frame offset remains suitably aligned.
860
861 So for requests which depended on the rounding of SIZE, we go ahead
862 and round it now. We also make sure ALIGNMENT is at least
863 BIGGEST_ALIGNMENT. */
fdada98f 864 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
943d8723 865 p->slot = assign_stack_local_1 (mode,
866 (mode == BLKmode
867 ? CEIL_ROUND (size,
868 (int) align
869 / BITS_PER_UNIT)
870 : size),
871 align, 0);
9bd87fd2 872
873 p->align = align;
997d68fe 874
ef4d68c5 875 /* The following slot size computation is necessary because we don't
876 know the actual size of the temporary slot until assign_stack_local
877 has performed all the frame alignment and size rounding for the
d53be447 878 requested temporary. Note that extra space added for alignment
879 can be either above or below this stack slot depending on which
880 way the frame grows. We include the extra space if and only if it
881 is above this slot. */
d28d5017 882 if (FRAME_GROWS_DOWNWARD)
883 p->size = frame_offset_old - frame_offset;
884 else
885 p->size = size;
997d68fe 886
d53be447 887 /* Now define the fields used by combine_temp_slots. */
d28d5017 888 if (FRAME_GROWS_DOWNWARD)
889 {
890 p->base_offset = frame_offset;
891 p->full_size = frame_offset_old - frame_offset;
892 }
893 else
894 {
895 p->base_offset = frame_offset_old;
896 p->full_size = frame_offset - frame_offset_old;
897 }
a6629703 898
899 selected = p;
897b77d6 900 }
901
a6629703 902 p = selected;
897b77d6 903 p->in_use = 1;
387bc205 904 p->type = type;
fcb807f8 905 p->level = temp_slot_level;
fc3c948c 906 n_temp_slots_in_use++;
21c867df 907
a6629703 908 pp = temp_slots_at_level (p->level);
909 insert_slot_to_list (p, pp);
fef299ce 910 insert_temp_slot_address (XEXP (p->slot, 0), p);
84be287d 911
912 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
913 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
84f4f7bf 914 vec_safe_push (stack_slot_list, slot);
f7c44134 915
387bc205 916 /* If we know the alias set for the memory that will be used, use
917 it. If there's no TYPE, then we don't know anything about the
918 alias set for the memory. */
84be287d 919 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
920 set_mem_align (slot, align);
387bc205 921
6312a35e 922 /* If a type is specified, set the relevant flags. */
f7c44134 923 if (type != 0)
402f6a9e 924 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
43283c91 925 MEM_NOTRAP_P (slot) = 1;
f7c44134 926
84be287d 927 return slot;
897b77d6 928}
9bd87fd2 929
930/* Allocate a temporary stack slot and record it for possible later
0ab48139 931 reuse. The first two arguments are the same as in the preceding function. */
9bd87fd2 932
933rtx
3754d046 934assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
9bd87fd2 935{
0ab48139 936 return assign_stack_temp_for_type (mode, size, NULL_TREE);
9bd87fd2 937}
ad6d0e80 938\f
567c22a9 939/* Assign a temporary.
940 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
941 and so that should be used in error messages. In either case, we
942 allocate a temporary of the given type.
9c457457 943 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
30dd806d 944 it is 0 if a register is OK.
945 DONT_PROMOTE is 1 if we should not promote values in register
946 to wider modes. */
9c457457 947
948rtx
0ab48139 949assign_temp (tree type_or_decl, int memory_required,
de1b648b 950 int dont_promote ATTRIBUTE_UNUSED)
9c457457 951{
567c22a9 952 tree type, decl;
3754d046 953 machine_mode mode;
7752d341 954#ifdef PROMOTE_MODE
567c22a9 955 int unsignedp;
956#endif
957
958 if (DECL_P (type_or_decl))
959 decl = type_or_decl, type = TREE_TYPE (decl);
960 else
961 decl = NULL, type = type_or_decl;
962
963 mode = TYPE_MODE (type);
7752d341 964#ifdef PROMOTE_MODE
78a8ed03 965 unsignedp = TYPE_UNSIGNED (type);
aeb6d7ef 966#endif
ad6d0e80 967
1fe75cf2 968 /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
969 end. See also create_tmp_var for the gimplification-time check. */
970 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
971
9c457457 972 if (mode == BLKmode || memory_required)
973 {
997d68fe 974 HOST_WIDE_INT size = int_size_in_bytes (type);
9c457457 975 rtx tmp;
976
779a20c8 977 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
978 problems with allocating the stack space. */
979 if (size == 0)
980 size = 1;
981
9c457457 982 /* Unfortunately, we don't yet know how to allocate variable-sized
150edb07 983 temporaries. However, sometimes we can find a fixed upper limit on
984 the size, so try that instead. */
985 else if (size == -1)
986 size = max_int_size_in_bytes (type);
8c3216ae 987
567c22a9 988 /* The size of the temporary may be too large to fit into an integer. */
989 /* ??? Not sure this should happen except for user silliness, so limit
60d903f5 990 this to things that aren't compiler-generated temporaries. The
89f18f73 991 rest of the time we'll die in assign_stack_temp_for_type. */
567c22a9 992 if (decl && size == -1
993 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
994 {
3cf8b391 995 error ("size of variable %q+D is too large", decl);
567c22a9 996 size = 1;
997 }
998
0ab48139 999 tmp = assign_stack_temp_for_type (mode, size, type);
9c457457 1000 return tmp;
1001 }
ad6d0e80 1002
7752d341 1003#ifdef PROMOTE_MODE
30dd806d 1004 if (! dont_promote)
3b2411a8 1005 mode = promote_mode (type, mode, &unsignedp);
9c457457 1006#endif
ad6d0e80 1007
9c457457 1008 return gen_reg_rtx (mode);
1009}
ad6d0e80 1010\f
49d3d726 1011/* Combine temporary stack slots which are adjacent on the stack.
1012
1013 This allows for better use of already allocated stack space. This is only
1014 done for BLKmode slots because we can be sure that we won't have alignment
1015 problems in this case. */
1016
3f0895d3 1017static void
de1b648b 1018combine_temp_slots (void)
49d3d726 1019{
a6629703 1020 struct temp_slot *p, *q, *next, *next_q;
997d68fe 1021 int num_slots;
1022
59241190 1023 /* We can't combine slots, because the information about which slot
1024 is in which alias set will be lost. */
1025 if (flag_strict_aliasing)
1026 return;
1027
06ebc183 1028 /* If there are a lot of temp slots, don't do anything unless
cb0ccc1e 1029 we are optimizing at high levels. */
997d68fe 1030 if (! flag_expensive_optimizations)
a6629703 1031 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
997d68fe 1032 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1033 return;
49d3d726 1034
a6629703 1035 for (p = avail_temp_slots; p; p = next)
ccf0a5eb 1036 {
1037 int delete_p = 0;
997d68fe 1038
a6629703 1039 next = p->next;
1040
1041 if (GET_MODE (p->slot) != BLKmode)
1042 continue;
1043
1044 for (q = p->next; q; q = next_q)
ccf0a5eb 1045 {
a6629703 1046 int delete_q = 0;
1047
1048 next_q = q->next;
1049
1050 if (GET_MODE (q->slot) != BLKmode)
1051 continue;
1052
1053 if (p->base_offset + p->full_size == q->base_offset)
1054 {
1055 /* Q comes after P; combine Q into P. */
1056 p->size += q->size;
1057 p->full_size += q->full_size;
1058 delete_q = 1;
1059 }
1060 else if (q->base_offset + q->full_size == p->base_offset)
1061 {
1062 /* P comes after Q; combine P into Q. */
1063 q->size += p->size;
1064 q->full_size += p->full_size;
1065 delete_p = 1;
1066 break;
1067 }
1068 if (delete_q)
1069 cut_slot_from_list (q, &avail_temp_slots);
ccf0a5eb 1070 }
a6629703 1071
1072 /* Either delete P or advance past it. */
1073 if (delete_p)
1074 cut_slot_from_list (p, &avail_temp_slots);
ccf0a5eb 1075 }
49d3d726 1076}
897b77d6 1077\f
f4e36c33 1078/* Indicate that NEW_RTX is an alternate way of referring to the temp
1079 slot that previously was known by OLD_RTX. */
64e90dae 1080
1081void
f4e36c33 1082update_temp_slot_address (rtx old_rtx, rtx new_rtx)
64e90dae 1083{
155b05dc 1084 struct temp_slot *p;
64e90dae 1085
f4e36c33 1086 if (rtx_equal_p (old_rtx, new_rtx))
64e90dae 1087 return;
155b05dc 1088
f4e36c33 1089 p = find_temp_slot_from_address (old_rtx);
155b05dc 1090
f4e36c33 1091 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1092 NEW_RTX is a register, see if one operand of the PLUS is a
1093 temporary location. If so, NEW_RTX points into it. Otherwise,
1094 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1095 in common between them. If so, try a recursive call on those
1096 values. */
155b05dc 1097 if (p == 0)
1098 {
f4e36c33 1099 if (GET_CODE (old_rtx) != PLUS)
8911b943 1100 return;
1101
f4e36c33 1102 if (REG_P (new_rtx))
8911b943 1103 {
f4e36c33 1104 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1105 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
8911b943 1106 return;
1107 }
f4e36c33 1108 else if (GET_CODE (new_rtx) != PLUS)
155b05dc 1109 return;
1110
f4e36c33 1111 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1112 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1113 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1114 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1115 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1116 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1117 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1118 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
155b05dc 1119
1120 return;
1121 }
1122
06ebc183 1123 /* Otherwise add an alias for the temp's address. */
fef299ce 1124 insert_temp_slot_address (new_rtx, p);
64e90dae 1125}
1126
30f413ae 1127/* If X could be a reference to a temporary slot, mark that slot as
1128 belonging to the level one higher than the current level. If X
1129 matched one of our slots, just mark that one. Otherwise, we can't
0ab48139 1130 easily predict which it is, so upgrade all of them.
897b77d6 1131
1132 This is called when an ({...}) construct occurs and a statement
1133 returns a value in memory. */
1134
1135void
de1b648b 1136preserve_temp_slots (rtx x)
897b77d6 1137{
a6629703 1138 struct temp_slot *p = 0, *next;
897b77d6 1139
c7c7590a 1140 if (x == 0)
0ab48139 1141 return;
41969bd3 1142
e8825bb0 1143 /* If X is a register that is being used as a pointer, see if we have
0ab48139 1144 a temporary slot we know it points to. */
e8825bb0 1145 if (REG_P (x) && REG_POINTER (x))
1146 p = find_temp_slot_from_address (x);
41969bd3 1147
e8825bb0 1148 /* If X is not in memory or is at a constant address, it cannot be in
0ab48139 1149 a temporary slot. */
e8825bb0 1150 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
0ab48139 1151 return;
e8825bb0 1152
1153 /* First see if we can find a match. */
1154 if (p == 0)
1155 p = find_temp_slot_from_address (XEXP (x, 0));
1156
1157 if (p != 0)
1158 {
e8825bb0 1159 if (p->level == temp_slot_level)
0ab48139 1160 move_slot_to_level (p, temp_slot_level - 1);
e8825bb0 1161 return;
41969bd3 1162 }
0dbd1c74 1163
e8825bb0 1164 /* Otherwise, preserve all non-kept slots at this level. */
1165 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
0dbd1c74 1166 {
e8825bb0 1167 next = p->next;
0ab48139 1168 move_slot_to_level (p, temp_slot_level - 1);
e8825bb0 1169 }
c925694c 1170}
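/* Illustrative example of the situation described above (GNU C):

     int x = ({ struct big b = make_big (); b.field; });

   make_big's aggregate return value may live in a stack temporary, and
   the value of the statement expression is still needed after the inner
   statements finish, so preserve_temp_slots keeps that slot alive by
   moving it up one temp-slot level.  (struct big and make_big are
   hypothetical names used only for illustration.)  */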
1171
e8825bb0 1172/* Free all temporaries used so far. This is normally called at the
1173 end of generating code for a statement. */
c925694c 1174
e8825bb0 1175void
1176free_temp_slots (void)
c925694c 1177{
e8825bb0 1178 struct temp_slot *p, *next;
a4da9a83 1179 bool some_available = false;
c925694c 1180
e8825bb0 1181 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1182 {
1183 next = p->next;
0ab48139 1184 make_slot_available (p);
1185 some_available = true;
e8825bb0 1186 }
c925694c 1187
a4da9a83 1188 if (some_available)
1189 {
1190 remove_unused_temp_slot_addresses ();
1191 combine_temp_slots ();
1192 }
e8825bb0 1193}
c925694c 1194
e8825bb0 1195/* Push deeper into the nesting level for stack temporaries. */
c925694c 1196
e8825bb0 1197void
1198push_temp_slots (void)
c925694c 1199{
e8825bb0 1200 temp_slot_level++;
c925694c 1201}
1202
e8825bb0 1203/* Pop a temporary nesting level. All slots in use in the current level
1204 are freed. */
c925694c 1205
e8825bb0 1206void
1207pop_temp_slots (void)
c925694c 1208{
0ab48139 1209 free_temp_slots ();
e8825bb0 1210 temp_slot_level--;
bf5a43e2 1211}
1212
e8825bb0 1213/* Initialize temporary slots. */
0dbd1c74 1214
1215void
e8825bb0 1216init_temp_slots (void)
0dbd1c74 1217{
e8825bb0 1218 /* We have not allocated any temporaries yet. */
1219 avail_temp_slots = 0;
f1f41a6c 1220 vec_alloc (used_temp_slots, 0);
e8825bb0 1221 temp_slot_level = 0;
fc3c948c 1222 n_temp_slots_in_use = 0;
fef299ce 1223
1224 /* Set up the table to map addresses to temp slots. */
1225 if (! temp_slot_address_table)
2ef51f0e 1226 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
fef299ce 1227 else
2ef51f0e 1228 temp_slot_address_table->empty ();
e8825bb0 1229}
1230\f
ea1760a3 1231/* Functions and data structures to keep track of the values hard regs
1232 had at the start of the function. */
1233
1234/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1235 and has_hard_reg_initial_val.. */
df8eb490 1236struct GTY(()) initial_value_pair {
ea1760a3 1237 rtx hard_reg;
1238 rtx pseudo;
df8eb490 1239};
ea1760a3 1240/* ??? This could be a VEC but there is currently no way to define an
1241 opaque VEC type. This could be worked around by defining struct
1242 initial_value_pair in function.h. */
df8eb490 1243struct GTY(()) initial_value_struct {
ea1760a3 1244 int num_entries;
1245 int max_entries;
1246 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
df8eb490 1247};
ea1760a3 1248
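/* Illustrative usage (a sketch; RETURN_ADDR_REGNUM stands in for whatever
   hard register a hypothetical target wants the entry value of):

     rtx ra = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   The returned pseudo holds the value the hard register had on entry to
   the function; emit_initial_value_sets later emits the copies at the
   entry edge, and a second call with the same (mode, regno) pair returns
   the same pseudo via has_hard_reg_initial_val.  */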
1249/* If a pseudo represents an initial hard reg (or expression), return
1250 it, else return NULL_RTX. */
1251
1252rtx
1253get_hard_reg_initial_reg (rtx reg)
1254{
1255 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1256 int i;
1257
1258 if (ivs == 0)
1259 return NULL_RTX;
1260
1261 for (i = 0; i < ivs->num_entries; i++)
1262 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1263 return ivs->entries[i].hard_reg;
1264
1265 return NULL_RTX;
1266}
1267
1268/* Make sure that there's a pseudo register of mode MODE that stores the
1269 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1270
1271rtx
3754d046 1272get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
ea1760a3 1273{
1274 struct initial_value_struct *ivs;
1275 rtx rv;
1276
1277 rv = has_hard_reg_initial_val (mode, regno);
1278 if (rv)
1279 return rv;
1280
1281 ivs = crtl->hard_reg_initial_vals;
1282 if (ivs == 0)
1283 {
25a27413 1284 ivs = ggc_alloc<initial_value_struct> ();
ea1760a3 1285 ivs->num_entries = 0;
1286 ivs->max_entries = 5;
25a27413 1287 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
ea1760a3 1288 crtl->hard_reg_initial_vals = ivs;
1289 }
1290
1291 if (ivs->num_entries >= ivs->max_entries)
1292 {
1293 ivs->max_entries += 5;
1294 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1295 ivs->max_entries);
1296 }
1297
1298 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1299 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1300
1301 return ivs->entries[ivs->num_entries++].pseudo;
1302}
1303
1304/* See if get_hard_reg_initial_val has been used to create a pseudo
1305 for the initial value of hard register REGNO in mode MODE. Return
1306 the associated pseudo if so, otherwise return NULL. */
1307
1308rtx
3754d046 1309has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
ea1760a3 1310{
1311 struct initial_value_struct *ivs;
1312 int i;
1313
1314 ivs = crtl->hard_reg_initial_vals;
1315 if (ivs != 0)
1316 for (i = 0; i < ivs->num_entries; i++)
1317 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1318 && REGNO (ivs->entries[i].hard_reg) == regno)
1319 return ivs->entries[i].pseudo;
1320
1321 return NULL_RTX;
1322}
1323
1324unsigned int
1325emit_initial_value_sets (void)
1326{
1327 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1328 int i;
8bb2625b 1329 rtx_insn *seq;
ea1760a3 1330
1331 if (ivs == 0)
1332 return 0;
1333
1334 start_sequence ();
1335 for (i = 0; i < ivs->num_entries; i++)
1336 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1337 seq = get_insns ();
1338 end_sequence ();
1339
1340 emit_insn_at_entry (seq);
1341 return 0;
1342}
1343
1344/* Store the hard reg / pseudo reg initial values pair at entry I in
 1345 *HREG and *PREG, and return TRUE if I is a valid entry, FALSE otherwise. */
1346bool
1347initial_value_entry (int i, rtx *hreg, rtx *preg)
1348{
1349 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1350 if (!ivs || i >= ivs->num_entries)
1351 return false;
1352
1353 *hreg = ivs->entries[i].hard_reg;
1354 *preg = ivs->entries[i].pseudo;
1355 return true;
1356}
1357\f
e8825bb0 1358/* These routines are responsible for converting virtual register references
1359 to the actual hard register references once RTL generation is complete.
06ebc183 1360
e8825bb0 1361 The following five variables are used for communication between the
1362 routines. They contain the offsets of the virtual registers from their
1363 respective hard registers. */
c925694c 1364
e8825bb0 1365static int in_arg_offset;
1366static int var_offset;
1367static int dynamic_offset;
1368static int out_arg_offset;
1369static int cfa_offset;
a8636638 1370
e8825bb0 1371/* In most machines, the stack pointer register is equivalent to the bottom
1372 of the stack. */
06ebc183 1373
e8825bb0 1374#ifndef STACK_POINTER_OFFSET
1375#define STACK_POINTER_OFFSET 0
1376#endif
bf5a43e2 1377
02114c95 1378#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1379#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1380#endif
1381
e8825bb0 1382/* If not defined, pick an appropriate default for the offset of dynamically
1383 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
02114c95 1384 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
c925694c 1385
e8825bb0 1386#ifndef STACK_DYNAMIC_OFFSET
a8636638 1387
e8825bb0 1388/* The bottom of the stack points to the actual arguments. If
1389 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1391 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1391 stack space for register parameters is not pushed by the caller, but
1392 rather part of the fixed stack areas and hence not included in
abe32cce 1393 `crtl->outgoing_args_size'. Nevertheless, we must allow
e8825bb0 1394 for it when allocating stack dynamic objects. */
a8636638 1395
02114c95 1396#ifdef INCOMING_REG_PARM_STACK_SPACE
e8825bb0 1397#define STACK_DYNAMIC_OFFSET(FNDECL) \
1398((ACCUMULATE_OUTGOING_ARGS \
abe32cce 1399 ? (crtl->outgoing_args_size \
22c61100 1400 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
02114c95 1401 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
63c68695 1402 : 0) + (STACK_POINTER_OFFSET))
e8825bb0 1403#else
1404#define STACK_DYNAMIC_OFFSET(FNDECL) \
abe32cce 1405((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
e8825bb0 1406 + (STACK_POINTER_OFFSET))
1407#endif
1408#endif
f678883b 1409
e3d5af87 1410\f
f15c4004 1411/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1412 is a virtual register, return the equivalent hard register and set the
1413 offset indirectly through the pointer. Otherwise, return 0. */
897b77d6 1414
f15c4004 1415static rtx
1416instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
897b77d6 1417{
f4e36c33 1418 rtx new_rtx;
f15c4004 1419 HOST_WIDE_INT offset;
897b77d6 1420
f15c4004 1421 if (x == virtual_incoming_args_rtx)
27a7a23a 1422 {
f6754469 1423 if (stack_realign_drap)
27a7a23a 1424 {
f6754469 1425 /* Replace virtual_incoming_args_rtx with internal arg
1426 pointer if DRAP is used to realign stack. */
f4e36c33 1427 new_rtx = crtl->args.internal_arg_pointer;
27a7a23a 1428 offset = 0;
1429 }
1430 else
f4e36c33 1431 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
27a7a23a 1432 }
f15c4004 1433 else if (x == virtual_stack_vars_rtx)
f4e36c33 1434 new_rtx = frame_pointer_rtx, offset = var_offset;
f15c4004 1435 else if (x == virtual_stack_dynamic_rtx)
f4e36c33 1436 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
f15c4004 1437 else if (x == virtual_outgoing_args_rtx)
f4e36c33 1438 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
f15c4004 1439 else if (x == virtual_cfa_rtx)
da72c083 1440 {
1441#ifdef FRAME_POINTER_CFA_OFFSET
f4e36c33 1442 new_rtx = frame_pointer_rtx;
da72c083 1443#else
f4e36c33 1444 new_rtx = arg_pointer_rtx;
da72c083 1445#endif
1446 offset = cfa_offset;
1447 }
60778e62 1448 else if (x == virtual_preferred_stack_boundary_rtx)
1449 {
1450 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1451 offset = 0;
1452 }
f15c4004 1453 else
1454 return NULL_RTX;
897b77d6 1455
f15c4004 1456 *poffset = offset;
f4e36c33 1457 return new_rtx;
897b77d6 1458}
1459
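/* Illustrative example (a sketch): given the address

     (plus (reg virtual_stack_vars_rtx) (const_int 8))

   instantiate_new_reg maps the virtual register to frame_pointer_rtx and
   sets *POFFSET to var_offset, so the code below rewrites the expression
   as frame_pointer_rtx plus the constant var_offset + 8, folding the two
   constants together where possible.  */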
2d184b77 1460/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1461 registers present inside of *LOC. The expression is simplified,
1462 as much as possible, but is not to be considered "valid" in any sense
1463 implied by the target. Return true if any change is made. */
897b77d6 1464
2d184b77 1465static bool
1466instantiate_virtual_regs_in_rtx (rtx *loc)
897b77d6 1467{
2d184b77 1468 if (!*loc)
1469 return false;
1470 bool changed = false;
1471 subrtx_ptr_iterator::array_type array;
1472 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
897b77d6 1473 {
2d184b77 1474 rtx *loc = *iter;
1475 if (rtx x = *loc)
f15c4004 1476 {
2d184b77 1477 rtx new_rtx;
1478 HOST_WIDE_INT offset;
1479 switch (GET_CODE (x))
1480 {
1481 case REG:
1482 new_rtx = instantiate_new_reg (x, &offset);
1483 if (new_rtx)
1484 {
1485 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1486 changed = true;
1487 }
1488 iter.skip_subrtxes ();
1489 break;
f15c4004 1490
2d184b77 1491 case PLUS:
1492 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1493 if (new_rtx)
1494 {
1495 XEXP (x, 0) = new_rtx;
1496 *loc = plus_constant (GET_MODE (x), x, offset, true);
1497 changed = true;
1498 iter.skip_subrtxes ();
1499 break;
1500 }
997d68fe 1501
2d184b77 1502 /* FIXME -- from old code */
1503 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1504 we can commute the PLUS and SUBREG because pointers into the
1505 frame are well-behaved. */
1506 break;
5970b26a 1507
2d184b77 1508 default:
1509 break;
1510 }
1511 }
897b77d6 1512 }
2d184b77 1513 return changed;
897b77d6 1514}
1515
f15c4004 1516/* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1517 matches the predicate for insn CODE operand OPERAND. */
897b77d6 1518
f15c4004 1519static int
1520safe_insn_predicate (int code, int operand, rtx x)
897b77d6 1521{
39c56a89 1522 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
f15c4004 1523}
6d0423b8 1524
f15c4004 1525/* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1526 registers present inside of insn. The result will be a valid insn. */
6d0423b8 1527
1528static void
8bb2625b 1529instantiate_virtual_regs_in_insn (rtx_insn *insn)
6d0423b8 1530{
f15c4004 1531 HOST_WIDE_INT offset;
1532 int insn_code, i;
27ca6129 1533 bool any_change = false;
8bb2625b 1534 rtx set, new_rtx, x;
1535 rtx_insn *seq;
00dfb616 1536
f15c4004 1537 /* There are some special cases to be handled first. */
1538 set = single_set (insn);
1539 if (set)
00dfb616 1540 {
f15c4004 1541 /* We're allowed to assign to a virtual register. This is interpreted
1542 to mean that the underlying register gets assigned the inverse
1543 transformation. This is used, for example, in the handling of
1544 non-local gotos. */
f4e36c33 1545 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1546 if (new_rtx)
f15c4004 1547 {
1548 start_sequence ();
00dfb616 1549
2d184b77 1550 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
f4e36c33 1551 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
5d5ee71f 1552 gen_int_mode (-offset, GET_MODE (new_rtx)));
f4e36c33 1553 x = force_operand (x, new_rtx);
1554 if (x != new_rtx)
1555 emit_move_insn (new_rtx, x);
6d0423b8 1556
f15c4004 1557 seq = get_insns ();
1558 end_sequence ();
6d0423b8 1559
f15c4004 1560 emit_insn_before (seq, insn);
1561 delete_insn (insn);
1562 return;
1563 }
6d0423b8 1564
f15c4004 1565 /* Handle a straight copy from a virtual register by generating a
1566 new add insn. The difference between this and falling through
1567 to the generic case is avoiding a new pseudo and eliminating a
1568 move insn in the initial rtl stream. */
f4e36c33 1569 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1570 if (new_rtx && offset != 0
f15c4004 1571 && REG_P (SET_DEST (set))
1572 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1573 {
1574 start_sequence ();
6d0423b8 1575
0359f9f5 1576 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1577 gen_int_mode (offset,
1578 GET_MODE (SET_DEST (set))),
1579 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
f15c4004 1580 if (x != SET_DEST (set))
1581 emit_move_insn (SET_DEST (set), x);
02e7a332 1582
f15c4004 1583 seq = get_insns ();
1584 end_sequence ();
e3f529ab 1585
f15c4004 1586 emit_insn_before (seq, insn);
1587 delete_insn (insn);
e3f529ab 1588 return;
f15c4004 1589 }
6d0423b8 1590
f15c4004 1591 extract_insn (insn);
27ca6129 1592 insn_code = INSN_CODE (insn);
6d0423b8 1593
f15c4004 1594 /* Handle a plus involving a virtual register by determining if the
1595 operands remain valid if they're modified in place. */
1596 if (GET_CODE (SET_SRC (set)) == PLUS
1597 && recog_data.n_operands >= 3
1598 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1599 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
971ba038 1600 && CONST_INT_P (recog_data.operand[2])
f4e36c33 1601 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
f15c4004 1602 {
1603 offset += INTVAL (recog_data.operand[2]);
6d0423b8 1604
f15c4004 1605 /* If the sum is zero, then replace with a plain move. */
27ca6129 1606 if (offset == 0
1607 && REG_P (SET_DEST (set))
1608 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
f15c4004 1609 {
1610 start_sequence ();
f4e36c33 1611 emit_move_insn (SET_DEST (set), new_rtx);
f15c4004 1612 seq = get_insns ();
1613 end_sequence ();
bc17f7a4 1614
f15c4004 1615 emit_insn_before (seq, insn);
1616 delete_insn (insn);
1617 return;
1618 }
bc17f7a4 1619
f15c4004 1620 x = gen_int_mode (offset, recog_data.operand_mode[2]);
f15c4004 1621
1622 /* Using validate_change and apply_change_group here leaves
1623 recog_data in an invalid state. Since we know exactly what
1624 we want to check, do those two by hand. */
f4e36c33 1625 if (safe_insn_predicate (insn_code, 1, new_rtx)
f15c4004 1626 && safe_insn_predicate (insn_code, 2, x))
1627 {
f4e36c33 1628 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
f15c4004 1629 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1630 any_change = true;
27ca6129 1631
1632 /* Fall through into the regular operand fixup loop in
1633 order to take care of operands other than 1 and 2. */
f15c4004 1634 }
1635 }
1636 }
bc17f7a4 1637 else
27ca6129 1638 {
1639 extract_insn (insn);
1640 insn_code = INSN_CODE (insn);
1641 }
dd79abfb 1642
f15c4004 1643 /* In the general case, we expect virtual registers to appear only in
1644 operands, and then only as either bare registers or inside memories. */
1645 for (i = 0; i < recog_data.n_operands; ++i)
1646 {
1647 x = recog_data.operand[i];
1648 switch (GET_CODE (x))
1649 {
1650 case MEM:
1651 {
1652 rtx addr = XEXP (x, 0);
f15c4004 1653
2d184b77 1654 if (!instantiate_virtual_regs_in_rtx (&addr))
f15c4004 1655 continue;
1656
1657 start_sequence ();
5cc04e45 1658 x = replace_equiv_address (x, addr, true);
7e507322 1659	    /* It may happen that the address containing the virtual reg
 1660	       was valid (e.g. based on the virtual stack reg, which the
 1661	       predicates might accept with any offset), whereas the
 1662	       instantiated address no longer is; for instance, it may still
 1663	       carry an offset while its base reg is no longer the virtual
 1664	       stack reg.  Below we would do a force_reg on the whole
 1665	       operand, but this insn might actually only accept memory.
 1666	       Hence, before that last resort, try to reload just the
 1667	       address into a register, so this operand stays a MEM. */
1668 if (!safe_insn_predicate (insn_code, i, x))
1669 {
1670 addr = force_reg (GET_MODE (addr), addr);
5cc04e45 1671 x = replace_equiv_address (x, addr, true);
7e507322 1672 }
f15c4004 1673 seq = get_insns ();
1674 end_sequence ();
1675 if (seq)
1676 emit_insn_before (seq, insn);
1677 }
1678 break;
1679
1680 case REG:
f4e36c33 1681 new_rtx = instantiate_new_reg (x, &offset);
1682 if (new_rtx == NULL)
f15c4004 1683 continue;
1684 if (offset == 0)
f4e36c33 1685 x = new_rtx;
f15c4004 1686 else
1687 {
1688 start_sequence ();
897b77d6 1689
f15c4004 1690 /* Careful, special mode predicates may have stuff in
1691 insn_data[insn_code].operand[i].mode that isn't useful
1692 to us for computing a new value. */
1693 /* ??? Recognize address_operand and/or "p" constraints
 1694	 to see if (plus new offset) is a valid address before we put
1695 this through expand_simple_binop. */
f4e36c33 1696 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
0359f9f5 1697 gen_int_mode (offset, GET_MODE (x)),
1698 NULL_RTX, 1, OPTAB_LIB_WIDEN);
f15c4004 1699 seq = get_insns ();
1700 end_sequence ();
1701 emit_insn_before (seq, insn);
1702 }
1703 break;
897b77d6 1704
f15c4004 1705 case SUBREG:
f4e36c33 1706 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1707 if (new_rtx == NULL)
f15c4004 1708 continue;
1709 if (offset != 0)
1710 {
1711 start_sequence ();
0359f9f5 1712 new_rtx = expand_simple_binop
1713 (GET_MODE (new_rtx), PLUS, new_rtx,
1714 gen_int_mode (offset, GET_MODE (new_rtx)),
1715 NULL_RTX, 1, OPTAB_LIB_WIDEN);
f15c4004 1716 seq = get_insns ();
1717 end_sequence ();
1718 emit_insn_before (seq, insn);
1719 }
f4e36c33 1720 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1721 GET_MODE (new_rtx), SUBREG_BYTE (x));
024f0a8a 1722 gcc_assert (x);
f15c4004 1723 break;
897b77d6 1724
f15c4004 1725 default:
1726 continue;
1727 }
897b77d6 1728
f15c4004 1729 /* At this point, X contains the new value for the operand.
1730 Validate the new value vs the insn predicate. Note that
1731 asm insns will have insn_code -1 here. */
1732 if (!safe_insn_predicate (insn_code, i, x))
c5159852 1733 {
1734 start_sequence ();
83b6c9db 1735 if (REG_P (x))
1736 {
1737 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1738 x = copy_to_reg (x);
1739 }
1740 else
1741 x = force_reg (insn_data[insn_code].operand[i].mode, x);
c5159852 1742 seq = get_insns ();
1743 end_sequence ();
1744 if (seq)
1745 emit_insn_before (seq, insn);
1746 }
897b77d6 1747
f15c4004 1748 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1749 any_change = true;
1750 }
897b77d6 1751
f15c4004 1752 if (any_change)
1753 {
1754 /* Propagate operand changes into the duplicates. */
1755 for (i = 0; i < recog_data.n_dups; ++i)
1756 *recog_data.dup_loc[i]
cdf37bc1 1757 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
dd79abfb 1758
f15c4004 1759 /* Force re-recognition of the instruction for validation. */
1760 INSN_CODE (insn) = -1;
1761 }
897b77d6 1762
f15c4004 1763 if (asm_noperands (PATTERN (insn)) >= 0)
897b77d6 1764 {
f15c4004 1765 if (!check_asm_operands (PATTERN (insn)))
897b77d6 1766 {
f15c4004 1767 error_for_asm (insn, "impossible constraint in %<asm%>");
33a7b2d7 1768 /* For asm goto, instead of fixing up all the edges
1769 just clear the template and clear input operands
1770 (asm goto doesn't have any output operands). */
1771 if (JUMP_P (insn))
1772 {
1773 rtx asm_op = extract_asm_operands (PATTERN (insn));
1774 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1775 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1776 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1777 }
1778 else
1779 delete_insn (insn);
f15c4004 1780 }
1781 }
1782 else
1783 {
1784 if (recog_memoized (insn) < 0)
1785 fatal_insn_not_found (insn);
1786 }
1787}
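
/* For illustration: on a typical target, the routine above turns, say,

     (set (reg 100) (plus (reg virtual-stack-vars) (const_int 8)))

   into

     (set (reg 100) (plus (reg frame-pointer) (const_int 8 + var_offset)))

   reusing the operands in place when the target's add pattern accepts
   them, and otherwise emitting reload sequences before the insn.  The
   concrete base registers and offsets are target-dependent.  */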
155b05dc 1788
f15c4004 1789/* Subroutine of instantiate_decls. Given RTL representing a decl,
1790 do any instantiation required. */
155b05dc 1791
bc5e6ea1 1792void
1793instantiate_decl_rtl (rtx x)
f15c4004 1794{
1795 rtx addr;
897b77d6 1796
f15c4004 1797 if (x == 0)
1798 return;
897b77d6 1799
f15c4004 1800 /* If this is a CONCAT, recurse for the pieces. */
1801 if (GET_CODE (x) == CONCAT)
1802 {
bc5e6ea1 1803 instantiate_decl_rtl (XEXP (x, 0));
1804 instantiate_decl_rtl (XEXP (x, 1));
f15c4004 1805 return;
1806 }
897b77d6 1807
f15c4004 1808 /* If this is not a MEM, no need to do anything. Similarly if the
1809 address is a constant or a register that is not a virtual register. */
1810 if (!MEM_P (x))
1811 return;
897b77d6 1812
f15c4004 1813 addr = XEXP (x, 0);
1814 if (CONSTANT_P (addr)
1815 || (REG_P (addr)
1816 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1817 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1818 return;
897b77d6 1819
2d184b77 1820 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
f15c4004 1821}
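
/* For illustration: a local whose DECL_RTL is, say,

     (mem/c (plus (reg virtual-stack-vars) (const_int -16)))

   gets its address rewritten here to the corresponding hard register
   (typically the frame pointer) plus an adjusted constant, so that
   debug output never refers to a virtual register.  */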
897b77d6 1822
9338678e 1823/* Helper for instantiate_decls called via walk_tree: Process all decls
1824 in the given DECL_VALUE_EXPR. */
1825
1826static tree
1827instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1828{
1829 tree t = *tp;
75a70cf9 1830 if (! EXPR_P (t))
9338678e 1831 {
1832 *walk_subtrees = 0;
95b985e5 1833 if (DECL_P (t))
1834 {
1835 if (DECL_RTL_SET_P (t))
1836 instantiate_decl_rtl (DECL_RTL (t));
1837 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1838 && DECL_INCOMING_RTL (t))
1839 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
53e9c5c4 1840 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
95b985e5 1841 && DECL_HAS_VALUE_EXPR_P (t))
1842 {
1843 tree v = DECL_VALUE_EXPR (t);
1844 walk_tree (&v, instantiate_expr, NULL, NULL);
1845 }
1846 }
9338678e 1847 }
1848 return NULL;
1849}
1850
f15c4004 1851/* Subroutine of instantiate_decls: Process all decls in the given
1852 BLOCK node and all its subblocks. */
897b77d6 1853
f15c4004 1854static void
1855instantiate_decls_1 (tree let)
1856{
1857 tree t;
897b77d6 1858
1767a056 1859 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
9338678e 1860 {
1861 if (DECL_RTL_SET_P (t))
bc5e6ea1 1862 instantiate_decl_rtl (DECL_RTL (t));
53e9c5c4 1863 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
9338678e 1864 {
1865 tree v = DECL_VALUE_EXPR (t);
1866 walk_tree (&v, instantiate_expr, NULL, NULL);
1867 }
1868 }
897b77d6 1869
f15c4004 1870 /* Process all subblocks. */
93110716 1871 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
f15c4004 1872 instantiate_decls_1 (t);
1873}
897b77d6 1874
f15c4004 1875/* Scan all decls in FNDECL (both variables and parameters) and instantiate
1876 all virtual registers in their DECL_RTL's. */
897b77d6 1877
f15c4004 1878static void
1879instantiate_decls (tree fndecl)
1880{
2ab2ce89 1881 tree decl;
1882 unsigned ix;
897b77d6 1883
f15c4004 1884 /* Process all parameters of the function. */
1767a056 1885 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
f15c4004 1886 {
bc5e6ea1 1887 instantiate_decl_rtl (DECL_RTL (decl));
1888 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
9338678e 1889 if (DECL_HAS_VALUE_EXPR_P (decl))
1890 {
1891 tree v = DECL_VALUE_EXPR (decl);
1892 walk_tree (&v, instantiate_expr, NULL, NULL);
1893 }
f15c4004 1894 }
a51c8974 1895
95b985e5 1896 if ((decl = DECL_RESULT (fndecl))
1897 && TREE_CODE (decl) == RESULT_DECL)
1898 {
1899 if (DECL_RTL_SET_P (decl))
1900 instantiate_decl_rtl (DECL_RTL (decl));
1901 if (DECL_HAS_VALUE_EXPR_P (decl))
1902 {
1903 tree v = DECL_VALUE_EXPR (decl);
1904 walk_tree (&v, instantiate_expr, NULL, NULL);
1905 }
1906 }
1907
eac967db 1908 /* Process the saved static chain if it exists. */
1909 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1910 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1911 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1912
f15c4004 1913 /* Now process all variables defined in the function or its subblocks. */
836c1c68 1914 if (DECL_INITIAL (fndecl))
1915 instantiate_decls_1 (DECL_INITIAL (fndecl));
78fa9ba7 1916
2ab2ce89 1917 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1918 if (DECL_RTL_SET_P (decl))
1919 instantiate_decl_rtl (DECL_RTL (decl));
f1f41a6c 1920 vec_free (cfun->local_decls);
f15c4004 1921}
897b77d6 1922
f15c4004 1923/* Pass through the INSNS of function FNDECL and convert virtual register
1924 references to hard register references. */
897b77d6 1925
2a1990e9 1926static unsigned int
f15c4004 1927instantiate_virtual_regs (void)
1928{
8bb2625b 1929 rtx_insn *insn;
897b77d6 1930
f15c4004 1931 /* Compute the offsets to use for this function. */
1932 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1933 var_offset = STARTING_FRAME_OFFSET;
1934 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1935 out_arg_offset = STACK_POINTER_OFFSET;
da72c083 1936#ifdef FRAME_POINTER_CFA_OFFSET
1937 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1938#else
f15c4004 1939 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
da72c083 1940#endif
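
  /* Roughly speaking, instantiate_new_reg (defined earlier in this
     file) uses these offsets to rewrite each virtual register as a
     hard register plus a constant: virtual-incoming-args becomes the
     arg pointer plus in_arg_offset, virtual-stack-vars the frame
     pointer plus var_offset, virtual-stack-dynamic and
     virtual-outgoing-args the stack pointer plus dynamic_offset and
     out_arg_offset respectively, and virtual-cfa the CFA base
     register plus cfa_offset.  */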
0dbd1c74 1941
f15c4004 1942 /* Initialize recognition, indicating that volatile is OK. */
1943 init_recog ();
897b77d6 1944
f15c4004 1945 /* Scan through all the insns, instantiating every virtual register still
1946 present. */
ca8a2945 1947 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1948 if (INSN_P (insn))
1949 {
1950 /* These patterns in the instruction stream can never be recognized.
1951 Fortunately, they shouldn't contain virtual registers either. */
91f71fa3 1952 if (GET_CODE (PATTERN (insn)) == USE
ca8a2945 1953 || GET_CODE (PATTERN (insn)) == CLOBBER
ca8a2945 1954 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1955 continue;
1956 else if (DEBUG_INSN_P (insn))
2d184b77 1957 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
ca8a2945 1958 else
1959 instantiate_virtual_regs_in_insn (insn);
201f6961 1960
dd1286fb 1961 if (insn->deleted ())
ca8a2945 1962 continue;
d304b9e1 1963
2d184b77 1964 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
201f6961 1965
ca8a2945 1966 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1967 if (CALL_P (insn))
2d184b77 1968 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
ca8a2945 1969 }
897b77d6 1970
f15c4004 1971 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1972 instantiate_decls (current_function_decl);
1973
bc5e6ea1 1974 targetm.instantiate_decls ();
1975
f15c4004 1976 /* Indicate that, from now on, assign_stack_local should use
1977 frame_pointer_rtx. */
1978 virtuals_instantiated = 1;
990495a7 1979
2a1990e9 1980 return 0;
897b77d6 1981}
77fce4cd 1982
cbe8bda8 1983namespace {
1984
1985const pass_data pass_data_instantiate_virtual_regs =
1986{
1987 RTL_PASS, /* type */
1988 "vregs", /* name */
1989 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 1990 TV_NONE, /* tv_id */
1991 0, /* properties_required */
1992 0, /* properties_provided */
1993 0, /* properties_destroyed */
1994 0, /* todo_flags_start */
1995 0, /* todo_flags_finish */
77fce4cd 1996};
1997
cbe8bda8 1998class pass_instantiate_virtual_regs : public rtl_opt_pass
1999{
2000public:
9af5ce0c 2001 pass_instantiate_virtual_regs (gcc::context *ctxt)
2002 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
cbe8bda8 2003 {}
2004
2005 /* opt_pass methods: */
65b0537f 2006 virtual unsigned int execute (function *)
2007 {
2008 return instantiate_virtual_regs ();
2009 }
cbe8bda8 2010
2011}; // class pass_instantiate_virtual_regs
2012
2013} // anon namespace
2014
2015rtl_opt_pass *
2016make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2017{
2018 return new pass_instantiate_virtual_regs (ctxt);
2019}
2020
897b77d6 2021\f
8f48fc81 2022/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2023 This means a type for which function calls must pass an address to the
2024 function or get an address back from the function.
2025 EXP may be a type node or an expression (whose type is tested). */
897b77d6 2026
2027int
fb80456a 2028aggregate_value_p (const_tree exp, const_tree fntype)
897b77d6 2029{
4cd5bb61 2030 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
d5c7cfd2 2031 int i, regno, nregs;
2032 rtx reg;
9308e976 2033
45550790 2034 if (fntype)
2035 switch (TREE_CODE (fntype))
2036 {
2037 case CALL_EXPR:
4cd5bb61 2038 {
2039 tree fndecl = get_callee_fndecl (fntype);
0c93c8a9 2040 if (fndecl)
2041 fntype = TREE_TYPE (fndecl);
2042 else if (CALL_EXPR_FN (fntype))
2043 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2044 else
2045 /* For internal functions, assume nothing needs to be
2046 returned in memory. */
2047 return 0;
4cd5bb61 2048 }
45550790 2049 break;
2050 case FUNCTION_DECL:
4cd5bb61 2051 fntype = TREE_TYPE (fntype);
45550790 2052 break;
2053 case FUNCTION_TYPE:
2054 case METHOD_TYPE:
2055 break;
2056 case IDENTIFIER_NODE:
4cd5bb61 2057 fntype = NULL_TREE;
45550790 2058 break;
2059 default:
4cd5bb61 2060 /* We don't expect other tree types here. */
fdada98f 2061 gcc_unreachable ();
45550790 2062 }
2063
4cd5bb61 2064 if (VOID_TYPE_P (type))
2c8db4fe 2065 return 0;
6f18455e 2066
8df5a43d 2067 /* If a record should be passed the same as its first (and only) member
2068 don't pass it as an aggregate. */
2069 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2070 return aggregate_value_p (first_field (type), fntype);
2071
806e4c12 2072 /* If the front end has decided that this needs to be passed by
2073 reference, do so. */
2074 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2075 && DECL_BY_REFERENCE (exp))
2076 return 1;
6f18455e 2077
4cd5bb61 2078 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2079 if (fntype && TREE_ADDRESSABLE (fntype))
6f18455e 2080 return 1;
48e1416a 2081
ad87de1e 2082 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
79f1a380 2083 and thus can't be returned in registers. */
2084 if (TREE_ADDRESSABLE (type))
2085 return 1;
4cd5bb61 2086
727a13df 2087 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
897b77d6 2088 return 1;
4cd5bb61 2089
2090 if (targetm.calls.return_in_memory (type, fntype))
2091 return 1;
2092
d5c7cfd2 2093 /* Make sure we have suitable call-clobbered regs to return
2094 the value in; if not, we must return it in memory. */
46b3ff29 2095 reg = hard_function_value (type, 0, fntype, 0);
84d69b33 2096
2097 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2098 it is OK. */
8ad4c111 2099 if (!REG_P (reg))
84d69b33 2100 return 0;
2101
d5c7cfd2 2102 regno = REGNO (reg);
67d6c12b 2103 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
d5c7cfd2 2104 for (i = 0; i < nregs; i++)
2105 if (! call_used_regs[regno + i])
2106 return 1;
4cd5bb61 2107
897b77d6 2108 return 0;
2109}
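
/* For illustration, on a typical target:

     int f (void);                          aggregate_value_p == 0
     struct big { char c[64]; } g (void);   aggregate_value_p == 1

   f's value comes back in a register, while g's is returned through a
   hidden pointer to caller-provided memory; where the cutoff falls is
   decided by the target hooks queried above.  */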
2110\f
e8825bb0 2111/* Return true if we should assign DECL a pseudo register; false if it
2112 should live on the local stack. */
2113
2114bool
b7bf20db 2115use_register_for_decl (const_tree decl)
e8825bb0 2116{
94f92c36 2117 if (TREE_CODE (decl) == SSA_NAME)
2118 {
2119 /* We often try to use the SSA_NAME, instead of its underlying
2120 decl, to get type information and guide decisions, to avoid
2121 differences of behavior between anonymous and named
2122 variables, but in this one case we have to go for the actual
2123 variable if there is one. The main reason is that, at least
2124 at -O0, we want to place user variables on the stack, but we
2125 don't mind using pseudos for anonymous or ignored temps.
2126 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2127 should go in pseudos, whereas their corresponding variables
2128 might have to go on the stack. So, disregarding the decl
2129 here would negatively impact debug info at -O0, enable
2130 coalescing between SSA_NAMEs that ought to get different
2131 stack/pseudo assignments, and get the incoming argument
2132 processing thoroughly confused by PARM_DECLs expected to live
2133 in stack slots but assigned to pseudos. */
2134 if (!SSA_NAME_VAR (decl))
2135 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2136 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2137
2138 decl = SSA_NAME_VAR (decl);
2139 }
2140
e8825bb0 2141 /* Honor volatile. */
2142 if (TREE_SIDE_EFFECTS (decl))
2143 return false;
2144
2145 /* Honor addressability. */
2146 if (TREE_ADDRESSABLE (decl))
2147 return false;
2148
b2df3bbf 2149 /* RESULT_DECLs are a bit special in that they're assigned without
2150 regard to use_register_for_decl, but we generally only store in
2151 them. If we coalesce their SSA NAMEs, we'd better return a
2152 result that matches the assignment in expand_function_start. */
2153 if (TREE_CODE (decl) == RESULT_DECL)
2154 {
2155 /* If it's not an aggregate, we're going to use a REG or a
2156 PARALLEL containing a REG. */
2157 if (!aggregate_value_p (decl, current_function_decl))
2158 return true;
2159
2160 /* If expand_function_start determines the return value, we'll
2161 use MEM if it's not by reference. */
2162 if (cfun->returns_pcc_struct
2163 || (targetm.calls.struct_value_rtx
2164 (TREE_TYPE (current_function_decl), 1)))
2165 return DECL_BY_REFERENCE (decl);
2166
2167 /* Otherwise, we're taking an extra all.function_result_decl
2168 argument. It's set up in assign_parms_augmented_arg_list,
2169 under the (negated) conditions above, and then it's used to
 2170	 set up the RESULT_DECL rtl in assign_parms, after looping
2171 over all parameters. Now, if the RESULT_DECL is not by
2172 reference, we'll use a MEM either way. */
2173 if (!DECL_BY_REFERENCE (decl))
2174 return false;
2175
2176 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2177 the function_result_decl's assignment. Since it's a pointer,
2178 we can short-circuit a number of the tests below, and we must
 2179	 duplicate them because we don't have the
2180 function_result_decl to test. */
2181 if (!targetm.calls.allocate_stack_slots_for_args ())
2182 return true;
2183 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2184 if (optimize)
2185 return true;
2186 /* We don't set DECL_REGISTER for the function_result_decl. */
2187 return false;
2188 }
2189
058a1b7a 2190	  /* Decl is implicitly addressable by bound stores and loads
2191 if it is an aggregate holding bounds. */
2192 if (chkp_function_instrumented_p (current_function_decl)
2193 && TREE_TYPE (decl)
2194 && !BOUNDED_P (decl)
2195 && chkp_type_has_pointer (TREE_TYPE (decl)))
2196 return false;
2197
e8825bb0 2198 /* Only register-like things go in registers. */
2199 if (DECL_MODE (decl) == BLKmode)
2200 return false;
2201
2202 /* If -ffloat-store specified, don't put explicit float variables
2203 into registers. */
2204 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2205 propagates values across these stores, and it probably shouldn't. */
2206 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2207 return false;
2208
f27f1575 2209 if (!targetm.calls.allocate_stack_slots_for_args ())
2210 return true;
2211
553acd9c 2212 /* If we're not interested in tracking debugging information for
2213 this decl, then we can certainly put it in a register. */
2214 if (DECL_IGNORED_P (decl))
e8825bb0 2215 return true;
2216
f24ccada 2217 if (optimize)
2218 return true;
2219
2220 if (!DECL_REGISTER (decl))
2221 return false;
2222
ab87ee8f 2223 /* When not optimizing, disregard register keyword for types that
2224 could have methods, otherwise the methods won't be callable from
2225 the debugger. */
2226 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2227 return false;
f24ccada 2228
2229 return true;
e8825bb0 2230}
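
/* For illustration: at -O0 a named user variable such as `int i' is
   neither DECL_IGNORED_P nor DECL_REGISTER, so the checks above send
   it to a stack slot where the debugger can find it, whereas a
   compiler-generated temporary (DECL_IGNORED_P) or, when optimizing,
   any non-addressable scalar gets a pseudo register.  */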
2231
35a569c6 2232/* Structures to communicate between the subroutines of assign_parms.
2233 The first holds data persistent across all parameters, the second
2234 is cleared out for each parameter. */
897b77d6 2235
35a569c6 2236struct assign_parm_data_all
897b77d6 2237{
39cba157 2238 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2239 should become a job of the target or otherwise encapsulated. */
2240 CUMULATIVE_ARGS args_so_far_v;
2241 cumulative_args_t args_so_far;
897b77d6 2242 struct args_size stack_args_size;
35a569c6 2243 tree function_result_decl;
2244 tree orig_fnargs;
c363cb8c 2245 rtx_insn *first_conversion_insn;
2246 rtx_insn *last_conversion_insn;
35a569c6 2247 HOST_WIDE_INT pretend_args_size;
2248 HOST_WIDE_INT extra_pretend_bytes;
2249 int reg_parm_stack_space;
2250};
897b77d6 2251
35a569c6 2252struct assign_parm_data_one
2253{
2254 tree nominal_type;
2255 tree passed_type;
2256 rtx entry_parm;
2257 rtx stack_parm;
3754d046 2258 machine_mode nominal_mode;
2259 machine_mode passed_mode;
2260 machine_mode promoted_mode;
35a569c6 2261 struct locate_and_pad_arg_data locate;
2262 int partial;
2263 BOOL_BITFIELD named_arg : 1;
35a569c6 2264 BOOL_BITFIELD passed_pointer : 1;
2265 BOOL_BITFIELD on_stack : 1;
2266 BOOL_BITFIELD loaded_in_reg : 1;
2267};
eb749d77 2268
058a1b7a 2269struct bounds_parm_data
2270{
2271 assign_parm_data_one parm_data;
2272 tree bounds_parm;
2273 tree ptr_parm;
2274 rtx ptr_entry;
2275 int bound_no;
2276};
2277
35a569c6 2278/* A subroutine of assign_parms. Initialize ALL. */
897b77d6 2279
35a569c6 2280static void
2281assign_parms_initialize_all (struct assign_parm_data_all *all)
2282{
132d5071 2283 tree fntype ATTRIBUTE_UNUSED;
897b77d6 2284
35a569c6 2285 memset (all, 0, sizeof (*all));
2286
2287 fntype = TREE_TYPE (current_function_decl);
2288
2289#ifdef INIT_CUMULATIVE_INCOMING_ARGS
39cba157 2290 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
35a569c6 2291#else
39cba157 2292 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
35a569c6 2293 current_function_decl, -1);
2294#endif
39cba157 2295 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
35a569c6 2296
02114c95 2297#ifdef INCOMING_REG_PARM_STACK_SPACE
2298 all->reg_parm_stack_space
2299 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
35a569c6 2300#endif
2301}
897b77d6 2302
35a569c6 2303/* If ARGS contains entries with complex types, split the entry into two
 2304	 entries of the component type, updating ARGS in place rather than
 2305	 returning a new list. */
2306
3e992c41 2307static void
b2df3bbf 2308split_complex_args (vec<tree> *args)
35a569c6 2309{
3e992c41 2310 unsigned i;
35a569c6 2311 tree p;
2312
f1f41a6c 2313 FOR_EACH_VEC_ELT (*args, i, p)
35a569c6 2314 {
2315 tree type = TREE_TYPE (p);
2316 if (TREE_CODE (type) == COMPLEX_TYPE
2317 && targetm.calls.split_complex_arg (type))
2318 {
2319 tree decl;
2320 tree subtype = TREE_TYPE (type);
e6427ef0 2321 bool addressable = TREE_ADDRESSABLE (p);
35a569c6 2322
2323 /* Rewrite the PARM_DECL's type with its component. */
3e992c41 2324 p = copy_node (p);
35a569c6 2325 TREE_TYPE (p) = subtype;
2326 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
adc78298 2327 SET_DECL_MODE (p, VOIDmode);
35a569c6 2328 DECL_SIZE (p) = NULL;
2329 DECL_SIZE_UNIT (p) = NULL;
e6427ef0 2330 /* If this arg must go in memory, put it in a pseudo here.
2331 We can't allow it to go in memory as per normal parms,
2332 because the usual place might not have the imag part
2333 adjacent to the real part. */
2334 DECL_ARTIFICIAL (p) = addressable;
2335 DECL_IGNORED_P (p) = addressable;
2336 TREE_ADDRESSABLE (p) = 0;
35a569c6 2337 layout_decl (p, 0);
f1f41a6c 2338 (*args)[i] = p;
35a569c6 2339
2340 /* Build a second synthetic decl. */
e60a6f7b 2341 decl = build_decl (EXPR_LOCATION (p),
2342 PARM_DECL, NULL_TREE, subtype);
35a569c6 2343 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
e6427ef0 2344 DECL_ARTIFICIAL (decl) = addressable;
2345 DECL_IGNORED_P (decl) = addressable;
35a569c6 2346 layout_decl (decl, 0);
f1f41a6c 2347 args->safe_insert (++i, decl);
35a569c6 2348 }
2349 }
35a569c6 2350}
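
/* For illustration: when the target's split_complex_arg hook asks for
   it, a parameter declared as

     void f (_Complex double z);

   is rewritten here into two adjacent DFmode PARM_DECLs, one for the
   real part and one for the imaginary part, which are then located
   and assigned independently by the rest of assign_parms.  */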
2351
2352/* A subroutine of assign_parms. Adjust the parameter list to incorporate
2353 the hidden struct return argument, and (abi willing) complex args.
2354 Return the new parameter list. */
2355
f1f41a6c 2356static vec<tree>
35a569c6 2357assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2358{
2359 tree fndecl = current_function_decl;
2360 tree fntype = TREE_TYPE (fndecl);
1e094109 2361 vec<tree> fnargs = vNULL;
3e992c41 2362 tree arg;
2363
1767a056 2364 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
f1f41a6c 2365 fnargs.safe_push (arg);
3e992c41 2366
2367 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
897b77d6 2368
2369 /* If struct value address is treated as the first argument, make it so. */
45550790 2370 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
18d50ae6 2371 && ! cfun->returns_pcc_struct
45550790 2372 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
897b77d6 2373 {
3ff448ca 2374 tree type = build_pointer_type (TREE_TYPE (fntype));
35a569c6 2375 tree decl;
897b77d6 2376
e60a6f7b 2377 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
4d5b4e6a 2378 PARM_DECL, get_identifier (".result_ptr"), type);
35a569c6 2379 DECL_ARG_TYPE (decl) = type;
2380 DECL_ARTIFICIAL (decl) = 1;
4d5b4e6a 2381 DECL_NAMELESS (decl) = 1;
2382 TREE_CONSTANT (decl) = 1;
b2df3bbf 2383 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2384 changes, the end of the RESULT_DECL handling block in
2385 use_register_for_decl must be adjusted to match. */
897b77d6 2386
1767a056 2387 DECL_CHAIN (decl) = all->orig_fnargs;
3e992c41 2388 all->orig_fnargs = decl;
f1f41a6c 2389 fnargs.safe_insert (0, decl);
3e992c41 2390
35a569c6 2391 all->function_result_decl = decl;
058a1b7a 2392
2393 /* If function is instrumented then bounds of the
2394 passed structure address is the second argument. */
2395 if (chkp_function_instrumented_p (fndecl))
2396 {
2397 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2398 PARM_DECL, get_identifier (".result_bnd"),
2399 pointer_bounds_type_node);
2400 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2401 DECL_ARTIFICIAL (decl) = 1;
2402 DECL_NAMELESS (decl) = 1;
2403 TREE_CONSTANT (decl) = 1;
2404
2405 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2406 DECL_CHAIN (all->orig_fnargs) = decl;
2407 fnargs.safe_insert (1, decl);
2408 }
897b77d6 2409 }
06ebc183 2410
92d40bc4 2411 /* If the target wants to split complex arguments into scalars, do so. */
2412 if (targetm.calls.split_complex_arg)
b2df3bbf 2413 split_complex_args (&fnargs);
915e81b8 2414
35a569c6 2415 return fnargs;
2416}
241399f6 2417
35a569c6 2418/* A subroutine of assign_parms. Examine PARM and pull out type and mode
2419 data for the parameter. Incorporate ABI specifics such as pass-by-
2420 reference and type promotion. */
897b77d6 2421
35a569c6 2422static void
2423assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2424 struct assign_parm_data_one *data)
2425{
2426 tree nominal_type, passed_type;
3754d046 2427 machine_mode nominal_mode, passed_mode, promoted_mode;
3b2411a8 2428 int unsignedp;
897b77d6 2429
35a569c6 2430 memset (data, 0, sizeof (*data));
2431
f0b5f617 2432 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
18d50ae6 2433 if (!cfun->stdarg)
f0b5f617 2434 data->named_arg = 1; /* No variadic parms. */
1767a056 2435 else if (DECL_CHAIN (parm))
f0b5f617 2436 data->named_arg = 1; /* Not the last non-variadic parm. */
39cba157 2437 else if (targetm.calls.strict_argument_naming (all->args_so_far))
f0b5f617 2438 data->named_arg = 1; /* Only variadic ones are unnamed. */
35a569c6 2439 else
f0b5f617 2440 data->named_arg = 0; /* Treat as variadic. */
35a569c6 2441
2442 nominal_type = TREE_TYPE (parm);
2443 passed_type = DECL_ARG_TYPE (parm);
2444
2445 /* Look out for errors propagating this far. Also, if the parameter's
2446 type is void then its value doesn't matter. */
2447 if (TREE_TYPE (parm) == error_mark_node
2448 /* This can happen after weird syntax errors
2449 or if an enum type is defined among the parms. */
2450 || TREE_CODE (parm) != PARM_DECL
2451 || passed_type == NULL
2452 || VOID_TYPE_P (nominal_type))
2453 {
2454 nominal_type = passed_type = void_type_node;
2455 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2456 goto egress;
2457 }
d06f5fba 2458
35a569c6 2459 /* Find mode of arg as it is passed, and mode of arg as it should be
2460 during execution of this function. */
2461 passed_mode = TYPE_MODE (passed_type);
2462 nominal_mode = TYPE_MODE (nominal_type);
2463
8df5a43d 2464 /* If the parm is to be passed as a transparent union or record, use the
2465 type of the first field for the tests below. We have already verified
2466 that the modes are the same. */
2467 if ((TREE_CODE (passed_type) == UNION_TYPE
2468 || TREE_CODE (passed_type) == RECORD_TYPE)
2469 && TYPE_TRANSPARENT_AGGR (passed_type))
2470 passed_type = TREE_TYPE (first_field (passed_type));
35a569c6 2471
cc9b8628 2472 /* See if this arg was passed by invisible reference. */
39cba157 2473 if (pass_by_reference (&all->args_so_far_v, passed_mode,
cc9b8628 2474 passed_type, data->named_arg))
35a569c6 2475 {
2476 passed_type = nominal_type = build_pointer_type (passed_type);
2477 data->passed_pointer = true;
25178032 2478 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
35a569c6 2479 }
897b77d6 2480
35a569c6 2481 /* Find mode as it is passed by the ABI. */
3b2411a8 2482 unsignedp = TYPE_UNSIGNED (passed_type);
2483 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2484 TREE_TYPE (current_function_decl), 0);
897b77d6 2485
35a569c6 2486 egress:
2487 data->nominal_type = nominal_type;
2488 data->passed_type = passed_type;
2489 data->nominal_mode = nominal_mode;
2490 data->passed_mode = passed_mode;
2491 data->promoted_mode = promoted_mode;
2492}
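
/* For illustration: on a target whose promote_function_mode hook
   widens sub-word integers (as many RISC ABIs do), a parameter
   declared `short s' typically ends up with nominal_mode == HImode
   but promoted_mode == SImode, i.e. the value arrives extended to a
   wider register; whether this happens at all is entirely up to the
   target hook.  */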
24ec33e7 2493
35a569c6 2494/* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
897b77d6 2495
35a569c6 2496static void
2497assign_parms_setup_varargs (struct assign_parm_data_all *all,
2498 struct assign_parm_data_one *data, bool no_rtl)
2499{
2500 int varargs_pretend_bytes = 0;
2501
39cba157 2502 targetm.calls.setup_incoming_varargs (all->args_so_far,
35a569c6 2503 data->promoted_mode,
2504 data->passed_type,
2505 &varargs_pretend_bytes, no_rtl);
2506
2507 /* If the back-end has requested extra stack space, record how much is
2508 needed. Do not change pretend_args_size otherwise since it may be
2509 nonzero from an earlier partial argument. */
2510 if (varargs_pretend_bytes > 0)
2511 all->pretend_args_size = varargs_pretend_bytes;
2512}
7e8dfb30 2513
35a569c6 2514/* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2515 the incoming location of the current parameter. */
2516
2517static void
2518assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2519 struct assign_parm_data_one *data)
2520{
2521 HOST_WIDE_INT pretend_bytes = 0;
2522 rtx entry_parm;
2523 bool in_regs;
2524
2525 if (data->promoted_mode == VOIDmode)
2526 {
2527 data->entry_parm = data->stack_parm = const0_rtx;
2528 return;
2529 }
7e8dfb30 2530
39cba157 2531 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
f387af4f 2532 data->promoted_mode,
2533 data->passed_type,
2534 data->named_arg);
897b77d6 2535
35a569c6 2536 if (entry_parm == 0)
2537 data->promoted_mode = data->passed_mode;
897b77d6 2538
35a569c6 2539 /* Determine parm's home in the stack, in case it arrives in the stack
2540 or we should pretend it did. Compute the stack position and rtx where
2541 the argument arrives and its size.
897b77d6 2542
35a569c6 2543 There is one complexity here: If this was a parameter that would
2544 have been passed in registers, but wasn't only because it is
2545 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2546 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2547 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2548 as it was the previous time. */
058a1b7a 2549 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
897b77d6 2550#ifdef STACK_PARMS_IN_REG_PARM_AREA
35a569c6 2551 in_regs = true;
241399f6 2552#endif
35a569c6 2553 if (!in_regs && !data->named_arg)
2554 {
39cba157 2555 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
241399f6 2556 {
35a569c6 2557 rtx tem;
39cba157 2558 tem = targetm.calls.function_incoming_arg (all->args_so_far,
f387af4f 2559 data->promoted_mode,
2560 data->passed_type, true);
35a569c6 2561 in_regs = tem != NULL;
241399f6 2562 }
35a569c6 2563 }
241399f6 2564
35a569c6 2565 /* If this parameter was passed both in registers and in the stack, use
2566 the copy on the stack. */
0336f0f0 2567 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2568 data->passed_type))
35a569c6 2569 entry_parm = 0;
241399f6 2570
35a569c6 2571 if (entry_parm)
2572 {
2573 int partial;
2574
39cba157 2575 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
f054eb3c 2576 data->promoted_mode,
2577 data->passed_type,
2578 data->named_arg);
35a569c6 2579 data->partial = partial;
2580
2581 /* The caller might already have allocated stack space for the
2582 register parameters. */
2583 if (partial != 0 && all->reg_parm_stack_space == 0)
1cd50c9a 2584 {
35a569c6 2585 /* Part of this argument is passed in registers and part
2586 is passed on the stack. Ask the prologue code to extend
2587 the stack part so that we can recreate the full value.
2588
2589 PRETEND_BYTES is the size of the registers we need to store.
2590 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2591 stack space that the prologue should allocate.
2592
2593 Internally, gcc assumes that the argument pointer is aligned
2594 to STACK_BOUNDARY bits. This is used both for alignment
2595 optimizations (see init_emit) and to locate arguments that are
2596 aligned to more than PARM_BOUNDARY bits. We must preserve this
2597 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2598 a stack boundary. */
2599
2600 /* We assume at most one partial arg, and it must be the first
2601 argument on the stack. */
fdada98f 2602 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
35a569c6 2603
f054eb3c 2604 pretend_bytes = partial;
35a569c6 2605 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2606
2607 /* We want to align relative to the actual stack pointer, so
2608 don't include this in the stack size until later. */
2609 all->extra_pretend_bytes = all->pretend_args_size;
1cd50c9a 2610 }
35a569c6 2611 }
241399f6 2612
35a569c6 2613 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2e090bf6 2614 all->reg_parm_stack_space,
35a569c6 2615 entry_parm ? data->partial : 0, current_function_decl,
2616 &all->stack_args_size, &data->locate);
897b77d6 2617
c6586120 2618 /* Update parm_stack_boundary if this parameter is passed in the
2619 stack. */
2620 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2621 crtl->parm_stack_boundary = data->locate.boundary;
2622
35a569c6 2623 /* Adjust offsets to include the pretend args. */
2624 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2625 data->locate.slot_offset.constant += pretend_bytes;
2626 data->locate.offset.constant += pretend_bytes;
27664a4b 2627
35a569c6 2628 data->entry_parm = entry_parm;
2629}
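
/* A worked example of the pretend-args computation above: with
   8-byte STACK_BYTES and an argument whose first 12 bytes are passed
   in registers and the rest on the stack (and no reg_parm_stack_space),
   partial == 12, so pretend_args_size becomes CEIL_ROUND (12, 8) == 16
   and the prologue is asked to allocate that much extra space so the
   register part can be stored contiguously with the stack part.  */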
897b77d6 2630
35a569c6 2631/* A subroutine of assign_parms. If there is actually space on the stack
2632 for this parm, count it in stack_args_size and return true. */
897b77d6 2633
35a569c6 2634static bool
2635assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2636 struct assign_parm_data_one *data)
2637{
058a1b7a 2638 /* Bounds are never passed on the stack to keep compatibility
2639 with not instrumented code. */
2640 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2641 return false;
a133d57d 2642 /* Trivially true if we've no incoming register. */
058a1b7a 2643 else if (data->entry_parm == NULL)
35a569c6 2644 ;
2645 /* Also true if we're partially in registers and partially not,
2646 since we've arranged to drop the entire argument on the stack. */
2647 else if (data->partial != 0)
2648 ;
2649 /* Also true if the target says that it's passed in both registers
2650 and on the stack. */
2651 else if (GET_CODE (data->entry_parm) == PARALLEL
2652 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2653 ;
2654 /* Also true if the target says that there's stack allocated for
2655 all register parameters. */
2656 else if (all->reg_parm_stack_space > 0)
2657 ;
2658 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2659 else
2660 return false;
897b77d6 2661
35a569c6 2662 all->stack_args_size.constant += data->locate.size.constant;
2663 if (data->locate.size.var)
2664 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
06ebc183 2665
35a569c6 2666 return true;
2667}
bffcf014 2668
35a569c6 2669/* A subroutine of assign_parms. Given that this parameter is allocated
2670 stack space by the ABI, find it. */
897b77d6 2671
35a569c6 2672static void
2673assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2674{
2675 rtx offset_rtx, stack_parm;
2676 unsigned int align, boundary;
897b77d6 2677
35a569c6 2678 /* If we're passing this arg using a reg, make its stack home the
2679 aligned stack slot. */
2680 if (data->entry_parm)
2681 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2682 else
2683 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2684
abe32cce 2685 stack_parm = crtl->args.internal_arg_pointer;
35a569c6 2686 if (offset_rtx != const0_rtx)
2687 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2688 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2689
d92e3973 2690 if (!data->passed_pointer)
7aeb4db5 2691 {
d92e3973 2692 set_mem_attributes (stack_parm, parm, 1);
2693 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2694 while promoted mode's size is needed. */
2695 if (data->promoted_mode != BLKmode
2696 && data->promoted_mode != DECL_MODE (parm))
7aeb4db5 2697 {
5b2a69fa 2698 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
da443c27 2699 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
d92e3973 2700 {
2701 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2702 data->promoted_mode);
2703 if (offset)
da443c27 2704 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
d92e3973 2705 }
7aeb4db5 2706 }
2707 }
35a569c6 2708
c5dc0c32 2709 boundary = data->locate.boundary;
2710 align = BITS_PER_UNIT;
35a569c6 2711
2712 /* If we're padding upward, we know that the alignment of the slot
bd99ba64 2713 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
35a569c6 2714 intentionally forcing upward padding. Otherwise we have to come
2715 up with a guess at the alignment based on OFFSET_RTX. */
c5dc0c32 2716 if (data->locate.where_pad != downward || data->entry_parm)
35a569c6 2717 align = boundary;
971ba038 2718 else if (CONST_INT_P (offset_rtx))
35a569c6 2719 {
2720 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
ac29ece2 2721 align = least_bit_hwi (align);
35a569c6 2722 }
c5dc0c32 2723 set_mem_align (stack_parm, align);
35a569c6 2724
2725 if (data->entry_parm)
2726 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2727
2728 data->stack_parm = stack_parm;
2729}
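
/* A worked example of the alignment guess above: with 8-bit units, a
   64-bit BOUNDARY and OFFSET_RTX equal to the byte offset 4, we get
   align = 32 | 64 = 96 and least_bit_hwi (96) = 32, so the stack slot
   is conservatively marked as only 32-bit aligned.  */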
2730
2731/* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2732 always valid and contiguous. */
2733
2734static void
2735assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2736{
2737 rtx entry_parm = data->entry_parm;
2738 rtx stack_parm = data->stack_parm;
2739
2740 /* If this parm was passed part in regs and part in memory, pretend it
2741 arrived entirely in memory by pushing the register-part onto the stack.
2742 In the special case of a DImode or DFmode that is split, we could put
2743 it together in a pseudoreg directly, but for now that's not worth
2744 bothering with. */
2745 if (data->partial != 0)
2746 {
2747 /* Handle calls that pass values in multiple non-contiguous
2748 locations. The Irix 6 ABI has examples of this. */
2749 if (GET_CODE (entry_parm) == PARALLEL)
d2b9158b 2750 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
48e1416a 2751 data->passed_type,
35a569c6 2752 int_size_in_bytes (data->passed_type));
897b77d6 2753 else
f054eb3c 2754 {
2755 gcc_assert (data->partial % UNITS_PER_WORD == 0);
d2b9158b 2756 move_block_from_reg (REGNO (entry_parm),
2757 validize_mem (copy_rtx (stack_parm)),
f054eb3c 2758 data->partial / UNITS_PER_WORD);
2759 }
897b77d6 2760
35a569c6 2761 entry_parm = stack_parm;
2762 }
897b77d6 2763
35a569c6 2764 /* If we didn't decide this parm came in a register, by default it came
2765 on the stack. */
2766 else if (entry_parm == NULL)
2767 entry_parm = stack_parm;
2768
2769 /* When an argument is passed in multiple locations, we can't make use
2770 of this information, but we can save some copying if the whole argument
2771 is passed in a single register. */
2772 else if (GET_CODE (entry_parm) == PARALLEL
2773 && data->nominal_mode != BLKmode
2774 && data->passed_mode != BLKmode)
2775 {
2776 size_t i, len = XVECLEN (entry_parm, 0);
2777
2778 for (i = 0; i < len; i++)
2779 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2780 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2781 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2782 == data->passed_mode)
2783 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2784 {
2785 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2786 break;
2787 }
2788 }
4d6c855d 2789
35a569c6 2790 data->entry_parm = entry_parm;
2791}
897b77d6 2792
77c0eeb4 2793/* A subroutine of assign_parms. Reconstitute any values which were
2794 passed in multiple registers and would fit in a single register. */
2795
2796static void
2797assign_parm_remove_parallels (struct assign_parm_data_one *data)
2798{
2799 rtx entry_parm = data->entry_parm;
2800
2801 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2802 This can be done with register operations rather than on the
2803 stack, even if we will store the reconstituted parameter on the
2804 stack later. */
1cf0636a 2805 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
77c0eeb4 2806 {
2807 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
77b80ffd 2808 emit_group_store (parmreg, entry_parm, data->passed_type,
77c0eeb4 2809 GET_MODE_SIZE (GET_MODE (entry_parm)));
2810 entry_parm = parmreg;
2811 }
2812
2813 data->entry_parm = entry_parm;
2814}
2815
35a569c6 2816/* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2817 always valid and properly aligned. */
897b77d6 2818
35a569c6 2819static void
b2df3bbf 2820assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
35a569c6 2821{
2822 rtx stack_parm = data->stack_parm;
2823
2824 /* If we can't trust the parm stack slot to be aligned enough for its
2825 ultimate type, don't use that slot after entry. We'll make another
2826 stack slot, if we need one. */
b2df3bbf 2827 if (stack_parm
2828 && ((STRICT_ALIGNMENT
2829 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2830 || (data->nominal_type
2831 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2832 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
35a569c6 2833 stack_parm = NULL;
2834
2835 /* If parm was passed in memory, and we need to convert it on entry,
2836 don't store it back in that same slot. */
2837 else if (data->entry_parm == stack_parm
2838 && data->nominal_mode != BLKmode
2839 && data->nominal_mode != data->passed_mode)
2840 stack_parm = NULL;
2841
f1a0edff 2842 /* If stack protection is in effect for this function, don't leave any
2843 pointers in their passed stack slots. */
edb7afe8 2844 else if (crtl->stack_protect_guard
f1a0edff 2845 && (flag_stack_protect == 2
2846 || data->passed_pointer
2847 || POINTER_TYPE_P (data->nominal_type)))
2848 stack_parm = NULL;
2849
35a569c6 2850 data->stack_parm = stack_parm;
2851}
90b076ea 2852
35a569c6 2853/* A subroutine of assign_parms. Return true if the current parameter
2854 should be stored as a BLKmode in the current frame. */
2855
2856static bool
2857assign_parm_setup_block_p (struct assign_parm_data_one *data)
2858{
2859 if (data->nominal_mode == BLKmode)
2860 return true;
1cf0636a 2861 if (GET_MODE (data->entry_parm) == BLKmode)
2862 return true;
a2509aaa 2863
5f4cd670 2864#ifdef BLOCK_REG_PADDING
ed4b0b75 2865 /* Only assign_parm_setup_block knows how to deal with register arguments
2866 that are padded at the least significant end. */
2867 if (REG_P (data->entry_parm)
2868 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2869 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2870 == (BYTES_BIG_ENDIAN ? upward : downward)))
35a569c6 2871 return true;
5f4cd670 2872#endif
35a569c6 2873
2874 return false;
2875}
2876
48e1416a 2877/* A subroutine of assign_parms. Arrange for the parameter to be
35a569c6 2878 present and valid in DATA->STACK_RTL. */
2879
2880static void
e2ff5c1b 2881assign_parm_setup_block (struct assign_parm_data_all *all,
2882 tree parm, struct assign_parm_data_one *data)
35a569c6 2883{
2884 rtx entry_parm = data->entry_parm;
2885 rtx stack_parm = data->stack_parm;
b2df3bbf 2886 rtx target_reg = NULL_RTX;
6b2b4f3b 2887 bool in_conversion_seq = false;
c5dc0c32 2888 HOST_WIDE_INT size;
2889 HOST_WIDE_INT size_stored;
35a569c6 2890
e2ff5c1b 2891 if (GET_CODE (entry_parm) == PARALLEL)
2892 entry_parm = emit_group_move_into_temps (entry_parm);
2893
b2df3bbf 2894 /* If we want the parameter in a pseudo, don't use a stack slot. */
2895 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2896 {
2897 tree def = ssa_default_def (cfun, parm);
2898 gcc_assert (def);
2899 machine_mode mode = promote_ssa_mode (def, NULL);
2900 rtx reg = gen_reg_rtx (mode);
2901 if (GET_CODE (reg) != CONCAT)
2902 stack_parm = reg;
2903 else
6b2b4f3b 2904 {
2905 target_reg = reg;
2906 /* Avoid allocating a stack slot, if there isn't one
2907 preallocated by the ABI. It might seem like we should
2908 always prefer a pseudo, but converting between
2909 floating-point and integer modes goes through the stack
2910 on various machines, so it's better to use the reserved
2911 stack slot than to risk wasting it and allocating more
2912 for the conversion. */
2913 if (stack_parm == NULL_RTX)
2914 {
2915 int save = generating_concat_p;
2916 generating_concat_p = 0;
2917 stack_parm = gen_reg_rtx (mode);
2918 generating_concat_p = save;
2919 }
2920 }
b2df3bbf 2921 data->stack_parm = NULL;
2922 }
2923
c5dc0c32 2924 size = int_size_in_bytes (data->passed_type);
2925 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2926 if (stack_parm == 0)
2927 {
5d4b30ea 2928 SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
b2df3bbf 2929 stack_parm = assign_stack_local (BLKmode, size_stored,
2930 DECL_ALIGN (parm));
2931 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2932 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2933 set_mem_attributes (stack_parm, parm, 1);
c5dc0c32 2934 }
2935
35a569c6 2936 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2937 calls that pass values in multiple non-contiguous locations. */
2938 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2939 {
35a569c6 2940 rtx mem;
2941
2942 /* Note that we will be storing an integral number of words.
2943 So we have to be careful to ensure that we allocate an
c5dc0c32 2944 integral number of words. We do this above when we call
35a569c6 2945 assign_stack_local if space was not allocated in the argument
2946 list. If it was, this will not work if PARM_BOUNDARY is not
2947 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2948 if it becomes a problem. Exception is when BLKmode arrives
2949 with arguments not conforming to word_mode. */
2950
c5dc0c32 2951 if (data->stack_parm == 0)
2952 ;
35a569c6 2953 else if (GET_CODE (entry_parm) == PARALLEL)
2954 ;
fdada98f 2955 else
2956 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
897b77d6 2957
d2b9158b 2958 mem = validize_mem (copy_rtx (stack_parm));
530178a9 2959
35a569c6 2960 /* Handle values in multiple non-contiguous locations. */
6b2b4f3b 2961 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2962 emit_group_store (mem, entry_parm, data->passed_type, size);
2963 else if (GET_CODE (entry_parm) == PARALLEL)
e2ff5c1b 2964 {
28bf151d 2965 push_to_sequence2 (all->first_conversion_insn,
2966 all->last_conversion_insn);
e2ff5c1b 2967 emit_group_store (mem, entry_parm, data->passed_type, size);
28bf151d 2968 all->first_conversion_insn = get_insns ();
2969 all->last_conversion_insn = get_last_insn ();
e2ff5c1b 2970 end_sequence ();
6b2b4f3b 2971 in_conversion_seq = true;
e2ff5c1b 2972 }
530178a9 2973
35a569c6 2974 else if (size == 0)
2975 ;
dd6fed02 2976
35a569c6 2977 /* If SIZE is that of a mode no bigger than a word, just use
2978 that mode's store operation. */
2979 else if (size <= UNITS_PER_WORD)
2980 {
3754d046 2981 machine_mode mode
35a569c6 2982 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
530178a9 2983
35a569c6 2984 if (mode != BLKmode
5f4cd670 2985#ifdef BLOCK_REG_PADDING
35a569c6 2986 && (size == UNITS_PER_WORD
2987 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2988 != (BYTES_BIG_ENDIAN ? upward : downward)))
5f4cd670 2989#endif
35a569c6 2990 )
2991 {
2973927c 2992 rtx reg;
2993
2994 /* We are really truncating a word_mode value containing
2995 SIZE bytes into a value of mode MODE. If such an
2996 operation requires no actual instructions, we can refer
2997 to the value directly in mode MODE, otherwise we must
2998 start with the register in word_mode and explicitly
2999 convert it. */
3000 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
3001 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3002 else
3003 {
3004 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3005 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3006 }
35a569c6 3007 emit_move_insn (change_address (mem, mode, 0), reg);
3008 }
530178a9 3009
4b8026f8 3010#ifdef BLOCK_REG_PADDING
3011 /* Storing the register in memory as a full word, as
3012 move_block_from_reg below would do, and then using the
3013 MEM in a smaller mode, has the effect of shifting right
3014 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3015 shifting must be explicit. */
3016 else if (!MEM_P (mem))
3017 {
3018 rtx x;
3019
3020 /* If the assert below fails, we should have taken the
3021 mode != BLKmode path above, unless we have downward
3022 padding of smaller-than-word arguments on a machine
3023 with little-endian bytes, which would likely require
3024 additional changes to work correctly. */
3025 gcc_checking_assert (BYTES_BIG_ENDIAN
3026 && (BLOCK_REG_PADDING (mode,
3027 data->passed_type, 1)
3028 == upward));
3029
3030 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3031
3032 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3033 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3034 NULL_RTX, 1);
3035 x = force_reg (word_mode, x);
3036 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3037
3038 emit_move_insn (mem, x);
3039 }
3040#endif
3041
35a569c6 3042 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3043 machine must be aligned to the left before storing
3044 to memory. Note that the previous test doesn't
3045 handle all cases (e.g. SIZE == 3). */
3046 else if (size != UNITS_PER_WORD
5f4cd670 3047#ifdef BLOCK_REG_PADDING
35a569c6 3048 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
3049 == downward)
5f4cd670 3050#else
35a569c6 3051 && BYTES_BIG_ENDIAN
5f4cd670 3052#endif
35a569c6 3053 )
3054 {
3055 rtx tem, x;
3056 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
e1b9bbec 3057 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
35a569c6 3058
f5ff0b21 3059 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
35a569c6 3060 tem = change_address (mem, word_mode, 0);
3061 emit_move_insn (tem, x);
897b77d6 3062 }
35a569c6 3063 else
e2ff5c1b 3064 move_block_from_reg (REGNO (entry_parm), mem,
35a569c6 3065 size_stored / UNITS_PER_WORD);
897b77d6 3066 }
b2df3bbf 3067 else if (!MEM_P (mem))
4b8026f8 3068 {
3069 gcc_checking_assert (size > UNITS_PER_WORD);
3070#ifdef BLOCK_REG_PADDING
3071 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3072 data->passed_type, 0)
3073 == upward);
3074#endif
3075 emit_move_insn (mem, entry_parm);
3076 }
35a569c6 3077 else
e2ff5c1b 3078 move_block_from_reg (REGNO (entry_parm), mem,
35a569c6 3079 size_stored / UNITS_PER_WORD);
3080 }
c5dc0c32 3081 else if (data->stack_parm == 0)
3082 {
28bf151d 3083 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
c5dc0c32 3084 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3085 BLOCK_OP_NORMAL);
28bf151d 3086 all->first_conversion_insn = get_insns ();
3087 all->last_conversion_insn = get_last_insn ();
c5dc0c32 3088 end_sequence ();
6b2b4f3b 3089 in_conversion_seq = true;
c5dc0c32 3090 }
35a569c6 3091
b2df3bbf 3092 if (target_reg)
3093 {
6b2b4f3b 3094 if (!in_conversion_seq)
3095 emit_move_insn (target_reg, stack_parm);
3096 else
3097 {
3098 push_to_sequence2 (all->first_conversion_insn,
3099 all->last_conversion_insn);
3100 emit_move_insn (target_reg, stack_parm);
3101 all->first_conversion_insn = get_insns ();
3102 all->last_conversion_insn = get_last_insn ();
3103 end_sequence ();
3104 }
b2df3bbf 3105 stack_parm = target_reg;
3106 }
3107
c5dc0c32 3108 data->stack_parm = stack_parm;
b2df3bbf 3109 set_parm_rtl (parm, stack_parm);
35a569c6 3110}
3111
3112/* A subroutine of assign_parms. Allocate a pseudo to hold the current
3113 parameter. Get it there. Perform all ABI specified conversions. */
3114
3115static void
3116assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3117 struct assign_parm_data_one *data)
3118{
f3e93fd1 3119 rtx parmreg, validated_mem;
3120 rtx equiv_stack_parm;
3754d046 3121 machine_mode promoted_nominal_mode;
35a569c6 3122 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3123 bool did_conversion = false;
f3e93fd1 3124 bool need_conversion, moved;
b2df3bbf 3125 rtx rtl;
35a569c6 3126
3127 /* Store the parm in a pseudoregister during the function, but we may
c879dbcf 3128 need to do it in a wider mode. Using 2 here makes the result
3129 consistent with promote_decl_mode and thus expand_expr_real_1. */
35a569c6 3130 promoted_nominal_mode
3b2411a8 3131 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
c879dbcf 3132 TREE_TYPE (current_function_decl), 2);
35a569c6 3133
b2df3bbf 3134 parmreg = gen_reg_rtx (promoted_nominal_mode);
3135 if (!DECL_ARTIFICIAL (parm))
3136 mark_user_reg (parmreg);
35a569c6 3137
3138 /* If this was an item that we received a pointer to,
b2df3bbf 3139 set rtl appropriately. */
3140 if (data->passed_pointer)
35a569c6 3141 {
b2df3bbf 3142 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3143 set_mem_attributes (rtl, parm, 1);
35a569c6 3144 }
3145 else
b2df3bbf 3146 rtl = parmreg;
35a569c6 3147
77c0eeb4 3148 assign_parm_remove_parallels (data);
3149
c879dbcf 3150 /* Copy the value into the register, thus bridging between
3151 assign_parm_find_data_types and expand_expr_real_1. */
35a569c6 3152
f3e93fd1 3153 equiv_stack_parm = data->stack_parm;
d2b9158b 3154 validated_mem = validize_mem (copy_rtx (data->entry_parm));
f3e93fd1 3155
3156 need_conversion = (data->nominal_mode != data->passed_mode
3157 || promoted_nominal_mode != data->promoted_mode);
3158 moved = false;
3159
3939ef08 3160 if (need_conversion
3161 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3162 && data->nominal_mode == data->passed_mode
3163 && data->nominal_mode == GET_MODE (data->entry_parm))
f3e93fd1 3164 {
35a569c6 3165 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3166 mode, by the caller. We now have to convert it to
3167 NOMINAL_MODE, if different. However, PARMREG may be in
3168 a different mode than NOMINAL_MODE if it is being stored
3169 promoted.
3170
3171 If ENTRY_PARM is a hard register, it might be in a register
3172 not valid for operating in its mode (e.g., an odd-numbered
3173 register for a DFmode). In that case, moves are the only
3174 thing valid, so we can't do a convert from there. This
3175	 occurs when the calling sequence allows such misaligned
3176 usages.
3177
3178 In addition, the conversion may involve a call, which could
3179 clobber parameters which haven't been copied to pseudo
f3e93fd1 3180 registers yet.
3181
3182 First, we try to emit an insn which performs the necessary
3183 conversion. We verify that this insn does not clobber any
3184 hard registers. */
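	 /* Editorial note (illustrative sketch, not part of the original
	    source): the code below covers the case where a narrow incoming
	    parameter must be stored in a wider promoted pseudo -- say an
	    HImode argument kept in an SImode PARMREG (modes chosen only for
	    the example).  gen_extend_insn then yields roughly

	        (set (reg:SI parmreg) (sign_extend:SI (reg:HI entry_parm)))

	    (zero_extend for unsigned types).  The generated sequence is
	    scanned with note_stores; if it would set any hard register it is
	    discarded and the conversion is instead queued on the shared
	    conversion sequence, as in the need_conversion branch further
	    down.  */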
3185
3186 enum insn_code icode;
3187 rtx op0, op1;
3188
3189 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3190 unsignedp);
3191
3192 op0 = parmreg;
3193 op1 = validated_mem;
3194 if (icode != CODE_FOR_nothing
39c56a89 3195 && insn_operand_matches (icode, 0, op0)
3196 && insn_operand_matches (icode, 1, op1))
f3e93fd1 3197 {
3198 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
91a55c11 3199 rtx_insn *insn, *insns;
3200 rtx t = op1;
f3e93fd1 3201 HARD_REG_SET hardregs;
3202
3203 start_sequence ();
30790040 3204 /* If op1 is a hard register that is likely spilled, first
3205	 force it into a pseudo, otherwise the combiner might extend
3206 its lifetime too much. */
3207 if (GET_CODE (t) == SUBREG)
3208 t = SUBREG_REG (t);
3209 if (REG_P (t)
3210 && HARD_REGISTER_P (t)
3211 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3212 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3213 {
3214 t = gen_reg_rtx (GET_MODE (op1));
3215 emit_move_insn (t, op1);
3216 }
3217 else
3218 t = op1;
9ed997be 3219 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3220 data->passed_mode, unsignedp);
eb10ade7 3221 emit_insn (pat);
f3e93fd1 3222 insns = get_insns ();
3223
3224 moved = true;
3225 CLEAR_HARD_REG_SET (hardregs);
3226 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3227 {
3228 if (INSN_P (insn))
3229 note_stores (PATTERN (insn), record_hard_reg_sets,
3230 &hardregs);
3231 if (!hard_reg_set_empty_p (hardregs))
3232 moved = false;
3233 }
3234
3235 end_sequence ();
3236
3237 if (moved)
3238 {
3239 emit_insn (insns);
3939ef08 3240 if (equiv_stack_parm != NULL_RTX)
3241 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3242 equiv_stack_parm);
f3e93fd1 3243 }
3244 }
3245 }
3246
3247 if (moved)
3248 /* Nothing to do. */
3249 ;
3250 else if (need_conversion)
3251 {
3252 /* We did not have an insn to convert directly, or the sequence
3253 generated appeared unsafe. We must first copy the parm to a
3254 pseudo reg, and save the conversion until after all
35a569c6 3255 parameters have been moved. */
3256
f3e93fd1 3257 int save_tree_used;
35a569c6 3258 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3259
f3e93fd1 3260 emit_move_insn (tempreg, validated_mem);
35a569c6 3261
28bf151d 3262 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
35a569c6 3263 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3264
3265 if (GET_CODE (tempreg) == SUBREG
3266 && GET_MODE (tempreg) == data->nominal_mode
3267 && REG_P (SUBREG_REG (tempreg))
3268 && data->nominal_mode == data->passed_mode
3269 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3270 && GET_MODE_SIZE (GET_MODE (tempreg))
3271 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
897b77d6 3272 {
35a569c6 3273 /* The argument is already sign/zero extended, so note it
3274 into the subreg. */
3275 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
e8629f9e 3276 SUBREG_PROMOTED_SET (tempreg, unsignedp);
35a569c6 3277 }
19e03a68 3278
35a569c6 3279 /* TREE_USED gets set erroneously during expand_assignment. */
3280 save_tree_used = TREE_USED (parm);
b2df3bbf 3281 SET_DECL_RTL (parm, rtl);
5b5037b3 3282 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
b2df3bbf 3283 SET_DECL_RTL (parm, NULL_RTX);
35a569c6 3284 TREE_USED (parm) = save_tree_used;
28bf151d 3285 all->first_conversion_insn = get_insns ();
3286 all->last_conversion_insn = get_last_insn ();
35a569c6 3287 end_sequence ();
19e03a68 3288
35a569c6 3289 did_conversion = true;
3290 }
b2df3bbf 3291 else
f3e93fd1 3292 emit_move_insn (parmreg, validated_mem);
35a569c6 3293
3294 /* If we were passed a pointer but the actual value can safely live
cad0d474 3295 in a register, retrieve it and use it directly. */
b2df3bbf 3296 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
35a569c6 3297 {
3298 /* We can't use nominal_mode, because it will have been set to
3299 Pmode above. We must use the actual mode of the parm. */
b2df3bbf 3300 if (use_register_for_decl (parm))
cad0d474 3301 {
3302 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3303 mark_user_reg (parmreg);
3304 }
3305 else
3306 {
3307 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3308 TYPE_MODE (TREE_TYPE (parm)),
3309 TYPE_ALIGN (TREE_TYPE (parm)));
3310 parmreg
3311 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3312 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3313 align);
3314 set_mem_attributes (parmreg, parm, 1);
3315 }
8815f4da 3316
2c21de58 3317 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3318 the debug info in case it is not legitimate. */
b2df3bbf 3319 if (GET_MODE (parmreg) != GET_MODE (rtl))
35a569c6 3320 {
b2df3bbf 3321 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
35a569c6 3322 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3323
28bf151d 3324 push_to_sequence2 (all->first_conversion_insn,
3325 all->last_conversion_insn);
b2df3bbf 3326 emit_move_insn (tempreg, rtl);
35a569c6 3327 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2c21de58 3328 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3329 tempreg);
28bf151d 3330 all->first_conversion_insn = get_insns ();
3331 all->last_conversion_insn = get_last_insn ();
35a569c6 3332 end_sequence ();
897b77d6 3333
35a569c6 3334 did_conversion = true;
3335 }
3336 else
2c21de58 3337 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
897b77d6 3338
b2df3bbf 3339 rtl = parmreg;
60d903f5 3340
35a569c6 3341 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3342 now the parm. */
b2df3bbf 3343 data->stack_parm = NULL;
35a569c6 3344 }
701e46d0 3345
b2df3bbf 3346 set_parm_rtl (parm, rtl);
3347
35a569c6 3348 /* Mark the register as eliminable if we did no conversion and it was
3349 copied from memory at a fixed offset, and the arg pointer was not
3350 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3351 offset formed an invalid address, such memory-equivalences as we
3352 make here would screw up life analysis for it. */
3353 if (data->nominal_mode == data->passed_mode
3354 && !did_conversion
b2df3bbf 3355 && data->stack_parm != 0
3356 && MEM_P (data->stack_parm)
35a569c6 3357 && data->locate.offset.var == 0
3358 && reg_mentioned_p (virtual_incoming_args_rtx,
b2df3bbf 3359 XEXP (data->stack_parm, 0)))
35a569c6 3360 {
8bb2625b 3361 rtx_insn *linsn = get_last_insn ();
3362 rtx_insn *sinsn;
3363 rtx set;
5f85a240 3364
35a569c6 3365 /* Mark complex types separately. */
3366 if (GET_CODE (parmreg) == CONCAT)
3367 {
3754d046 3368 machine_mode submode
35a569c6 3369 = GET_MODE_INNER (GET_MODE (parmreg));
de17a47b 3370 int regnor = REGNO (XEXP (parmreg, 0));
3371 int regnoi = REGNO (XEXP (parmreg, 1));
b2df3bbf 3372 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3373 rtx stacki = adjust_address_nv (data->stack_parm, submode,
de17a47b 3374 GET_MODE_SIZE (submode));
35a569c6 3375
3376 /* Scan backwards for the set of the real and
3377 imaginary parts. */
3378 for (sinsn = linsn; sinsn != 0;
3379 sinsn = prev_nonnote_insn (sinsn))
3380 {
3381 set = single_set (sinsn);
3382 if (set == 0)
3383 continue;
3384
3385 if (SET_DEST (set) == regno_reg_rtx [regnoi])
750a330e 3386 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
35a569c6 3387 else if (SET_DEST (set) == regno_reg_rtx [regnor])
750a330e 3388 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
5f85a240 3389 }
35a569c6 3390 }
b2df3bbf 3391 else
41cf444a 3392 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
35a569c6 3393 }
3394
3395	 /* For a pointer data type, suggest a pointer register. */
3396 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3397 mark_reg_pointer (parmreg,
3398 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3399}
3400
3401/* A subroutine of assign_parms. Allocate stack space to hold the current
3402 parameter. Get it there. Perform all ABI specified conversions. */
3403
3404static void
3405assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3406 struct assign_parm_data_one *data)
3407{
3408 /* Value must be stored in the stack slot STACK_PARM during function
3409 execution. */
c5dc0c32 3410 bool to_conversion = false;
35a569c6 3411
77c0eeb4 3412 assign_parm_remove_parallels (data);
3413
35a569c6 3414 if (data->promoted_mode != data->nominal_mode)
3415 {
3416 /* Conversion is required. */
3417 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
897b77d6 3418
d2b9158b 3419 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
35a569c6 3420
28bf151d 3421 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
c5dc0c32 3422 to_conversion = true;
3423
35a569c6 3424 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3425 TYPE_UNSIGNED (TREE_TYPE (parm)));
3426
3427 if (data->stack_parm)
738ab6f5 3428 {
3429 int offset = subreg_lowpart_offset (data->nominal_mode,
3430 GET_MODE (data->stack_parm));
3431 /* ??? This may need a big-endian conversion on sparc64. */
3432 data->stack_parm
3433 = adjust_address (data->stack_parm, data->nominal_mode, 0);
da443c27 3434 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
738ab6f5 3435 set_mem_offset (data->stack_parm,
da443c27 3436 MEM_OFFSET (data->stack_parm) + offset);
738ab6f5 3437 }
35a569c6 3438 }
3439
3440 if (data->entry_parm != data->stack_parm)
3441 {
c5dc0c32 3442 rtx src, dest;
94f92c36 3443
35a569c6 3444 if (data->stack_parm == 0)
3445 {
c9b50df7 3446 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3447 GET_MODE (data->entry_parm),
3448 TYPE_ALIGN (data->passed_type));
35a569c6 3449 data->stack_parm
3450 = assign_stack_local (GET_MODE (data->entry_parm),
3451 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
c9b50df7 3452 align);
b2df3bbf 3453 set_mem_attributes (data->stack_parm, parm, 1);
897b77d6 3454 }
35a569c6 3455
d2b9158b 3456 dest = validize_mem (copy_rtx (data->stack_parm));
3457 src = validize_mem (copy_rtx (data->entry_parm));
c5dc0c32 3458
3459 if (MEM_P (src))
897b77d6 3460 {
c5dc0c32 3461 /* Use a block move to handle potentially misaligned entry_parm. */
3462 if (!to_conversion)
28bf151d 3463 push_to_sequence2 (all->first_conversion_insn,
3464 all->last_conversion_insn);
c5dc0c32 3465 to_conversion = true;
3466
3467 emit_block_move (dest, src,
3468 GEN_INT (int_size_in_bytes (data->passed_type)),
3469 BLOCK_OP_NORMAL);
35a569c6 3470 }
3471 else
e9f82fd3 3472 {
3473 if (!REG_P (src))
3474 src = force_reg (GET_MODE (src), src);
3475 emit_move_insn (dest, src);
3476 }
c5dc0c32 3477 }
3478
3479 if (to_conversion)
3480 {
28bf151d 3481 all->first_conversion_insn = get_insns ();
3482 all->last_conversion_insn = get_last_insn ();
c5dc0c32 3483 end_sequence ();
35a569c6 3484 }
897b77d6 3485
b2df3bbf 3486 set_parm_rtl (parm, data->stack_parm);
35a569c6 3487}
b8f621ce 3488
35a569c6 3489/* A subroutine of assign_parms. If the ABI splits complex arguments, then
3490 undo the frobbing that we did in assign_parms_augmented_arg_list. */
006be676 3491
35a569c6 3492static void
3e992c41 3493assign_parms_unsplit_complex (struct assign_parm_data_all *all,
f1f41a6c 3494 vec<tree> fnargs)
35a569c6 3495{
3496 tree parm;
e6427ef0 3497 tree orig_fnargs = all->orig_fnargs;
3e992c41 3498 unsigned i = 0;
e513d163 3499
3e992c41 3500 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
35a569c6 3501 {
3502 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3503 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3504 {
3505 rtx tmp, real, imag;
3754d046 3506 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
897b77d6 3507
f1f41a6c 3508 real = DECL_RTL (fnargs[i]);
3509 imag = DECL_RTL (fnargs[i + 1]);
35a569c6 3510 if (inner != GET_MODE (real))
897b77d6 3511 {
b2df3bbf 3512 real = gen_lowpart_SUBREG (inner, real);
3513 imag = gen_lowpart_SUBREG (inner, imag);
35a569c6 3514 }
e6427ef0 3515
b2df3bbf 3516 if (TREE_ADDRESSABLE (parm))
e6427ef0 3517 {
3518 rtx rmem, imem;
3519 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
c9b50df7 3520 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3521 DECL_MODE (parm),
3522 TYPE_ALIGN (TREE_TYPE (parm)));
e6427ef0 3523
3524 /* split_complex_arg put the real and imag parts in
3525 pseudos. Move them to memory. */
c9b50df7 3526 tmp = assign_stack_local (DECL_MODE (parm), size, align);
e6427ef0 3527 set_mem_attributes (tmp, parm, 1);
3528 rmem = adjust_address_nv (tmp, inner, 0);
3529 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
28bf151d 3530 push_to_sequence2 (all->first_conversion_insn,
3531 all->last_conversion_insn);
e6427ef0 3532 emit_move_insn (rmem, real);
3533 emit_move_insn (imem, imag);
28bf151d 3534 all->first_conversion_insn = get_insns ();
3535 all->last_conversion_insn = get_last_insn ();
e6427ef0 3536 end_sequence ();
3537 }
3538 else
3539 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
b2df3bbf 3540 set_parm_rtl (parm, tmp);
08531d36 3541
f1f41a6c 3542 real = DECL_INCOMING_RTL (fnargs[i]);
3543 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
35a569c6 3544 if (inner != GET_MODE (real))
3545 {
3546 real = gen_lowpart_SUBREG (inner, real);
3547 imag = gen_lowpart_SUBREG (inner, imag);
897b77d6 3548 }
35a569c6 3549 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
d91cf567 3550 set_decl_incoming_rtl (parm, tmp, false);
3e992c41 3551 i++;
897b77d6 3552 }
897b77d6 3553 }
35a569c6 3554}
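/* Editorial note (illustrative example, not part of the original source):
   with targetm.calls.split_complex_arg enabled, a parameter such as

       void f (_Complex double z);

   reaches the back end as two DFmode halves.  The loop above stitches the
   halves back together, setting DECL_RTL and DECL_INCOMING_RTL to
   (concat:DC real imag); when Z is addressable the halves are instead
   copied into a freshly allocated stack slot on the conversion sequence.
   The choice of _Complex double / DFmode / DCmode is an assumption made
   only for this example.  */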
3555
058a1b7a 3556/* Load bounds of PARM from bounds table. */
3557static void
3558assign_parm_load_bounds (struct assign_parm_data_one *data,
3559 tree parm,
3560 rtx entry,
3561 unsigned bound_no)
3562{
3563 bitmap_iterator bi;
3564 unsigned i, offs = 0;
3565 int bnd_no = -1;
3566 rtx slot = NULL, ptr = NULL;
3567
3568 if (parm)
3569 {
3570 bitmap slots;
3571 bitmap_obstack_initialize (NULL);
3572 slots = BITMAP_ALLOC (NULL);
3573 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3574 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3575 {
3576 if (bound_no)
3577 bound_no--;
3578 else
3579 {
3580 bnd_no = i;
3581 break;
3582 }
3583 }
3584 BITMAP_FREE (slots);
3585 bitmap_obstack_release (NULL);
3586 }
3587
3588 /* We may have bounds not associated with any pointer. */
3589 if (bnd_no != -1)
3590 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3591
3592 /* Find associated pointer. */
3593 if (bnd_no == -1)
3594 {
3595	 /* If bounds are not associated with any pointer,
3596	    then they are passed in a register or special slot. */
3597 gcc_assert (data->entry_parm);
3598 ptr = const0_rtx;
3599 }
3600 else if (MEM_P (entry))
3601 slot = adjust_address (entry, Pmode, offs);
3602 else if (REG_P (entry))
3603 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3604 else if (GET_CODE (entry) == PARALLEL)
3605 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3606 else
3607 gcc_unreachable ();
3608 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3609 data->entry_parm);
3610}
3611
3612/* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3613
3614static void
3615assign_bounds (vec<bounds_parm_data> &bndargs,
3cc70dc3 3616 struct assign_parm_data_all &all,
3617 bool assign_regs, bool assign_special,
3618 bool assign_bt)
058a1b7a 3619{
3cc70dc3 3620 unsigned i, pass;
058a1b7a 3621 bounds_parm_data *pbdata;
3622
3623 if (!bndargs.exists ())
3624 return;
3625
3626	 /* We make several passes to store input bounds.  First we handle bounds
3627	    passed in registers.  After that we load bounds passed in special
3628	    slots.  Finally we load bounds from the Bounds Table. */
3629 for (pass = 0; pass < 3; pass++)
3630 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3631 {
3632 /* Pass 0 => regs only. */
3633 if (pass == 0
3cc70dc3 3634 && (!assign_regs
3635	     || (!pbdata->parm_data.entry_parm
3636 || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
058a1b7a 3637 continue;
3638 /* Pass 1 => slots only. */
3639 else if (pass == 1
3cc70dc3 3640 && (!assign_special
3641 || (!pbdata->parm_data.entry_parm
3642 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
058a1b7a 3643 continue;
3644 /* Pass 2 => BT only. */
3645 else if (pass == 2
3cc70dc3 3646 && (!assign_bt
3647 || pbdata->parm_data.entry_parm))
058a1b7a 3648 continue;
3649
3650 if (!pbdata->parm_data.entry_parm
3651 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3652 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3653 pbdata->ptr_entry, pbdata->bound_no);
3654
3655 set_decl_incoming_rtl (pbdata->bounds_parm,
3656 pbdata->parm_data.entry_parm, false);
3657
3658 if (assign_parm_setup_block_p (&pbdata->parm_data))
3659 assign_parm_setup_block (&all, pbdata->bounds_parm,
3660 &pbdata->parm_data);
3661 else if (pbdata->parm_data.passed_pointer
b2df3bbf 3662 || use_register_for_decl (pbdata->bounds_parm))
058a1b7a 3663 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3664 &pbdata->parm_data);
3665 else
3666 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3667 &pbdata->parm_data);
058a1b7a 3668 }
058a1b7a 3669}
3670
35a569c6 3671/* Assign RTL expressions to the function's parameters. This may involve
3672 copying them into registers and using those registers as the DECL_RTL. */
3673
3f0895d3 3674static void
35a569c6 3675assign_parms (tree fndecl)
3676{
3677 struct assign_parm_data_all all;
3e992c41 3678 tree parm;
f1f41a6c 3679 vec<tree> fnargs;
058a1b7a 3680 unsigned i, bound_no = 0;
3681 tree last_arg = NULL;
3682 rtx last_arg_entry = NULL;
3683 vec<bounds_parm_data> bndargs = vNULL;
3684 bounds_parm_data bdata;
897b77d6 3685
abe32cce 3686 crtl->args.internal_arg_pointer
567925e3 3687 = targetm.calls.internal_arg_pointer ();
35a569c6 3688
3689 assign_parms_initialize_all (&all);
3690 fnargs = assign_parms_augmented_arg_list (&all);
3691
f1f41a6c 3692 FOR_EACH_VEC_ELT (fnargs, i, parm)
915e81b8 3693 {
35a569c6 3694 struct assign_parm_data_one data;
3695
3696 /* Extract the type of PARM; adjust it according to ABI. */
3697 assign_parm_find_data_types (&all, parm, &data);
3698
3699 /* Early out for errors and void parameters. */
3700 if (data.passed_mode == VOIDmode)
915e81b8 3701 {
35a569c6 3702 SET_DECL_RTL (parm, const0_rtx);
3703 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3704 continue;
3705 }
1b4f3c7d 3706
27a7a23a 3707 /* Estimate stack alignment from parameter alignment. */
3708 if (SUPPORTS_STACK_ALIGNMENT)
3709 {
bd99ba64 3710 unsigned int align
3711 = targetm.calls.function_arg_boundary (data.promoted_mode,
3712 data.passed_type);
8645d3e7 3713 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3714 align);
27a7a23a 3715 if (TYPE_ALIGN (data.nominal_type) > align)
8645d3e7 3716 align = MINIMUM_ALIGNMENT (data.nominal_type,
3717 TYPE_MODE (data.nominal_type),
3718 TYPE_ALIGN (data.nominal_type));
27a7a23a 3719 if (crtl->stack_alignment_estimated < align)
3720 {
3721 gcc_assert (!crtl->stack_realign_processed);
3722 crtl->stack_alignment_estimated = align;
3723 }
3724 }
48e1416a 3725
35a569c6 3726 /* Find out where the parameter arrives in this function. */
3727 assign_parm_find_entry_rtl (&all, &data);
3728
3729 /* Find out where stack space for this parameter might be. */
3730 if (assign_parm_is_stack_parm (&all, &data))
3731 {
3732 assign_parm_find_stack_rtl (parm, &data);
3733 assign_parm_adjust_entry_rtl (&data);
915e81b8 3734 }
058a1b7a 3735 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3736 {
3737	 /* Remember where the last non-bounds arg was passed in case
3738	    we have to load associated bounds for it from the Bounds
3739	    Table. */
3740 last_arg = parm;
3741 last_arg_entry = data.entry_parm;
3742 bound_no = 0;
3743 }
35a569c6 3744 /* Record permanently how this parm was passed. */
56fe7223 3745 if (data.passed_pointer)
3746 {
3747 rtx incoming_rtl
3748 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3749 data.entry_parm);
3750 set_decl_incoming_rtl (parm, incoming_rtl, true);
3751 }
3752 else
3753 set_decl_incoming_rtl (parm, data.entry_parm, false);
35a569c6 3754
b2df3bbf 3755 assign_parm_adjust_stack_rtl (&data);
94f92c36 3756
3757	 /* Bounds should be loaded in a particular order to
058a1b7a 3758 have registers allocated correctly. Collect info about
3759 input bounds and load them later. */
3760 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3761 {
3762 /* Expect bounds in instrumented functions only. */
3763 gcc_assert (chkp_function_instrumented_p (fndecl));
3764
3765 bdata.parm_data = data;
3766 bdata.bounds_parm = parm;
3767 bdata.ptr_parm = last_arg;
3768 bdata.ptr_entry = last_arg_entry;
3769 bdata.bound_no = bound_no;
3770 bndargs.safe_push (bdata);
3771 }
3772 else
3773 {
058a1b7a 3774 if (assign_parm_setup_block_p (&data))
3775 assign_parm_setup_block (&all, parm, &data);
b2df3bbf 3776 else if (data.passed_pointer || use_register_for_decl (parm))
058a1b7a 3777 assign_parm_setup_reg (&all, parm, &data);
3778 else
3779 assign_parm_setup_stack (&all, parm, &data);
3780 }
3781
3782 if (cfun->stdarg && !DECL_CHAIN (parm))
3783 {
3784 int pretend_bytes = 0;
3785
3786 assign_parms_setup_varargs (&all, &data, false);
3787
3788 if (chkp_function_instrumented_p (fndecl))
3789 {
3790	 /* We expect this to be the last parm.  Otherwise it is wrong
3791 to assign bounds right now. */
3792 gcc_assert (i == (fnargs.length () - 1));
3cc70dc3 3793 assign_bounds (bndargs, all, true, false, false);
058a1b7a 3794 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3795 data.promoted_mode,
3796 data.passed_type,
3797 &pretend_bytes,
3798 false);
3cc70dc3 3799 assign_bounds (bndargs, all, false, true, true);
3800 bndargs.release ();
058a1b7a 3801 }
3802 }
3803
35a569c6 3804 /* Update info on where next arg arrives in registers. */
39cba157 3805 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
f387af4f 3806 data.passed_type, data.named_arg);
35a569c6 3807
058a1b7a 3808 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3809 bound_no++;
915e81b8 3810 }
3811
3cc70dc3 3812 assign_bounds (bndargs, all, true, true, true);
3813 bndargs.release ();
058a1b7a 3814
3e992c41 3815 if (targetm.calls.split_complex_arg)
e6427ef0 3816 assign_parms_unsplit_complex (&all, fnargs);
35a569c6 3817
f1f41a6c 3818 fnargs.release ();
3e992c41 3819
b8f621ce 3820 /* Output all parameter conversion instructions (possibly including calls)
3821 now that all parameters have been copied out of hard registers. */
28bf151d 3822 emit_insn (all.first_conversion_insn);
b8f621ce 3823
27a7a23a 3824 /* Estimate reload stack alignment from scalar return mode. */
3825 if (SUPPORTS_STACK_ALIGNMENT)
3826 {
3827 if (DECL_RESULT (fndecl))
3828 {
3829 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3754d046 3830 machine_mode mode = TYPE_MODE (type);
27a7a23a 3831
3832 if (mode != BLKmode
3833 && mode != VOIDmode
3834 && !AGGREGATE_TYPE_P (type))
3835 {
3836 unsigned int align = GET_MODE_ALIGNMENT (mode);
3837 if (crtl->stack_alignment_estimated < align)
3838 {
3839 gcc_assert (!crtl->stack_realign_processed);
3840 crtl->stack_alignment_estimated = align;
3841 }
3842 }
48e1416a 3843 }
27a7a23a 3844 }
3845
ba133423 3846 /* If we are receiving a struct value address as the first argument, set up
3847 the RTL for the function result. As this might require code to convert
3848 the transmitted address to Pmode, we do this here to ensure that possible
3849 preliminary conversions of the address have been emitted already. */
35a569c6 3850 if (all.function_result_decl)
ba133423 3851 {
35a569c6 3852 tree result = DECL_RESULT (current_function_decl);
3853 rtx addr = DECL_RTL (all.function_result_decl);
ba133423 3854 rtx x;
de1b648b 3855
806e4c12 3856 if (DECL_BY_REFERENCE (result))
4d5b4e6a 3857 {
3858 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3859 x = addr;
3860 }
806e4c12 3861 else
3862 {
4d5b4e6a 3863 SET_DECL_VALUE_EXPR (result,
3864 build1 (INDIRECT_REF, TREE_TYPE (result),
3865 all.function_result_decl));
806e4c12 3866 addr = convert_memory_address (Pmode, addr);
3867 x = gen_rtx_MEM (DECL_MODE (result), addr);
3868 set_mem_attributes (x, result, 1);
3869 }
4d5b4e6a 3870
3871 DECL_HAS_VALUE_EXPR_P (result) = 1;
3872
b2df3bbf 3873 set_parm_rtl (result, x);
ba133423 3874 }
3875
b0cdd2bb 3876 /* We have aligned all the args, so add space for the pretend args. */
abe32cce 3877 crtl->args.pretend_args_size = all.pretend_args_size;
35a569c6 3878 all.stack_args_size.constant += all.extra_pretend_bytes;
abe32cce 3879 crtl->args.size = all.stack_args_size.constant;
897b77d6 3880
3881 /* Adjust function incoming argument size for alignment and
3882 minimum length. */
3883
2e090bf6 3884 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
abe32cce 3885 crtl->args.size = CEIL_ROUND (crtl->args.size,
26be63dd 3886 PARM_BOUNDARY / BITS_PER_UNIT);
8967ddf7 3887
ccccd62c 3888 if (ARGS_GROW_DOWNWARD)
3889 {
3890 crtl->args.arg_offset_rtx
3891 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3892 : expand_expr (size_diffop (all.stack_args_size.var,
3893 size_int (-all.stack_args_size.constant)),
3894 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3895 }
3896 else
3897 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
897b77d6 3898
3899 /* See how many bytes, if any, of its args a function should try to pop
3900 on return. */
3901
f5bc28da 3902 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3903 TREE_TYPE (fndecl),
3904 crtl->args.size);
897b77d6 3905
ec195bc4 3906	 /* For a stdarg.h function, save info about
3907 regs and stack space used by the named args. */
897b77d6 3908
39cba157 3909 crtl->args.info = all.args_so_far_v;
897b77d6 3910
3911 /* Set the rtx used for the function return value. Put this in its
3912 own variable so any optimizers that need this information don't have
3913 to include tree.h. Do this here so it gets done when an inlined
3914 function gets output. */
3915
abe32cce 3916 crtl->return_rtx
0e8e37b2 3917 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3918 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
8839b7f1 3919
3920 /* If scalar return value was computed in a pseudo-reg, or was a named
3921 return value that got dumped to the stack, copy that to the hard
3922 return register. */
3923 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3924 {
3925 tree decl_result = DECL_RESULT (fndecl);
3926 rtx decl_rtl = DECL_RTL (decl_result);
3927
3928 if (REG_P (decl_rtl)
3929 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3930 : DECL_REGISTER (decl_result))
3931 {
3932 rtx real_decl_rtl;
3933
46b3ff29 3934 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3935 fndecl, true);
058a1b7a 3936 if (chkp_function_instrumented_p (fndecl))
3937 crtl->return_bnd
3938 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3939 fndecl, true);
8839b7f1 3940 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
abe32cce 3941 /* The delay slot scheduler assumes that crtl->return_rtx
8839b7f1 3942 holds the hard register containing the return value, not a
3943 temporary pseudo. */
abe32cce 3944 crtl->return_rtx = real_decl_rtl;
8839b7f1 3945 }
3946 }
897b77d6 3947}
6b275368 3948
3949/* A subroutine of gimplify_parameters, invoked via walk_tree.
3950 For all seen types, gimplify their sizes. */
3951
3952static tree
3953gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3954{
3955 tree t = *tp;
3956
3957 *walk_subtrees = 0;
3958 if (TYPE_P (t))
3959 {
3960 if (POINTER_TYPE_P (t))
3961 *walk_subtrees = 1;
bc97b18f 3962 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3963 && !TYPE_SIZES_GIMPLIFIED (t))
6b275368 3964 {
75a70cf9 3965 gimplify_type_sizes (t, (gimple_seq *) data);
6b275368 3966 *walk_subtrees = 1;
3967 }
3968 }
3969
3970 return NULL;
3971}
3972
3973/* Gimplify the parameter list for current_function_decl. This involves
3974 evaluating SAVE_EXPRs of variable sized parameters and generating code
75a70cf9 3975	 to implement callee-copied reference parameters.  Returns a sequence of
3976 statements to add to the beginning of the function. */
6b275368 3977
75a70cf9 3978gimple_seq
6b275368 3979gimplify_parameters (void)
3980{
3981 struct assign_parm_data_all all;
3e992c41 3982 tree parm;
75a70cf9 3983 gimple_seq stmts = NULL;
f1f41a6c 3984 vec<tree> fnargs;
3e992c41 3985 unsigned i;
6b275368 3986
3987 assign_parms_initialize_all (&all);
3988 fnargs = assign_parms_augmented_arg_list (&all);
3989
f1f41a6c 3990 FOR_EACH_VEC_ELT (fnargs, i, parm)
6b275368 3991 {
3992 struct assign_parm_data_one data;
3993
3994 /* Extract the type of PARM; adjust it according to ABI. */
3995 assign_parm_find_data_types (&all, parm, &data);
3996
3997 /* Early out for errors and void parameters. */
3998 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3999 continue;
4000
4001 /* Update info on where next arg arrives in registers. */
39cba157 4002 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
f387af4f 4003 data.passed_type, data.named_arg);
6b275368 4004
4005 /* ??? Once upon a time variable_size stuffed parameter list
4006 SAVE_EXPRs (amongst others) onto a pending sizes list. This
4007 turned out to be less than manageable in the gimple world.
4008 Now we have to hunt them down ourselves. */
4009 walk_tree_without_duplicates (&data.passed_type,
4010 gimplify_parm_type, &stmts);
4011
4852b829 4012 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
6b275368 4013 {
4014 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
4015 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
4016 }
4017
4018 if (data.passed_pointer)
4019 {
4020 tree type = TREE_TYPE (data.passed_type);
39cba157 4021 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
6b275368 4022 type, data.named_arg))
4023 {
4024 tree local, t;
4025
4852b829 4026 /* For constant-sized objects, this is trivial; for
6b275368 4027 variable-sized objects, we have to play games. */
4852b829 4028 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
4029 && !(flag_stack_check == GENERIC_STACK_CHECK
4030 && compare_tree_int (DECL_SIZE_UNIT (parm),
4031 STACK_CHECK_MAX_VAR_SIZE) > 0))
6b275368 4032 {
63e6b59a 4033 local = create_tmp_var (type, get_name (parm));
6b275368 4034 DECL_IGNORED_P (local) = 0;
ab349ddd 4035 /* If PARM was addressable, move that flag over
4036 to the local copy, as its address will be taken,
5a715a82 4037	 not the PARM's.  Keep the parm's address taken
4038 as we'll query that flag during gimplification. */
ab349ddd 4039 if (TREE_ADDRESSABLE (parm))
5a715a82 4040 TREE_ADDRESSABLE (local) = 1;
63e6b59a 4041 else if (TREE_CODE (type) == COMPLEX_TYPE
4042 || TREE_CODE (type) == VECTOR_TYPE)
4043 DECL_GIMPLE_REG_P (local) = 1;
6b275368 4044 }
4045 else
4046 {
c2f47e15 4047 tree ptr_type, addr;
6b275368 4048
4049 ptr_type = build_pointer_type (type);
599548a7 4050 addr = create_tmp_reg (ptr_type, get_name (parm));
6b275368 4051 DECL_IGNORED_P (addr) = 0;
4052 local = build_fold_indirect_ref (addr);
4053
b9a16870 4054 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4f986f8b 4055 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
581bf1c2 4056 size_int (DECL_ALIGN (parm)));
4057
990495a7 4058 /* The call has been built for a variable-sized object. */
a882d754 4059 CALL_ALLOCA_FOR_VAR_P (t) = 1;
6b275368 4060 t = fold_convert (ptr_type, t);
75a70cf9 4061 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
6b275368 4062 gimplify_and_add (t, &stmts);
4063 }
4064
75a70cf9 4065 gimplify_assign (local, parm, &stmts);
6b275368 4066
75fa4f82 4067 SET_DECL_VALUE_EXPR (parm, local);
4068 DECL_HAS_VALUE_EXPR_P (parm) = 1;
6b275368 4069 }
4070 }
4071 }
4072
f1f41a6c 4073 fnargs.release ();
3e992c41 4074
6b275368 4075 return stmts;
4076}
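/* Editorial note (illustrative sketch, not part of the original source):
   when a reference-passed parameter PARM is callee-copied and its size is
   not a compile-time constant, the loop above emits, in effect,

       addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (parm),
                                           DECL_ALIGN (parm));
       *addr = parm;

   at the start of the function and then rewrites later uses of PARM via
   SET_DECL_VALUE_EXPR so that they read the local copy.  Constant-sized
   parameters get an ordinary temporary instead of the alloca.  Whether a
   reference parameter is callee-copied at all depends on the target ABI
   (reference_callee_copied), so the scenario is an assumption.  */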
96b1130a 4077\f
897b77d6 4078/* Compute the size and offset from the start of the stacked arguments for a
4079 parm passed in mode PASSED_MODE and with type TYPE.
4080
4081 INITIAL_OFFSET_PTR points to the current offset into the stacked
4082 arguments.
4083
241399f6 4084 The starting offset and size for this parm are returned in
4085 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
4086 nonzero, the offset is that of stack slot, which is returned in
4087 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
4088 padding required from the initial offset ptr to the stack slot.
897b77d6 4089
6ef828f9 4090 IN_REGS is nonzero if the argument will be passed in registers. It will
897b77d6 4091 never be set if REG_PARM_STACK_SPACE is not defined.
4092
2e090bf6 4093 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4094 for arguments which are passed in registers.
4095
897b77d6 4096 FNDECL is the function in which the argument was defined.
4097
4098 There are two types of rounding that are done. The first, controlled by
bd99ba64 4099 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4100	 argument list to be aligned to the specified boundary (in bits).  This
4101 rounding affects the initial and starting offsets, but not the argument
4102 size.
897b77d6 4103
4104 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4105 optionally rounds the size of the parm to PARM_BOUNDARY. The
4106 initial offset is not affected by this rounding, while the size always
4107 is and the starting offset may be. */
4108
241399f6 4109/* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
4110 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
897b77d6 4111 callers pass in the total size of args so far as
241399f6 4112 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
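/* Editorial worked example (assumed values, not part of the original
   source): consider a 1-byte argument passed entirely on the stack, with
   TARGET_FUNCTION_ARG_BOUNDARY and PARM_BOUNDARY both 32 bits, upward
   padding, PUSH_ROUNDING not defined, ARGS_GROW_DOWNWARD false and
   *INITIAL_OFFSET_PTR == 8.  The initial offset is already 32-bit aligned,
   so LOCATE->SLOT_OFFSET and LOCATE->OFFSET both stay at 8, while the size
   is rounded up from 1 to 4 bytes: the argument occupies bytes 8..11, with
   its value in byte 8 and three bytes of padding above it.  */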
897b77d6 4113
897b77d6 4114void
3754d046 4115locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
2e090bf6 4116 int reg_parm_stack_space, int partial,
4117 tree fndecl ATTRIBUTE_UNUSED,
de1b648b 4118 struct args_size *initial_offset_ptr,
4119 struct locate_and_pad_arg_data *locate)
897b77d6 4120{
241399f6 4121 tree sizetree;
4122 enum direction where_pad;
17bfc2bc 4123 unsigned int boundary, round_boundary;
241399f6 4124 int part_size_in_regs;
897b77d6 4125
897b77d6 4126 /* If we have found a stack parm before we reach the end of the
4127 area reserved for registers, skip that area. */
4128 if (! in_regs)
4129 {
897b77d6 4130 if (reg_parm_stack_space > 0)
4131 {
4132 if (initial_offset_ptr->var)
4133 {
4134 initial_offset_ptr->var
4135 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
902de8ed 4136 ssize_int (reg_parm_stack_space));
897b77d6 4137 initial_offset_ptr->constant = 0;
4138 }
4139 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4140 initial_offset_ptr->constant = reg_parm_stack_space;
4141 }
4142 }
897b77d6 4143
f054eb3c 4144 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
241399f6 4145
4146 sizetree
4147 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4148 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
bd99ba64 4149 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
17bfc2bc 4150 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4151 type);
5f4cd670 4152 locate->where_pad = where_pad;
27a7a23a 4153
4154 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4155 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4156 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4157
c5dc0c32 4158 locate->boundary = boundary;
897b77d6 4159
27a7a23a 4160 if (SUPPORTS_STACK_ALIGNMENT)
4161 {
4162 /* stack_alignment_estimated can't change after stack has been
4163 realigned. */
4164 if (crtl->stack_alignment_estimated < boundary)
4165 {
4166 if (!crtl->stack_realign_processed)
4167 crtl->stack_alignment_estimated = boundary;
4168 else
4169 {
4170 /* If stack is realigned and stack alignment value
4171 hasn't been finalized, it is OK not to increase
4172 stack_alignment_estimated. The bigger alignment
4173 requirement is recorded in stack_alignment_needed
4174 below. */
4175 gcc_assert (!crtl->stack_realign_finalized
4176 && crtl->stack_realign_needed);
4177 }
4178 }
4179 }
4180
90ab54b2 4181 /* Remember if the outgoing parameter requires extra alignment on the
4182 calling function side. */
edb7afe8 4183 if (crtl->stack_alignment_needed < boundary)
4184 crtl->stack_alignment_needed = boundary;
27a7a23a 4185 if (crtl->preferred_stack_boundary < boundary)
4186 crtl->preferred_stack_boundary = boundary;
90ab54b2 4187
ccccd62c 4188 if (ARGS_GROW_DOWNWARD)
4189 {
4190 locate->slot_offset.constant = -initial_offset_ptr->constant;
4191 if (initial_offset_ptr->var)
4192 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4193 initial_offset_ptr->var);
4194
32dd1e51 4195 {
4196 tree s2 = sizetree;
4197 if (where_pad != none
4198 && (!tree_fits_uhwi_p (sizetree)
4199 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4200 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4201 SUB_PARM_SIZE (locate->slot_offset, s2);
4202 }
ccccd62c 4203
4204 locate->slot_offset.constant += part_size_in_regs;
4205
4206 if (!in_regs || reg_parm_stack_space > 0)
4207 pad_to_arg_alignment (&locate->slot_offset, boundary,
4208 &locate->alignment_pad);
4209
4210 locate->size.constant = (-initial_offset_ptr->constant
4211 - locate->slot_offset.constant);
4212 if (initial_offset_ptr->var)
4213 locate->size.var = size_binop (MINUS_EXPR,
4214 size_binop (MINUS_EXPR,
4215 ssize_int (0),
4216 initial_offset_ptr->var),
4217 locate->slot_offset.var);
4218
4219 /* Pad_below needs the pre-rounded size to know how much to pad
4220 below. */
4221 locate->offset = locate->slot_offset;
4222 if (where_pad == downward)
4223 pad_below (&locate->offset, passed_mode, sizetree);
4224
4225 }
4226 else
4227 {
4228 if (!in_regs || reg_parm_stack_space > 0)
4229 pad_to_arg_alignment (initial_offset_ptr, boundary,
4230 &locate->alignment_pad);
4231 locate->slot_offset = *initial_offset_ptr;
897b77d6 4232
4233#ifdef PUSH_ROUNDING
ccccd62c 4234 if (passed_mode != BLKmode)
4235 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
897b77d6 4236#endif
4237
ccccd62c 4238 /* Pad_below needs the pre-rounded size to know how much to pad below
4239 so this must be done before rounding up. */
4240 locate->offset = locate->slot_offset;
4241 if (where_pad == downward)
4242 pad_below (&locate->offset, passed_mode, sizetree);
82f48b55 4243
ccccd62c 4244 if (where_pad != none
4245 && (!tree_fits_uhwi_p (sizetree)
4246 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4247 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
897b77d6 4248
ccccd62c 4249 ADD_PARM_SIZE (locate->size, sizetree);
241399f6 4250
ccccd62c 4251 locate->size.constant -= part_size_in_regs;
4252 }
b704e80f 4253
4254#ifdef FUNCTION_ARG_OFFSET
4255 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4256#endif
897b77d6 4257}
4258
ba585215 4259/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4260 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4261
897b77d6 4262static void
de1b648b 4263pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4264 struct args_size *alignment_pad)
897b77d6 4265{
ef2c4a29 4266 tree save_var = NULL_TREE;
4267 HOST_WIDE_INT save_constant = 0;
5cf5baa2 4268 int boundary_in_bytes = boundary / BITS_PER_UNIT;
891a1732 4269 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4270
4271#ifdef SPARC_STACK_BOUNDARY_HACK
1aecae7f 4272 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4273 the real alignment of %sp. However, when it does this, the
4274 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
891a1732 4275 if (SPARC_STACK_BOUNDARY_HACK)
4276 sp_offset = 0;
4277#endif
9d855d2f 4278
b3f75873 4279 if (boundary > PARM_BOUNDARY)
9d855d2f 4280 {
4281 save_var = offset_ptr->var;
4282 save_constant = offset_ptr->constant;
4283 }
4284
4285 alignment_pad->var = NULL_TREE;
4286 alignment_pad->constant = 0;
9d855d2f 4287
897b77d6 4288 if (boundary > BITS_PER_UNIT)
4289 {
4290 if (offset_ptr->var)
4291 {
891a1732 4292 tree sp_offset_tree = ssize_int (sp_offset);
4293 tree offset = size_binop (PLUS_EXPR,
4294 ARGS_SIZE_TREE (*offset_ptr),
4295 sp_offset_tree);
ccccd62c 4296 tree rounded;
4297 if (ARGS_GROW_DOWNWARD)
4298 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4299 else
4300 rounded = round_up (offset, boundary / BITS_PER_UNIT);
891a1732 4301
4302 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
241399f6 4303 /* ARGS_SIZE_TREE includes constant term. */
4304 offset_ptr->constant = 0;
b3f75873 4305 if (boundary > PARM_BOUNDARY)
d3371fcd 4306 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
902de8ed 4307 save_var);
897b77d6 4308 }
4309 else
06ebc183 4310 {
891a1732 4311 offset_ptr->constant = -sp_offset +
9e37e96e 4312 (ARGS_GROW_DOWNWARD
4313 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4314 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
ccccd62c 4315
b3f75873 4316 if (boundary > PARM_BOUNDARY)
06ebc183 4317 alignment_pad->constant = offset_ptr->constant - save_constant;
4318 }
897b77d6 4319 }
4320}
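/* Editorial worked example (assumed values, not part of the original
   source) for the constant, upward-growing (CEIL_ROUND) case above: with
   BOUNDARY == 64 bits (8 bytes), STACK_POINTER_OFFSET == 4 and
   OFFSET_PTR->CONSTANT == 18, the new constant is
   -4 + CEIL_ROUND (18 + 4, 8) == -4 + 24 == 20, and when BOUNDARY exceeds
   PARM_BOUNDARY the 2 bytes of padding are recorded in ALIGNMENT_PAD.  */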
4321
4322static void
3754d046 4323pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
897b77d6 4324{
4325 if (passed_mode != BLKmode)
4326 {
4327 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4328 offset_ptr->constant
4329 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4330 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4331 - GET_MODE_SIZE (passed_mode));
4332 }
4333 else
4334 {
4335 if (TREE_CODE (sizetree) != INTEGER_CST
4336 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4337 {
4338	 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
4339 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4340 /* Add it in. */
4341 ADD_PARM_SIZE (*offset_ptr, s2);
4342 SUB_PARM_SIZE (*offset_ptr, sizetree);
4343 }
4344 }
4345}
897b77d6 4346\f
897b77d6 4347
3072d30e 4348/* True if register REGNO was alive at a place where `setjmp' was
4349 called and was set more than once or is an argument. Such regs may
4350 be clobbered by `longjmp'. */
4351
4352static bool
4353regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4354{
4355 /* There appear to be cases where some local vars never reach the
4356 backend but have bogus regnos. */
4357 if (regno >= max_reg_num ())
4358 return false;
4359
4360 return ((REG_N_SETS (regno) > 1
34154e27 4361 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4362 regno))
3072d30e 4363 && REGNO_REG_SET_P (setjmp_crosses, regno));
4364}
4365
4366/* Walk the tree of blocks describing the binding levels within a
4367   function and warn about variables that might be killed by setjmp or
4368   vfork.  This is done after calling flow_analysis and before register
4369   allocation, since that will clobber the pseudo-regs to hard regs. */
4370 regs. */
4371
4372static void
4373setjmp_vars_warning (bitmap setjmp_crosses, tree block)
897b77d6 4374{
19cb6b50 4375 tree decl, sub;
4ee9c684 4376
1767a056 4377 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
897b77d6 4378 {
53e9c5c4 4379 if (VAR_P (decl)
49bf95f0 4380 && DECL_RTL_SET_P (decl)
8ad4c111 4381 && REG_P (DECL_RTL (decl))
3072d30e 4382 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
48e1416a 4383 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
0d438110 4384 " %<longjmp%> or %<vfork%>", decl);
897b77d6 4385 }
4ee9c684 4386
93110716 4387 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3072d30e 4388 setjmp_vars_warning (setjmp_crosses, sub);
897b77d6 4389}
4390
4ee9c684 4391/* Do the appropriate part of setjmp_vars_warning
897b77d6 4392 but for arguments instead of local variables. */
4393
3072d30e 4394static void
4395setjmp_args_warning (bitmap setjmp_crosses)
897b77d6 4396{
19cb6b50 4397 tree decl;
897b77d6 4398 for (decl = DECL_ARGUMENTS (current_function_decl);
1767a056 4399 decl; decl = DECL_CHAIN (decl))
897b77d6 4400 if (DECL_RTL (decl) != 0
8ad4c111 4401 && REG_P (DECL_RTL (decl))
3072d30e 4402 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
48e1416a 4403 warning (OPT_Wclobbered,
0d438110 4404 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3cf8b391 4405 decl);
897b77d6 4406}
4407
3072d30e 4408/* Generate warning messages for variables live across setjmp. */
4409
48e1416a 4410void
3072d30e 4411generate_setjmp_warnings (void)
4412{
4413 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4414
a28770e1 4415 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
3072d30e 4416 || bitmap_empty_p (setjmp_crosses))
4417 return;
4418
4419 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4420 setjmp_args_warning (setjmp_crosses);
4421}
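/* Editorial example (illustrative, not part of the original source) of code
   that the -Wclobbered warnings above diagnose:

       #include <setjmp.h>
       extern void g (void);	// may call longjmp (env, 1)
       jmp_buf env;
       int f (void)
       {
         int x = 1;
         if (setjmp (env))
           return x;
         x = 2;
         g ();
         return 0;
       }

   X is set more than once and is live across the setjmp call; if it ends up
   in a register, its value when f returns via longjmp is indeterminate
   (C11 7.13.2.1).  Declaring X volatile avoids the problem.  */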
4422
897b77d6 4423\f
d6263c49 4424/* Reverse the order of elements in the fragment chain T of blocks,
665611e7 4425 and return the new head of the chain (old last element).
4426   In addition, clear BLOCK_SAME_RANGE flags when needed
4427 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4428 its super fragment origin. */
d6263c49 4429
4430static tree
4431block_fragments_nreverse (tree t)
4432{
665611e7 4433 tree prev = 0, block, next, prev_super = 0;
4434 tree super = BLOCK_SUPERCONTEXT (t);
4435 if (BLOCK_FRAGMENT_ORIGIN (super))
4436 super = BLOCK_FRAGMENT_ORIGIN (super);
d6263c49 4437 for (block = t; block; block = next)
4438 {
4439 next = BLOCK_FRAGMENT_CHAIN (block);
4440 BLOCK_FRAGMENT_CHAIN (block) = prev;
665611e7 4441 if ((prev && !BLOCK_SAME_RANGE (prev))
4442 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4443 != prev_super))
4444 BLOCK_SAME_RANGE (block) = 0;
4445 prev_super = BLOCK_SUPERCONTEXT (block);
4446 BLOCK_SUPERCONTEXT (block) = super;
d6263c49 4447 prev = block;
4448 }
665611e7 4449 t = BLOCK_FRAGMENT_ORIGIN (t);
4450 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4451 != prev_super)
4452 BLOCK_SAME_RANGE (t) = 0;
4453 BLOCK_SUPERCONTEXT (t) = super;
d6263c49 4454 return prev;
4455}
4456
4457/* Reverse the order of elements in the chain T of blocks,
4458 and return the new head of the chain (old last element).
4459 Also do the same on subblocks and reverse the order of elements
4460 in BLOCK_FRAGMENT_CHAIN as well. */
4461
4462static tree
4463blocks_nreverse_all (tree t)
4464{
4465 tree prev = 0, block, next;
4466 for (block = t; block; block = next)
4467 {
4468 next = BLOCK_CHAIN (block);
4469 BLOCK_CHAIN (block) = prev;
d6263c49 4470 if (BLOCK_FRAGMENT_CHAIN (block)
4471 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
665611e7 4472 {
4473 BLOCK_FRAGMENT_CHAIN (block)
4474 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4475 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4476 BLOCK_SAME_RANGE (block) = 0;
4477 }
4478 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
d6263c49 4479 prev = block;
4480 }
4481 return prev;
4482}
4483
4484
a36145ca 4485/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4486 and create duplicate blocks. */
4487/* ??? Need an option to either create block fragments or to create
4488 abstract origin duplicates of a source block. It really depends
4489 on what optimization has been performed. */
11b373ff 4490
f1ab82be 4491void
de1b648b 4492reorder_blocks (void)
11b373ff 4493{
f1ab82be 4494 tree block = DECL_INITIAL (current_function_decl);
11b373ff 4495
0c45344e 4496 if (block == NULL_TREE)
f1ab82be 4497 return;
9d819987 4498
4997014d 4499 auto_vec<tree, 10> block_stack;
5846cb0f 4500
a36145ca 4501 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4ee9c684 4502 clear_block_marks (block);
a36145ca 4503
f1ab82be 4504 /* Prune the old trees away, so that they don't get in the way. */
4505 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4506 BLOCK_CHAIN (block) = NULL_TREE;
9d819987 4507
a36145ca 4508 /* Recreate the block tree from the note nesting. */
f1ab82be 4509 reorder_blocks_1 (get_insns (), block, &block_stack);
d6263c49 4510 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
11b373ff 4511}
4512
a36145ca 4513/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
60ecc450 4514
4ee9c684 4515void
4516clear_block_marks (tree block)
5e960ca9 4517{
a36145ca 4518 while (block)
5e960ca9 4519 {
a36145ca 4520 TREE_ASM_WRITTEN (block) = 0;
4ee9c684 4521 clear_block_marks (BLOCK_SUBBLOCKS (block));
a36145ca 4522 block = BLOCK_CHAIN (block);
5e960ca9 4523 }
4524}
4525
60ecc450 4526static void
8bb2625b 4527reorder_blocks_1 (rtx_insn *insns, tree current_block,
4528 vec<tree> *p_block_stack)
60ecc450 4529{
8bb2625b 4530 rtx_insn *insn;
665611e7 4531 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
60ecc450 4532
4533 for (insn = insns; insn; insn = NEXT_INSN (insn))
4534 {
6d7dc5b9 4535 if (NOTE_P (insn))
60ecc450 4536 {
ad4583d9 4537 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
60ecc450 4538 {
4539 tree block = NOTE_BLOCK (insn);
70392493 4540 tree origin;
4541
d6263c49 4542 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4543 origin = block;
a36145ca 4544
665611e7 4545 if (prev_end)
4546 BLOCK_SAME_RANGE (prev_end) = 0;
4547 prev_end = NULL_TREE;
4548
a36145ca 4549 /* If we have seen this block before, that means it now
4550 spans multiple address regions. Create a new fragment. */
60ecc450 4551 if (TREE_ASM_WRITTEN (block))
4552 {
a36145ca 4553 tree new_block = copy_node (block);
a36145ca 4554
665611e7 4555 BLOCK_SAME_RANGE (new_block) = 0;
a36145ca 4556 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4557 BLOCK_FRAGMENT_CHAIN (new_block)
4558 = BLOCK_FRAGMENT_CHAIN (origin);
4559 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4560
4561 NOTE_BLOCK (insn) = new_block;
4562 block = new_block;
60ecc450 4563 }
a36145ca 4564
665611e7 4565 if (prev_beg == current_block && prev_beg)
4566 BLOCK_SAME_RANGE (block) = 1;
4567
4568 prev_beg = origin;
4569
60ecc450 4570 BLOCK_SUBBLOCKS (block) = 0;
4571 TREE_ASM_WRITTEN (block) = 1;
31ddae9f 4572 /* When there's only one block for the entire function,
4573	 current_block == block and we mustn't do this; it
4574	 will cause infinite recursion. */
4575 if (block != current_block)
4576 {
665611e7 4577 tree super;
70392493 4578 if (block != origin)
665611e7 4579 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4580 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4581 (origin))
4582 == current_block);
f1f41a6c 4583 if (p_block_stack->is_empty ())
665611e7 4584 super = current_block;
4585 else
4586 {
f1f41a6c 4587 super = p_block_stack->last ();
665611e7 4588 gcc_assert (super == current_block
4589 || BLOCK_FRAGMENT_ORIGIN (super)
4590 == current_block);
4591 }
4592 BLOCK_SUPERCONTEXT (block) = super;
31ddae9f 4593 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4594 BLOCK_SUBBLOCKS (current_block) = block;
70392493 4595 current_block = origin;
31ddae9f 4596 }
f1f41a6c 4597 p_block_stack->safe_push (block);
60ecc450 4598 }
ad4583d9 4599 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
60ecc450 4600 {
f1f41a6c 4601 NOTE_BLOCK (insn) = p_block_stack->pop ();
60ecc450 4602 current_block = BLOCK_SUPERCONTEXT (current_block);
665611e7 4603 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4604 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4605 prev_beg = NULL_TREE;
4606 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4607 ? NOTE_BLOCK (insn) : NULL_TREE;
60ecc450 4608 }
4609 }
665611e7 4610 else
4611 {
4612 prev_beg = NULL_TREE;
4613 if (prev_end)
4614 BLOCK_SAME_RANGE (prev_end) = 0;
4615 prev_end = NULL_TREE;
4616 }
60ecc450 4617 }
4618}
4619
11b373ff 4620/* Reverse the order of elements in the chain T of blocks,
4621 and return the new head of the chain (old last element). */
4622
4ee9c684 4623tree
de1b648b 4624blocks_nreverse (tree t)
11b373ff 4625{
d6263c49 4626 tree prev = 0, block, next;
4627 for (block = t; block; block = next)
11b373ff 4628 {
d6263c49 4629 next = BLOCK_CHAIN (block);
4630 BLOCK_CHAIN (block) = prev;
4631 prev = block;
11b373ff 4632 }
4633 return prev;
4634}
4635
2149d019 4636/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4637 by modifying the last node in chain 1 to point to chain 2. */
4638
4639tree
4640block_chainon (tree op1, tree op2)
4641{
4642 tree t1;
4643
4644 if (!op1)
4645 return op2;
4646 if (!op2)
4647 return op1;
4648
4649 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4650 continue;
4651 BLOCK_CHAIN (t1) = op2;
4652
4653#ifdef ENABLE_TREE_CHECKING
4654 {
4655 tree t2;
4656 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4657 gcc_assert (t2 != t1);
4658 }
4659#endif
4660
4661 return op1;
4662}
4663
5846cb0f 4664/* Count the subblocks of the list starting with BLOCK. If VECTOR is
4665 non-NULL, list them all into VECTOR, in a depth-first preorder
4666 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
396bfb69 4667 blocks. */
11b373ff 4668
4669static int
de1b648b 4670all_blocks (tree block, tree *vector)
11b373ff 4671{
396bfb69 4672 int n_blocks = 0;
4673
874a9b8d 4674 while (block)
4675 {
4676 TREE_ASM_WRITTEN (block) = 0;
396bfb69 4677
874a9b8d 4678 /* Record this block. */
4679 if (vector)
4680 vector[n_blocks] = block;
396bfb69 4681
874a9b8d 4682 ++n_blocks;
06ebc183 4683
874a9b8d 4684 /* Record the subblocks, and their subblocks... */
4685 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4686 vector ? vector + n_blocks : 0);
4687 block = BLOCK_CHAIN (block);
4688 }
11b373ff 4689
4690 return n_blocks;
4691}
5846cb0f 4692
4693/* Return a vector containing all the blocks rooted at BLOCK. The
4694 number of elements in the vector is stored in N_BLOCKS_P. The
4695 vector is dynamically allocated; it is the caller's responsibility
4696 to call `free' on the pointer returned. */
06ebc183 4697
5846cb0f 4698static tree *
de1b648b 4699get_block_vector (tree block, int *n_blocks_p)
5846cb0f 4700{
4701 tree *block_vector;
4702
4703 *n_blocks_p = all_blocks (block, NULL);
4c36ffe6 4704 block_vector = XNEWVEC (tree, *n_blocks_p);
5846cb0f 4705 all_blocks (block, block_vector);
4706
4707 return block_vector;
4708}
4709
177c2ebc 4710static GTY(()) int next_block_index = 2;
5846cb0f 4711
4712/* Set BLOCK_NUMBER for all the blocks in FN. */
4713
4714void
de1b648b 4715number_blocks (tree fn)
5846cb0f 4716{
4717 int i;
4718 int n_blocks;
4719 tree *block_vector;
4720
4721 /* For SDB and XCOFF debugging output, we start numbering the blocks
4722 from 1 within each function, rather than keeping a running
4723 count. */
2e3b03ce 4724#if SDB_DEBUGGING_INFO || defined (XCOFF_DEBUGGING_INFO)
0eb76379 4725 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4726 next_block_index = 1;
5846cb0f 4727#endif
4728
4729 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4730
4731 /* The top-level BLOCK isn't numbered at all. */
4732 for (i = 1; i < n_blocks; ++i)
4733 /* We number the blocks from two. */
4734 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4735
4736 free (block_vector);
4737
4738 return;
4739}
baa8dec7 4740
4741/* If VAR is present in a subblock of BLOCK, return the subblock. */
4742
4b987fac 4743DEBUG_FUNCTION tree
de1b648b 4744debug_find_var_in_block_tree (tree var, tree block)
baa8dec7 4745{
4746 tree t;
4747
4748 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4749 if (t == var)
4750 return block;
4751
4752 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4753 {
4754 tree ret = debug_find_var_in_block_tree (var, t);
4755 if (ret)
4756 return ret;
4757 }
4758
4759 return NULL_TREE;
4760}
11b373ff 4761\f
87d4aa85 4762/* Keep track of whether we're in a dummy function context. If we are,
4763 we don't want to invoke the set_current_function hook, because we'll
4764 get into trouble if the hook calls target_reinit () recursively or
4765 when the initial initialization is not yet complete. */
4766
4767static bool in_dummy_function;
4768
46f8e3b0 4769/* Invoke the target hook when setting cfun. Update the optimization options
4770 if the function uses different options than the default. */
87d4aa85 4771
4772static void
4773invoke_set_current_function_hook (tree fndecl)
4774{
4775 if (!in_dummy_function)
46f8e3b0 4776 {
4777 tree opts = ((fndecl)
4778 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4779 : optimization_default_node);
4780
4781 if (!opts)
4782 opts = optimization_default_node;
4783
4784 /* Change optimization options if needed. */
4785 if (optimization_current_node != opts)
4786 {
4787 optimization_current_node = opts;
2c5d2e39 4788 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
46f8e3b0 4789 }
4790
6eaab580 4791 targetm.set_current_function (fndecl);
9d3fa937 4792 this_fn_optabs = this_target_optabs;
08c7d04b 4793
9d3fa937 4794 if (opts != optimization_default_node)
08c7d04b 4795 {
9d3fa937 4796 init_tree_optimization_optabs (opts);
4797 if (TREE_OPTIMIZATION_OPTABS (opts))
4798 this_fn_optabs = (struct target_optabs *)
4799 TREE_OPTIMIZATION_OPTABS (opts);
08c7d04b 4800 }
46f8e3b0 4801 }
87d4aa85 4802}
4803
4804/* cfun should never be set directly; use this function. */
4805
4806void
67b5f619 4807set_cfun (struct function *new_cfun, bool force)
87d4aa85 4808{
67b5f619 4809 if (cfun != new_cfun || force)
87d4aa85 4810 {
4811 cfun = new_cfun;
4812 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
b1090780 4813 redirect_edge_var_map_empty ();
87d4aa85 4814 }
4815}
4816
87d4aa85 4817/* Initialized with NOGC, making this poisonous to the garbage collector. */
4818
04009ada 4819static vec<function *> cfun_stack;
87d4aa85 4820
9078126c 4821/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4822 current_function_decl accordingly. */
87d4aa85 4823
4824void
4825push_cfun (struct function *new_cfun)
4826{
9078126c 4827 gcc_assert ((!cfun && !current_function_decl)
4828 || (cfun && current_function_decl == cfun->decl));
f1f41a6c 4829 cfun_stack.safe_push (cfun);
9078126c 4830 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
87d4aa85 4831 set_cfun (new_cfun);
4832}
4833
9078126c 4834/* Pop cfun from the stack. Also set current_function_decl accordingly. */
87d4aa85 4835
4836void
4837pop_cfun (void)
4838{
f1f41a6c 4839 struct function *new_cfun = cfun_stack.pop ();
9078126c 4840 /* When in_dummy_function, we do have a cfun but current_function_decl is
4841 NULL. We also allow pushing NULL cfun and subsequently changing
4842 current_function_decl to something else and have both restored by
4843 pop_cfun. */
4844 gcc_checking_assert (in_dummy_function
4845 || !cfun
4846 || current_function_decl == cfun->decl);
3c9dcda1 4847 set_cfun (new_cfun);
9078126c 4848 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
87d4aa85 4849}
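
/* Illustrative usage sketch (editor's addition, not part of function.c):
   code that temporarily works in another function's context brackets the
   work with push_cfun/pop_cfun.  `other_fndecl' is a hypothetical
   FUNCTION_DECL whose struct function already exists.  */
#if 0
  push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
  /* ... inspect or emit code on behalf of the other function ... */
  pop_cfun ();
#endif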
a3adcd4a 4850
 4851/* Return the current value of funcdef_no and increment it.  */
4852int
48e1416a 4853get_next_funcdef_no (void)
a3adcd4a 4854{
4855 return funcdef_no++;
4856}
4857
1ad3e14c 4858/* Return the current value of funcdef_no.  */
4859int
4860get_last_funcdef_no (void)
4861{
4862 return funcdef_no;
4863}
4864
ecc82929 4865/* Allocate a function structure for FNDECL and set its contents
87d4aa85 4866 to the defaults. Set cfun to the newly-allocated object.
4867 Some of the helper functions invoked during initialization assume
4868 that cfun has already been set. Therefore, assign the new object
4869 directly into cfun and invoke the back end hook explicitly at the
4870 very end, rather than initializing a temporary and calling set_cfun
4871 on it.
80f2ef47 4872
4873 ABSTRACT_P is true if this is a function that will never be seen by
4874 the middle-end. Such functions are front-end concepts (like C++
4875 function templates) that do not correspond directly to functions
4876 placed in object files. */
942cc45f 4877
ecc82929 4878void
80f2ef47 4879allocate_struct_function (tree fndecl, bool abstract_p)
897b77d6 4880{
4ee9c684 4881 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
897b77d6 4882
25a27413 4883 cfun = ggc_cleared_alloc<function> ();
304c5bf1 4884
ecc82929 4885 init_eh_for_function ();
897b77d6 4886
ecc82929 4887 if (init_machine_status)
4888 cfun->machine = (*init_machine_status) ();
26df1c5e 4889
d3feb168 4890#ifdef OVERRIDE_ABI_FORMAT
4891 OVERRIDE_ABI_FORMAT (fndecl);
4892#endif
4893
22c61100 4894 if (fndecl != NULL_TREE)
ecc82929 4895 {
87d4aa85 4896 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4897 cfun->decl = fndecl;
285aabd1 4898 current_function_funcdef_no = get_next_funcdef_no ();
a956a7a6 4899 }
4900
4901 invoke_set_current_function_hook (fndecl);
87d4aa85 4902
a956a7a6 4903 if (fndecl != NULL_TREE)
4904 {
4905 tree result = DECL_RESULT (fndecl);
b2df3bbf 4906
4907 if (!abstract_p)
4908 {
4909 /* Now that we have activated any function-specific attributes
4910 that might affect layout, particularly vector modes, relayout
4911 each of the parameters and the result. */
4912 relayout_decl (result);
4913 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4914 parm = DECL_CHAIN (parm))
4915 relayout_decl (parm);
9d0e3e3a 4916
4917 /* Similarly relayout the function decl. */
4918 targetm.target_option.relayout_function (fndecl);
b2df3bbf 4919 }
4920
80f2ef47 4921 if (!abstract_p && aggregate_value_p (result, fndecl))
87d4aa85 4922 {
ecc82929 4923#ifdef PCC_STATIC_STRUCT_RETURN
18d50ae6 4924 cfun->returns_pcc_struct = 1;
ecc82929 4925#endif
18d50ae6 4926 cfun->returns_struct = 1;
87d4aa85 4927 }
4928
257d99c3 4929 cfun->stdarg = stdarg_p (fntype);
48e1416a 4930
87d4aa85 4931 /* Assume all registers in stdarg functions need to be saved. */
4932 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4933 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
cbeb677e 4934
4935 /* ??? This could be set on a per-function basis by the front-end
4936 but is this worth the hassle? */
4937 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
c4c3cd53 4938 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4f6f9d05 4939
4940 if (!profile_flag && !flag_instrument_function_entry_exit)
4941 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
ecc82929 4942 }
87d4aa85 4943}
4944
4945/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4946 instead of just setting it. */
a6c787e5 4947
87d4aa85 4948void
4949push_struct_function (tree fndecl)
4950{
9078126c 4951 /* When in_dummy_function we might be in the middle of a pop_cfun and
4952 current_function_decl and cfun may not match. */
4953 gcc_assert (in_dummy_function
4954 || (!cfun && !current_function_decl)
4955 || (cfun && current_function_decl == cfun->decl));
f1f41a6c 4956 cfun_stack.safe_push (cfun);
9078126c 4957 current_function_decl = fndecl;
80f2ef47 4958 allocate_struct_function (fndecl, false);
ecc82929 4959}
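
/* Illustrative usage sketch (editor's addition, not part of function.c):
   a front end creating a nested or outlined function typically pairs this
   with pop_cfun once the body has been laid out.  `child_fndecl' is a
   hypothetical FUNCTION_DECL.  */
#if 0
  push_struct_function (child_fndecl);
  /* ... fill in DECL_STRUCT_FUNCTION (child_fndecl), expand the body ... */
  pop_cfun ();
#endif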
897b77d6 4960
cbeb677e 4961/* Reset crtl and other non-struct-function variables to defaults as
f024691d 4962 appropriate for emitting rtl at the start of a function. */
897b77d6 4963
ecc82929 4964static void
87d4aa85 4965prepare_function_start (void)
ecc82929 4966{
c36aa54b 4967 gcc_assert (!get_last_insn ());
fef299ce 4968 init_temp_slots ();
957211e4 4969 init_emit ();
b079a207 4970 init_varasm_status ();
957211e4 4971 init_expr ();
7dfb44a0 4972 default_rtl_profile ();
897b77d6 4973
8c0dd614 4974 if (flag_stack_usage_info)
990495a7 4975 {
25a27413 4976 cfun->su = ggc_cleared_alloc<stack_usage> ();
990495a7 4977 cfun->su->static_stack_size = -1;
4978 }
4979
ecc82929 4980 cse_not_expected = ! optimize;
897b77d6 4981
ecc82929 4982 /* Caller save not needed yet. */
4983 caller_save_needed = 0;
897b77d6 4984
ecc82929 4985 /* We haven't done register allocation yet. */
4986 reg_renumber = 0;
897b77d6 4987
304c5bf1 4988 /* Indicate that we have not instantiated virtual registers yet. */
4989 virtuals_instantiated = 0;
4990
316bc009 4991 /* Indicate that we want CONCATs now. */
4992 generating_concat_p = 1;
4993
304c5bf1 4994 /* Indicate we have no need of a frame pointer yet. */
4995 frame_pointer_needed = 0;
304c5bf1 4996}
4997
20dc3373 4998void
4999push_dummy_function (bool with_decl)
5000{
5001 tree fn_decl, fn_type, fn_result_decl;
5002
5003 gcc_assert (!in_dummy_function);
5004 in_dummy_function = true;
5005
5006 if (with_decl)
5007 {
5008 fn_type = build_function_type_list (void_type_node, NULL_TREE);
5009 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
5010 fn_type);
5011 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
5012 NULL_TREE, void_type_node);
5013 DECL_RESULT (fn_decl) = fn_result_decl;
5014 }
5015 else
5016 fn_decl = NULL_TREE;
5017
5018 push_struct_function (fn_decl);
5019}
5020
304c5bf1 5021/* Initialize the rtl expansion mechanism so that we can do simple things
5022 like generate sequences. This is used to provide a context during global
87d4aa85 5023 initialization of some passes. You must call expand_dummy_function_end
5024 to exit this context. */
5025
304c5bf1 5026void
de1b648b 5027init_dummy_function_start (void)
304c5bf1 5028{
20dc3373 5029 push_dummy_function (false);
87d4aa85 5030 prepare_function_start ();
304c5bf1 5031}
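
/* Illustrative pairing sketch (editor's addition, not part of function.c):
   as the comment above says, the dummy context must be closed again with
   expand_dummy_function_end.  */
#if 0
  init_dummy_function_start ();
  /* ... build throw-away RTL sequences during pass initialization ... */
  expand_dummy_function_end ();
#endif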
5032
5033/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5034 and initialize static variables for generating RTL for the statements
5035 of the function. */
5036
5037void
de1b648b 5038init_function_start (tree subr)
304c5bf1 5039{
e0ff5636 5040 /* Initialize backend, if needed. */
5041 initialize_rtl ();
5042
87d4aa85 5043 prepare_function_start ();
756dcd13 5044 decide_function_section (subr);
304c5bf1 5045
897b77d6 5046 /* Warn if this value is an aggregate type,
5047 regardless of which calling convention we are using for it. */
efb9d9ee 5048 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5049 warning (OPT_Waggregate_return, "function returns an aggregate");
0a893c29 5050}
a590d94d 5051
f1a0edff 5052/* Expand code to verify the stack_protect_guard. This is invoked at
5053 the end of a function to be protected. */
5054
71d89928 5055void
f1a0edff 5056stack_protect_epilogue (void)
5057{
5058 tree guard_decl = targetm.stack_protect_guard ();
79f6a8ed 5059 rtx_code_label *label = gen_label_rtx ();
971b8267 5060 rtx x, y;
44e46898 5061 rtx_insn *seq;
f1a0edff 5062
d2a99f05 5063 x = expand_normal (crtl->stack_protect_guard);
8a23256f 5064 if (guard_decl)
5065 y = expand_normal (guard_decl);
5066 else
5067 y = const0_rtx;
f1a0edff 5068
5069 /* Allow the target to compare Y with X without leaking either into
5070 a register. */
44e46898 5071 if (targetm.have_stack_protect_test ()
5072 && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
5073 emit_insn (seq);
5074 else
5075 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
f1a0edff 5076
5077 /* The noreturn predictor has been moved to the tree level. The rtl-level
5078 predictors estimate this branch about 20%, which isn't enough to get
5079 things moved out of line. Since this is the only extant case of adding
 5080	 a noreturn function at the rtl level, it doesn't seem worth doing aught
5081 except adding the prediction by hand. */
971b8267 5082 rtx_insn *tmp = get_last_insn ();
f1a0edff 5083 if (JUMP_P (tmp))
971b8267 5084 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
f1a0edff 5085
5a13cc45 5086 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5087 free_temp_slots ();
f1a0edff 5088 emit_label (label);
5089}
5090\f
897b77d6 5091/* Start the RTL for a new function, and set variables used for
5092 emitting RTL.
5093 SUBR is the FUNCTION_DECL node.
5094 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5095 the function's parameters, which must be run at any return statement. */
5096
5097void
82aa4bd5 5098expand_function_start (tree subr)
897b77d6 5099{
897b77d6 5100 /* Make sure volatile mem refs aren't considered
5101 valid operands of arithmetic insns. */
5102 init_recog_no_volatile ();
5103
18d50ae6 5104 crtl->profile
7811c823 5105 = (profile_flag
5106 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5107
18d50ae6 5108 crtl->limit_stack
8f8ac140 5109 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5110
df4b504c 5111 /* Make the label for return statements to jump to. Do not special
5112 case machines with special return instructions -- they will be
5113 handled later during jump, ifcvt, or epilogue creation. */
897b77d6 5114 return_label = gen_label_rtx ();
897b77d6 5115
5116 /* Initialize rtx used to return the value. */
5117 /* Do this before assign_parms so that we copy the struct value address
5118 before any library calls that assign parms might generate. */
5119
5120 /* Decide whether to return the value in memory or in a register. */
94f92c36 5121 tree res = DECL_RESULT (subr);
94f92c36 5122 if (aggregate_value_p (res, subr))
897b77d6 5123 {
5124 /* Returning something that won't go in a register. */
19cb6b50 5125 rtx value_address = 0;
897b77d6 5126
5127#ifdef PCC_STATIC_STRUCT_RETURN
18d50ae6 5128 if (cfun->returns_pcc_struct)
897b77d6 5129 {
94f92c36 5130 int size = int_size_in_bytes (TREE_TYPE (res));
897b77d6 5131 value_address = assemble_static_space (size);
5132 }
5133 else
5134#endif
5135 {
d8c09ceb 5136 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
897b77d6 5137 /* Expect to be passed the address of a place to store the value.
5138 If it is passed as an argument, assign_parms will take care of
5139 it. */
45550790 5140 if (sv)
897b77d6 5141 {
b2df3bbf 5142 value_address = gen_reg_rtx (Pmode);
45550790 5143 emit_move_insn (value_address, sv);
897b77d6 5144 }
5145 }
5146 if (value_address)
ce88c7f0 5147 {
648c102e 5148 rtx x = value_address;
94f92c36 5149 if (!DECL_BY_REFERENCE (res))
648c102e 5150 {
b2df3bbf 5151 x = gen_rtx_MEM (DECL_MODE (res), x);
5152 set_mem_attributes (x, res, 1);
648c102e 5153 }
b2df3bbf 5154 set_parm_rtl (res, x);
ce88c7f0 5155 }
897b77d6 5156 }
94f92c36 5157 else if (DECL_MODE (res) == VOIDmode)
897b77d6 5158 /* If return mode is void, this decl rtl should not be used. */
b2df3bbf 5159 set_parm_rtl (res, NULL_RTX);
5160 else
7e8dfb30 5161 {
7ab29b28 5162 /* Compute the return values into a pseudo reg, which we will copy
5163 into the true return register after the cleanups are done. */
94f92c36 5164 tree return_type = TREE_TYPE (res);
796bb135 5165
5166 /* If we may coalesce this result, make sure it has the expected mode
5167 in case it was promoted. But we need not bother about BLKmode. */
5168 machine_mode promoted_mode
5169 = flag_tree_coalesce_vars && is_gimple_reg (res)
5170 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5171 : BLKmode;
5172
5173 if (promoted_mode != BLKmode)
5174 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
94f92c36 5175 else if (TYPE_MODE (return_type) != BLKmode
5176 && targetm.calls.return_in_msb (return_type))
05d18e8b 5177 /* expand_function_end will insert the appropriate padding in
5178 this case. Use the return value's natural (unpadded) mode
5179 within the function proper. */
b2df3bbf 5180 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
92f708ec 5181 else
fdada98f 5182 {
05d18e8b 5183 /* In order to figure out what mode to use for the pseudo, we
5184 figure out what the mode of the eventual return register will
5185 actually be, and use that. */
46b3ff29 5186 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
05d18e8b 5187
5188 /* Structures that are returned in registers are not
5189 aggregate_value_p, so we may see a PARALLEL or a REG. */
5190 if (REG_P (hard_reg))
b2df3bbf 5191 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
05d18e8b 5192 else
5193 {
5194 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
b2df3bbf 5195 set_parm_rtl (res, gen_group_rtx (hard_reg));
05d18e8b 5196 }
fdada98f 5197 }
7e8dfb30 5198
b566e2e5 5199 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5200 result to the real return register(s). */
94f92c36 5201 DECL_REGISTER (res) = 1;
058a1b7a 5202
5203 if (chkp_function_instrumented_p (current_function_decl))
5204 {
94f92c36 5205 tree return_type = TREE_TYPE (res);
058a1b7a 5206 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5207 subr, 1);
94f92c36 5208 SET_DECL_BOUNDS_RTL (res, bounds);
058a1b7a 5209 }
7e8dfb30 5210 }
897b77d6 5211
5212 /* Initialize rtx for parameters and local variables.
5213 In some cases this requires emitting insns. */
bffcf014 5214 assign_parms (subr);
897b77d6 5215
4ee9c684 5216 /* If function gets a static chain arg, store it. */
5217 if (cfun->static_chain_decl)
5218 {
3efaa21f 5219 tree parm = cfun->static_chain_decl;
bf79ca12 5220 rtx local, chain;
b2df3bbf 5221 rtx_insn *insn;
5222 int unsignedp;
3efaa21f 5223
b2df3bbf 5224 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
82c7907c 5225 chain = targetm.calls.static_chain (current_function_decl, true);
5226
5227 set_decl_incoming_rtl (parm, chain, false);
b2df3bbf 5228 set_parm_rtl (parm, local);
3efaa21f 5229 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4ee9c684 5230
b2df3bbf 5231 if (GET_MODE (local) != GET_MODE (chain))
5232 {
5233 convert_move (local, chain, unsignedp);
5234 insn = get_last_insn ();
5235 }
5236 else
5237 insn = emit_move_insn (local, chain);
82c7907c 5238
5239 /* Mark the register as eliminable, similar to parameters. */
5240 if (MEM_P (chain)
5241 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
41cf444a 5242 set_dst_reg_note (insn, REG_EQUIV, chain, local);
eac967db 5243
5244 /* If we aren't optimizing, save the static chain onto the stack. */
5245 if (!optimize)
5246 {
5247 tree saved_static_chain_decl
5248 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5249 DECL_NAME (parm), TREE_TYPE (parm));
5250 rtx saved_static_chain_rtx
5251 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5252 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5253 emit_move_insn (saved_static_chain_rtx, chain);
5254 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5255 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5256 }
4ee9c684 5257 }
5258
3cfa73b5 5259 /* The following was moved from init_function_start.
5260 The move is supposed to make sdb output more accurate. */
5261 /* Indicate the beginning of the function body,
5262 as opposed to parm setup. */
5263 emit_note (NOTE_INSN_FUNCTION_BEG);
5264
5265 gcc_assert (NOTE_P (get_last_insn ()));
5266
5267 parm_birth_insn = get_last_insn ();
5268
4ee9c684 5269 /* If the function receives a non-local goto, then store the
5270 bits we need to restore the frame pointer. */
5271 if (cfun->nonlocal_goto_save_area)
5272 {
5273 tree t_save;
5274 rtx r_save;
5275
1a105fae 5276 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
7843e4bc 5277 gcc_assert (DECL_RTL_SET_P (var));
4ee9c684 5278
21dc8b2b 5279 t_save = build4 (ARRAY_REF,
5280 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
b55f9493 5281 cfun->nonlocal_goto_save_area,
5282 integer_zero_node, NULL_TREE, NULL_TREE);
4ee9c684 5283 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
21dc8b2b 5284 gcc_assert (GET_MODE (r_save) == Pmode);
50c48f9b 5285
6a5dfe57 5286 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4ee9c684 5287 update_nonlocal_goto_save_area ();
5288 }
50c48f9b 5289
18d50ae6 5290 if (crtl->profile)
b8a21949 5291 {
b8a21949 5292#ifdef PROFILE_HOOK
4781f9b9 5293 PROFILE_HOOK (current_function_funcdef_no);
104d9861 5294#endif
b8a21949 5295 }
104d9861 5296
f8c438a1 5297 /* If we are doing generic stack checking, the probe should go here. */
5298 if (flag_stack_check == GENERIC_STACK_CHECK)
1edb3690 5299 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
897b77d6 5300}
5301\f
20dc3373 5302void
5303pop_dummy_function (void)
5304{
5305 pop_cfun ();
5306 in_dummy_function = false;
5307}
5308
0a893c29 5309/* Undo the effects of init_dummy_function_start. */
5310void
de1b648b 5311expand_dummy_function_end (void)
0a893c29 5312{
87d4aa85 5313 gcc_assert (in_dummy_function);
5314
0a893c29 5315 /* End any sequences that failed to be closed due to syntax errors. */
5316 while (in_sequence_p ())
5317 end_sequence ();
5318
5319 /* Outside function body, can't compute type's actual size
5320 until next function's body starts. */
3c3bb268 5321
08513b52 5322 free_after_parsing (cfun);
5323 free_after_compilation (cfun);
20dc3373 5324 pop_dummy_function ();
0a893c29 5325}
5326
058a1b7a 5327/* Helper for diddle_return_value. */
631ef7ce 5328
5329void
058a1b7a 5330diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
631ef7ce 5331{
2766437e 5332 if (! outgoing)
5333 return;
631ef7ce 5334
8ad4c111 5335 if (REG_P (outgoing))
2766437e 5336 (*doit) (outgoing, arg);
5337 else if (GET_CODE (outgoing) == PARALLEL)
5338 {
5339 int i;
631ef7ce 5340
2766437e 5341 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5342 {
5343 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5344
8ad4c111 5345 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
2766437e 5346 (*doit) (x, arg);
631ef7ce 5347 }
5348 }
5349}
5350
058a1b7a 5351/* Call DOIT for each hard register used as a return value from
5352 the current function. */
5353
5354void
5355diddle_return_value (void (*doit) (rtx, void *), void *arg)
5356{
058a1b7a 5357 diddle_return_value_1 (doit, arg, crtl->return_bnd);
1b172b45 5358 diddle_return_value_1 (doit, arg, crtl->return_rtx);
058a1b7a 5359}
5360
2766437e 5361static void
de1b648b 5362do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
2766437e 5363{
18b42941 5364 emit_clobber (reg);
2766437e 5365}
5366
5367void
de1b648b 5368clobber_return_register (void)
2766437e 5369{
5370 diddle_return_value (do_clobber_return_reg, NULL);
1b2c7cbd 5371
 5372	 /* In case we use a pseudo to return the value, clobber it too.  */
5373 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5374 {
5375 tree decl_result = DECL_RESULT (current_function_decl);
5376 rtx decl_rtl = DECL_RTL (decl_result);
5377 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5378 {
5379 do_clobber_return_reg (decl_rtl, NULL);
5380 }
5381 }
2766437e 5382}
5383
5384static void
de1b648b 5385do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
2766437e 5386{
18b42941 5387 emit_use (reg);
2766437e 5388}
5389
ab4605bf 5390static void
de1b648b 5391use_return_register (void)
2766437e 5392{
5393 diddle_return_value (do_use_return_reg, NULL);
5394}
5395
0e80b01d 5396/* Set the location of the insn chain starting at INSN to LOC. */
5397
5398static void
4cd001d5 5399set_insn_locations (rtx_insn *insn, int loc)
0e80b01d 5400{
4cd001d5 5401 while (insn != NULL)
0e80b01d 5402 {
5403 if (INSN_P (insn))
5404 INSN_LOCATION (insn) = loc;
5405 insn = NEXT_INSN (insn);
5406 }
5407}
5408
6473f3f4 5409/* Generate RTL for the end of the current function. */
897b77d6 5410
5411void
de1b648b 5412expand_function_end (void)
897b77d6 5413{
2032b31d 5414 /* If arg_pointer_save_area was referenced only from a nested
5415 function, we will not have initialized it yet. Do that now. */
18d50ae6 5416 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
b079a207 5417 get_arg_pointer_save_area ();
2032b31d 5418
4852b829 5419 /* If we are doing generic stack checking and this function makes calls,
b22178d2 5420 do a stack probe at the start of the function to ensure we have enough
5421 space for another stack frame. */
4852b829 5422 if (flag_stack_check == GENERIC_STACK_CHECK)
b22178d2 5423 {
8bb2625b 5424 rtx_insn *insn, *seq;
b22178d2 5425
5426 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6d7dc5b9 5427 if (CALL_P (insn))
b22178d2 5428 {
d1b92264 5429 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
b22178d2 5430 start_sequence ();
d1b92264 5431 if (STACK_CHECK_MOVING_SP)
5432 anti_adjust_stack_and_probe (max_frame_size, true);
5433 else
5434 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
b22178d2 5435 seq = get_insns ();
5436 end_sequence ();
5169661d 5437 set_insn_locations (seq, prologue_location);
1edb3690 5438 emit_insn_before (seq, stack_check_probe_note);
b22178d2 5439 break;
5440 }
5441 }
5442
897b77d6 5443 /* End any sequences that failed to be closed due to syntax errors. */
5444 while (in_sequence_p ())
1bb04728 5445 end_sequence ();
897b77d6 5446
897b77d6 5447 clear_pending_stack_adjust ();
5448 do_pending_stack_adjust ();
5449
897b77d6 5450 /* Output a linenumber for the end of the function.
5451 SDB depends on this. */
5169661d 5452 set_curr_insn_location (input_location);
897b77d6 5453
b41180f5 5454 /* Before the return label (if any), clobber the return
3fb1e43b 5455 registers so that they are not propagated live to the rest of
b41180f5 5456 the function. This can only happen with functions that drop
5457 through; if there had been a return statement, there would
9b56368f 5458 have either been a return rtx, or a jump to the return label.
5459
5460 We delay actual code generation after the current_function_value_rtx
5461 is computed. */
9ed997be 5462 rtx_insn *clobber_after = get_last_insn ();
b41180f5 5463
7861133f 5464 /* Output the label for the actual return from the function. */
5465 emit_label (return_label);
897b77d6 5466
218e3e4e 5467 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
a7e05170 5468 {
5469 /* Let except.c know where it should emit the call to unregister
5470 the function context for sjlj exceptions. */
5471 if (flag_exceptions)
5472 sjlj_emit_function_exit_after (get_last_insn ());
5473 }
3072d30e 5474 else
5475 {
5476 /* We want to ensure that instructions that may trap are not
5477 moved into the epilogue by scheduling, because we don't
5478 always emit unwind information for the epilogue. */
cbeb677e 5479 if (cfun->can_throw_non_call_exceptions)
3072d30e 5480 emit_insn (gen_blockage ());
5481 }
855f1e85 5482
80e467e2 5483 /* If this is an implementation of throw, do what's necessary to
5484 communicate between __builtin_eh_return and the epilogue. */
5485 expand_eh_return ();
5486
ae39498f 5487 /* If scalar return value was computed in a pseudo-reg, or was a named
5488 return value that got dumped to the stack, copy that to the hard
5489 return register. */
0e8e37b2 5490 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
897b77d6 5491 {
ae39498f 5492 tree decl_result = DECL_RESULT (current_function_decl);
5493 rtx decl_rtl = DECL_RTL (decl_result);
5494
5495 if (REG_P (decl_rtl)
5496 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5497 : DECL_REGISTER (decl_result))
5498 {
abe32cce 5499 rtx real_decl_rtl = crtl->return_rtx;
897b77d6 5500
8839b7f1 5501 /* This should be set in assign_parms. */
fdada98f 5502 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
ae39498f 5503
5504 /* If this is a BLKmode structure being returned in registers,
5505 then use the mode computed in expand_return. Note that if
60d903f5 5506 decl_rtl is memory, then its mode may have been changed,
abe32cce 5507 but that crtl->return_rtx has not. */
ae39498f 5508 if (GET_MODE (real_decl_rtl) == BLKmode)
8839b7f1 5509 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
ae39498f 5510
05d18e8b 5511 /* If a non-BLKmode return value should be padded at the least
5512 significant end of the register, shift it left by the appropriate
5513 amount. BLKmode results are handled using the group load/store
5514 machinery. */
5515 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
d8ef55fc 5516 && REG_P (real_decl_rtl)
05d18e8b 5517 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5518 {
5519 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5520 REGNO (real_decl_rtl)),
5521 decl_rtl);
5522 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5523 }
3395ec76 5524 else if (GET_CODE (real_decl_rtl) == PARALLEL)
b566e2e5 5525 {
5526 /* If expand_function_start has created a PARALLEL for decl_rtl,
5527 move the result to the real return registers. Otherwise, do
5528 a group load from decl_rtl for a named return. */
5529 if (GET_CODE (decl_rtl) == PARALLEL)
5530 emit_group_move (real_decl_rtl, decl_rtl);
5531 else
5532 emit_group_load (real_decl_rtl, decl_rtl,
5f4cd670 5533 TREE_TYPE (decl_result),
b566e2e5 5534 int_size_in_bytes (TREE_TYPE (decl_result)));
5535 }
80e467e2 5536 /* In the case of complex integer modes smaller than a word, we'll
5537 need to generate some non-trivial bitfield insertions. Do that
5538 on a pseudo and not the hard register. */
5539 else if (GET_CODE (decl_rtl) == CONCAT
5540 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5541 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5542 {
5543 int old_generating_concat_p;
5544 rtx tmp;
5545
5546 old_generating_concat_p = generating_concat_p;
5547 generating_concat_p = 0;
5548 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5549 generating_concat_p = old_generating_concat_p;
5550
5551 emit_move_insn (tmp, decl_rtl);
5552 emit_move_insn (real_decl_rtl, tmp);
5553 }
418882d0 5554 /* If a named return value dumped decl_return to memory, then
5555 we may need to re-do the PROMOTE_MODE signed/unsigned
5556 extension. */
5557 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5558 {
5559 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5560 promote_function_mode (TREE_TYPE (decl_result),
5561 GET_MODE (decl_rtl), &unsignedp,
5562 TREE_TYPE (current_function_decl), 1);
5563
5564 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5565 }
ae39498f 5566 else
5567 emit_move_insn (real_decl_rtl, decl_rtl);
ae39498f 5568 }
897b77d6 5569 }
5570
5571 /* If returning a structure, arrange to return the address of the value
5572 in a place where debuggers expect to find it.
5573
5574 If returning a structure PCC style,
5575 the caller also depends on this value.
18d50ae6 5576 And cfun->returns_pcc_struct is not necessarily set. */
809140f3 5577 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5578 && !targetm.calls.omit_struct_return_reg)
897b77d6 5579 {
806e4c12 5580 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
897b77d6 5581 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
806e4c12 5582 rtx outgoing;
5583
5584 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5585 type = TREE_TYPE (type);
5586 else
5587 value_address = XEXP (value_address, 0);
5588
46b3ff29 5589 outgoing = targetm.calls.function_value (build_pointer_type (type),
5590 current_function_decl, true);
897b77d6 5591
5592 /* Mark this as a function return value so integrate will delete the
5593 assignment and USE below when inlining this function. */
5594 REG_FUNCTION_VALUE_P (outgoing) = 1;
5595
c54c9422 5596 /* The address may be ptr_mode and OUTGOING may be Pmode. */
85d654dd 5597 value_address = convert_memory_address (GET_MODE (outgoing),
5598 value_address);
c54c9422 5599
897b77d6 5600 emit_move_insn (outgoing, value_address);
c54c9422 5601
 5602	 /* Show the return register used to hold the result (in this case,
 5603	    the address of the result).  */
abe32cce 5604 crtl->return_rtx = outgoing;
897b77d6 5605 }
5606
04e7d9cb 5607 /* Emit the actual code to clobber return register. Don't emit
5608 it if clobber_after is a barrier, then the previous basic block
5609 certainly doesn't fall thru into the exit block. */
5610 if (!BARRIER_P (clobber_after))
5611 {
04e7d9cb 5612 start_sequence ();
5613 clobber_return_register ();
9ed997be 5614 rtx_insn *seq = get_insns ();
04e7d9cb 5615 end_sequence ();
9b56368f 5616
04e7d9cb 5617 emit_insn_after (seq, clobber_after);
5618 }
9b56368f 5619
01628e06 5620 /* Output the label for the naked return from the function. */
b2ee26d5 5621 if (naked_return_label)
5622 emit_label (naked_return_label);
62380d2d 5623
1b7fd1d9 5624 /* @@@ This is a kludge. We want to ensure that instructions that
5625 may trap are not moved into the epilogue by scheduling, because
d86df71c 5626 we don't always emit unwind information for the epilogue. */
cc7d6aed 5627 if (cfun->can_throw_non_call_exceptions
218e3e4e 5628 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
d86df71c 5629 emit_insn (gen_blockage ());
1b7fd1d9 5630
f1a0edff 5631 /* If stack protection is enabled for this function, check the guard. */
783f362b 5632 if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
f1a0edff 5633 stack_protect_epilogue ();
5634
6a7492e8 5635 /* If we had calls to alloca, and this machine needs
5636 an accurate stack pointer to exit the function,
5637 insert some code to save and restore the stack pointer. */
5638 if (! EXIT_IGNORE_STACK
18d50ae6 5639 && cfun->calls_alloca)
6a7492e8 5640 {
9ed997be 5641 rtx tem = 0;
6a7492e8 5642
e9c97615 5643 start_sequence ();
5644 emit_stack_save (SAVE_FUNCTION, &tem);
9ed997be 5645 rtx_insn *seq = get_insns ();
e9c97615 5646 end_sequence ();
5647 emit_insn_before (seq, parm_birth_insn);
5648
5649 emit_stack_restore (SAVE_FUNCTION, tem);
6a7492e8 5650 }
5651
2766437e 5652 /* ??? This should no longer be necessary since stupid is no longer with
 5653	 us, but there are some parts of the compiler (e.g. reload_combine, and
5654 sh mach_dep_reorg) that still try and compute their own lifetime info
5655 instead of using the general framework. */
5656 use_return_register ();
897b77d6 5657}
05927e40 5658
5659rtx
b079a207 5660get_arg_pointer_save_area (void)
05927e40 5661{
b079a207 5662 rtx ret = arg_pointer_save_area;
05927e40 5663
5664 if (! ret)
5665 {
b079a207 5666 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5667 arg_pointer_save_area = ret;
2032b31d 5668 }
5669
18d50ae6 5670 if (! crtl->arg_pointer_save_area_init)
2032b31d 5671 {
60d903f5 5672 /* Save the arg pointer at the beginning of the function. The
2032b31d 5673 generated stack slot may not be a valid memory address, so we
05927e40 5674 have to check it and fix it if necessary. */
5675 start_sequence ();
d2b9158b 5676 emit_move_insn (validize_mem (copy_rtx (ret)),
27a7a23a 5677 crtl->args.internal_arg_pointer);
9ed997be 5678 rtx_insn *seq = get_insns ();
05927e40 5679 end_sequence ();
5680
2032b31d 5681 push_topmost_sequence ();
c838448c 5682 emit_insn_after (seq, entry_of_function ());
2032b31d 5683 pop_topmost_sequence ();
050f9ef1 5684
5685 crtl->arg_pointer_save_area_init = true;
05927e40 5686 }
5687
5688 return ret;
5689}
b2c5602e 5690\f
25e880b1 5691/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5692 for the first time. */
b2c5602e 5693
60ecc450 5694static void
d1023d12 5695record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
b2c5602e 5696{
4cd001d5 5697 rtx_insn *tmp;
d1023d12 5698 hash_table<insn_cache_hasher> *hash = *hashp;
60ecc450 5699
25e880b1 5700 if (hash == NULL)
d1023d12 5701 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
25e880b1 5702
5703 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5704 {
d1023d12 5705 rtx *slot = hash->find_slot (tmp, INSERT);
25e880b1 5706 gcc_assert (*slot == NULL);
5707 *slot = tmp;
5708 }
5709}
5710
1eefcaee 5711/* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5712 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5713 insn, then record COPY as well. */
25e880b1 5714
5715void
1eefcaee 5716maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
25e880b1 5717{
d1023d12 5718 hash_table<insn_cache_hasher> *hash;
5719 rtx *slot;
25e880b1 5720
1eefcaee 5721 hash = epilogue_insn_hash;
d1023d12 5722 if (!hash || !hash->find (insn))
1eefcaee 5723 {
5724 hash = prologue_insn_hash;
d1023d12 5725 if (!hash || !hash->find (insn))
1eefcaee 5726 return;
5727 }
25e880b1 5728
d1023d12 5729 slot = hash->find_slot (copy, INSERT);
25e880b1 5730 gcc_assert (*slot == NULL);
5731 *slot = copy;
b2c5602e 5732}
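
/* Illustrative usage sketch (editor's addition, not part of function.c):
   a pass that duplicates an insn keeps the prologue/epilogue hashes
   consistent by recording the copy; `insn' and `copy' are hypothetical
   rtx values.  */
#if 0
  maybe_copy_prologue_epilogue_insn (insn, copy);
#endif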
5733
25e880b1 5734/* Determine if any INSNs in HASH are, or are part of, INSN. Because
5735 we can be running after reorg, SEQUENCE rtl is possible. */
b2c5602e 5736
25e880b1 5737static bool
e7ea1192 5738contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
b2c5602e 5739{
25e880b1 5740 if (hash == NULL)
5741 return false;
b2c5602e 5742
25e880b1 5743 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
b2c5602e 5744 {
9e21f364 5745 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
25e880b1 5746 int i;
9e21f364 5747 for (i = seq->len () - 1; i >= 0; i--)
d1023d12 5748 if (hash->find (seq->element (i)))
25e880b1 5749 return true;
5750 return false;
b2c5602e 5751 }
25e880b1 5752
e7ea1192 5753 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
b2c5602e 5754}
a590d94d 5755
5a321af0 5756int
e7ea1192 5757prologue_contains (const rtx_insn *insn)
5a321af0 5758{
5759 return contains (insn, prologue_insn_hash);
5760}
5761
5762int
e7ea1192 5763epilogue_contains (const rtx_insn *insn)
5a321af0 5764{
5765 return contains (insn, epilogue_insn_hash);
5766}
5767
a590d94d 5768int
e7ea1192 5769prologue_epilogue_contains (const rtx_insn *insn)
a590d94d 5770{
25e880b1 5771 if (contains (insn, prologue_insn_hash))
a590d94d 5772 return 1;
25e880b1 5773 if (contains (insn, epilogue_insn_hash))
a590d94d 5774 return 1;
5775 return 0;
5776}
b2c5602e 5777
5a321af0 5778void
5779record_prologue_seq (rtx_insn *seq)
5780{
5781 record_insns (seq, NULL, &prologue_insn_hash);
5782}
5783
5784void
5785record_epilogue_seq (rtx_insn *seq)
5786{
5787 record_insns (seq, NULL, &epilogue_insn_hash);
5788}
2215ca0d 5789
31a53363 5790/* Set JUMP_LABEL for a return insn. */
5791
5792void
a9634f6a 5793set_return_jump_label (rtx_insn *returnjump)
31a53363 5794{
5795 rtx pat = PATTERN (returnjump);
5796 if (GET_CODE (pat) == PARALLEL)
5797 pat = XVECEXP (pat, 0, 0);
5798 if (ANY_RETURN_P (pat))
5799 JUMP_LABEL (returnjump) = pat;
5800 else
5801 JUMP_LABEL (returnjump) = ret_rtx;
5802}
5803
e554af11 5804/* Return a sequence to be used as the split prologue for the current
5805 function, or NULL. */
5806
5807static rtx_insn *
5808make_split_prologue_seq (void)
5809{
5810 if (!flag_split_stack
5811 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5812 return NULL;
5813
5814 start_sequence ();
5815 emit_insn (targetm.gen_split_stack_prologue ());
5816 rtx_insn *seq = get_insns ();
5817 end_sequence ();
5818
5819 record_insns (seq, NULL, &prologue_insn_hash);
5820 set_insn_locations (seq, prologue_location);
5821
5822 return seq;
5823}
5824
5825/* Return a sequence to be used as the prologue for the current function,
5826 or NULL. */
5827
5828static rtx_insn *
5829make_prologue_seq (void)
5830{
5831 if (!targetm.have_prologue ())
5832 return NULL;
5833
5834 start_sequence ();
5835 rtx_insn *seq = targetm.gen_prologue ();
5836 emit_insn (seq);
5837
5838 /* Insert an explicit USE for the frame pointer
 5839	 if profiling is on and the frame pointer is required.  */
5840 if (crtl->profile && frame_pointer_needed)
5841 emit_use (hard_frame_pointer_rtx);
5842
5843 /* Retain a map of the prologue insns. */
5844 record_insns (seq, NULL, &prologue_insn_hash);
5845 emit_note (NOTE_INSN_PROLOGUE_END);
5846
5847 /* Ensure that instructions are not moved into the prologue when
5848 profiling is on. The call to the profiling routine can be
5849 emitted within the live range of a call-clobbered register. */
5850 if (!targetm.profile_before_prologue () && crtl->profile)
5851 emit_insn (gen_blockage ());
5852
5853 seq = get_insns ();
5854 end_sequence ();
5855 set_insn_locations (seq, prologue_location);
5856
5857 return seq;
5858}
5859
5860/* Return a sequence to be used as the epilogue for the current function,
5861 or NULL. */
5862
5863static rtx_insn *
d0695500 5864make_epilogue_seq (void)
e554af11 5865{
5866 if (!targetm.have_epilogue ())
5867 return NULL;
5868
5869 start_sequence ();
d0695500 5870 emit_note (NOTE_INSN_EPILOGUE_BEG);
e554af11 5871 rtx_insn *seq = targetm.gen_epilogue ();
5872 if (seq)
5873 emit_jump_insn (seq);
5874
5875 /* Retain a map of the epilogue insns. */
5876 record_insns (seq, NULL, &epilogue_insn_hash);
5877 set_insn_locations (seq, epilogue_location);
5878
5879 seq = get_insns ();
5880 rtx_insn *returnjump = get_last_insn ();
5881 end_sequence ();
5882
5883 if (JUMP_P (returnjump))
5884 set_return_jump_label (returnjump);
5885
5886 return seq;
5887}
5888
0a55d497 5889
c3418f42 5890/* Generate the prologue and epilogue RTL if the machine supports it. Thread
b2c5602e 5891 this into place with notes indicating where the prologue ends and where
1f021f97 5892 the epilogue begins. Update the basic block information when possible.
5893
5894 Notes on epilogue placement:
5895 There are several kinds of edges to the exit block:
5896 * a single fallthru edge from LAST_BB
5897 * possibly, edges from blocks containing sibcalls
5898 * possibly, fake edges from infinite loops
5899
5900 The epilogue is always emitted on the fallthru edge from the last basic
5901 block in the function, LAST_BB, into the exit block.
5902
5903 If LAST_BB is empty except for a label, it is the target of every
5904 other basic block in the function that ends in a return. If a
5905 target has a return or simple_return pattern (possibly with
5906 conditional variants), these basic blocks can be changed so that a
5907 return insn is emitted into them, and their target is adjusted to
5908 the real exit block.
5909
5910 Notes on shrink wrapping: We implement a fairly conservative
5911 version of shrink-wrapping rather than the textbook one. We only
5912 generate a single prologue and a single epilogue. This is
5913 sufficient to catch a number of interesting cases involving early
5914 exits.
5915
5916 First, we identify the blocks that require the prologue to occur before
5917 them. These are the ones that modify a call-saved register, or reference
5918 any of the stack or frame pointer registers. To simplify things, we then
5919 mark everything reachable from these blocks as also requiring a prologue.
5920 This takes care of loops automatically, and avoids the need to examine
5921 whether MEMs reference the frame, since it is sufficient to check for
5922 occurrences of the stack or frame pointer.
5923
5924 We then compute the set of blocks for which the need for a prologue
5925 is anticipatable (borrowing terminology from the shrink-wrapping
5926 description in Muchnick's book). These are the blocks which either
5927 require a prologue themselves, or those that have only successors
5928 where the prologue is anticipatable. The prologue needs to be
5929 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5930 is not. For the moment, we ensure that only one such edge exists.
5931
5932 The epilogue is placed as described above, but we make a
5933 distinction between inserting return and simple_return patterns
5934 when modifying other blocks that end in a return. Blocks that end
5935 in a sibcall omit the sibcall_epilogue if the block is not in
5936 ANTIC. */
b2c5602e 5937
7ed9df76 5938void
3072d30e 5939thread_prologue_and_epilogue_insns (void)
b2c5602e 5940{
1f021f97 5941 df_analyze ();
71caadc0 5942
48b14f50 5943 /* Can't deal with multiple successors of the entry block at the
5944 moment. Function should always have at least one entry
5945 point. */
34154e27 5946 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
d0695500 5947
5948 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5949 edge orig_entry_edge = entry_edge;
1f021f97 5950
9a3e1f72 5951 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
e554af11 5952 rtx_insn *prologue_seq = make_prologue_seq ();
9a3e1f72 5953 rtx_insn *epilogue_seq = make_epilogue_seq ();
1f021f97 5954
1f021f97 5955 /* Try to perform a kind of shrink-wrapping, making sure the
5956 prologue/epilogue is emitted only around those parts of the
5957 function that require it. */
d0695500 5958 try_shrink_wrapping (&entry_edge, prologue_seq);
1f021f97 5959
f6ec9420 5960 /* If the target can handle splitting the prologue/epilogue into separate
5961 components, try to shrink-wrap these components separately. */
5962 try_shrink_wrapping_separate (entry_edge->dest);
5963
 5964	 /* If that did anything for any component we now need to generate the
9a3e1f72 5965 "main" prologue again. Because some targets require some of these
5966 to be called in a specific order (i386 requires the split prologue
5967 to be first, for example), we create all three sequences again here.
5968 If this does not work for some target, that target should not enable
5969 separate shrink-wrapping. */
f6ec9420 5970 if (crtl->shrink_wrapped_separate)
9a3e1f72 5971 {
5972 split_prologue_seq = make_split_prologue_seq ();
5973 prologue_seq = make_prologue_seq ();
5974 epilogue_seq = make_epilogue_seq ();
5975 }
777e249a 5976
34154e27 5977 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
1f021f97 5978
25e880b1 5979 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5980 this marker for the splits of EH_RETURN patterns, and nothing else
5981 uses the flag in the meantime. */
5982 epilogue_completed = 1;
5983
25e880b1 5984 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5985 some targets, these get split to a special version of the epilogue
5986 code. In order to be able to properly annotate these with unwind
5987 info, try to split them now. If we get a valid split, drop an
5988 EPILOGUE_BEG note and mark the insns as epilogue insns. */
d0695500 5989 edge e;
5990 edge_iterator ei;
34154e27 5991 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
25e880b1 5992 {
8bb2625b 5993 rtx_insn *prev, *last, *trial;
25e880b1 5994
5995 if (e->flags & EDGE_FALLTHRU)
5996 continue;
5997 last = BB_END (e->src);
5998 if (!eh_returnjump_p (last))
5999 continue;
6000
6001 prev = PREV_INSN (last);
6002 trial = try_split (PATTERN (last), last, 1);
6003 if (trial == last)
6004 continue;
6005
6006 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6007 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6008 }
25e880b1 6009
d0695500 6010 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
fc8645f8 6011
d0695500 6012 if (exit_fallthru_edge)
9bb8a4af 6013 {
d0695500 6014 if (epilogue_seq)
6015 {
6016 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
1b912edf 6017 commit_edge_insertions ();
d0695500 6018
6019 /* The epilogue insns we inserted may cause the exit edge to no longer
6020 be fallthru. */
6021 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6022 {
6023 if (((e->flags & EDGE_FALLTHRU) != 0)
6024 && returnjump_p (BB_END (e->src)))
6025 e->flags &= ~EDGE_FALLTHRU;
6026 }
6027 }
6028 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6029 {
6030 /* We have a fall-through edge to the exit block, the source is not
6031 at the end of the function, and there will be an assembler epilogue
6032 at the end of the function.
6033 We can't use force_nonfallthru here, because that would try to
6034 use return. Inserting a jump 'by hand' is extremely messy, so
6035 we take advantage of cfg_layout_finalize using
6036 fixup_fallthru_exit_predecessor. */
6037 cfg_layout_initialize (0);
6038 basic_block cur_bb;
6039 FOR_EACH_BB_FN (cur_bb, cfun)
6040 if (cur_bb->index >= NUM_FIXED_BLOCKS
6041 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6042 cur_bb->aux = cur_bb->next_bb;
6043 cfg_layout_finalize ();
6044 }
9bb8a4af 6045 }
202bbc06 6046
d0695500 6047 /* Insert the prologue. */
1f021f97 6048
d0695500 6049 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
71caadc0 6050
d0695500 6051 if (split_prologue_seq || prologue_seq)
e08b2eb8 6052 {
af9068f3 6053 rtx_insn *split_prologue_insn = split_prologue_seq;
d0695500 6054 if (split_prologue_seq)
af9068f3 6055 {
6056 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6057 split_prologue_insn = NEXT_INSN (split_prologue_insn);
6058 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6059 }
d0695500 6060
af9068f3 6061 rtx_insn *prologue_insn = prologue_seq;
d0695500 6062 if (prologue_seq)
af9068f3 6063 {
6064 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6065 prologue_insn = NEXT_INSN (prologue_insn);
6066 insert_insn_on_edge (prologue_seq, entry_edge);
6067 }
202bbc06 6068
e08b2eb8 6069 commit_edge_insertions ();
6070
202bbc06 6071 /* Look for basic blocks within the prologue insns. */
af9068f3 6072 if (split_prologue_insn
6073 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6074 split_prologue_insn = NULL;
6075 if (prologue_insn
6076 && BLOCK_FOR_INSN (prologue_insn) == NULL)
6077 prologue_insn = NULL;
6078 if (split_prologue_insn || prologue_insn)
6079 {
6080 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6081 bitmap_clear (blocks);
6082 if (split_prologue_insn)
6083 bitmap_set_bit (blocks,
6084 BLOCK_FOR_INSN (split_prologue_insn)->index);
6085 if (prologue_insn)
6086 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6087 find_many_sub_basic_blocks (blocks);
6088 }
e08b2eb8 6089 }
60ecc450 6090
d0695500 6091 default_rtl_profile ();
6092
60ecc450 6093 /* Emit sibling epilogues before any sibling call sites. */
d0695500 6094 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6095 (e = ei_safe_edge (ei));
6096 ei_next (&ei))
60ecc450 6097 {
d0695500 6098 /* Skip those already handled, the ones that run without prologue. */
6099 if (e->flags & EDGE_IGNORE)
cd665a06 6100 {
d0695500 6101 e->flags &= ~EDGE_IGNORE;
cd665a06 6102 continue;
6103 }
60ecc450 6104
d0695500 6105 rtx_insn *insn = BB_END (e->src);
6106
6107 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6108 continue;
6109
cf3a33c8 6110 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
1f021f97 6111 {
6112 start_sequence ();
6113 emit_note (NOTE_INSN_EPILOGUE_BEG);
6114 emit_insn (ep_seq);
4cd001d5 6115 rtx_insn *seq = get_insns ();
1f021f97 6116 end_sequence ();
60ecc450 6117
1f021f97 6118 /* Retain a map of the epilogue insns. Used in life analysis to
6119 avoid getting rid of sibcall epilogue insns. Do this before we
6120 actually emit the sequence. */
6121 record_insns (seq, NULL, &epilogue_insn_hash);
5169661d 6122 set_insn_locations (seq, epilogue_location);
31d3e01c 6123
1f021f97 6124 emit_insn_before (seq, insn);
6125 }
60ecc450 6126 }
58d5b39c 6127
d0695500 6128 if (epilogue_seq)
142e7d22 6129 {
5a7c3c87 6130 rtx_insn *insn, *next;
142e7d22 6131
6132 /* Similarly, move any line notes that appear after the epilogue.
424da949 6133 There is no need, however, to be quite so anal about the existence
737251e7 6134 of such a note. Also possibly move
dc8def52 6135 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6136 info generation. */
d0695500 6137 for (insn = epilogue_seq; insn; insn = next)
142e7d22 6138 {
6139 next = NEXT_INSN (insn);
48e1416a 6140 if (NOTE_P (insn)
ad4583d9 6141 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
d0695500 6142 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
142e7d22 6143 }
6144 }
3072d30e 6145
6146 /* Threading the prologue and epilogue changes the artificial refs
6147 in the entry and exit blocks. */
6148 epilogue_completed = 1;
6149 df_update_entry_exit_and_calls ();
b2c5602e 6150}
6151
25e880b1 6152/* Reposition the prologue-end and epilogue-begin notes after
6153 instruction scheduling. */
b2c5602e 6154
6155void
3072d30e 6156reposition_prologue_and_epilogue_notes (void)
b2c5602e 6157{
cf3a33c8 6158 if (!targetm.have_prologue ()
6159 && !targetm.have_epilogue ()
6160 && !targetm.have_sibcall_epilogue ())
317443b3 6161 return;
317443b3 6162
25e880b1 6163 /* Since the hash table is created on demand, the fact that it is
6164 non-null is a signal that it is non-empty. */
6165 if (prologue_insn_hash != NULL)
b2c5602e 6166 {
d1023d12 6167 size_t len = prologue_insn_hash->elements ();
8bb2625b 6168 rtx_insn *insn, *last = NULL, *note = NULL;
b2c5602e 6169
25e880b1 6170 /* Scan from the beginning until we reach the last prologue insn. */
6171 /* ??? While we do have the CFG intact, there are two problems:
6172 (1) The prologue can contain loops (typically probing the stack),
6173 which means that the end of the prologue isn't in the first bb.
6174 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
3072d30e 6175 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
b2c5602e 6176 {
6d7dc5b9 6177 if (NOTE_P (insn))
12d1c03c 6178 {
ad4583d9 6179 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
60ecc450 6180 note = insn;
6181 }
25e880b1 6182 else if (contains (insn, prologue_insn_hash))
60ecc450 6183 {
5c0913b4 6184 last = insn;
6185 if (--len == 0)
6186 break;
6187 }
6188 }
60d903f5 6189
5c0913b4 6190 if (last)
6191 {
25e880b1 6192 if (note == NULL)
5c0913b4 6193 {
25e880b1 6194 /* Scan forward looking for the PROLOGUE_END note. It should
6195 be right at the beginning of the block, possibly with other
6196 insn notes that got moved there. */
6197 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6198 {
6199 if (NOTE_P (note)
6200 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6201 break;
6202 }
5c0913b4 6203 }
2a588794 6204
5c0913b4 6205 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6d7dc5b9 6206 if (LABEL_P (last))
5c0913b4 6207 last = NEXT_INSN (last);
6208 reorder_insns (note, note, last);
b2c5602e 6209 }
60ecc450 6210 }
6211
25e880b1 6212 if (epilogue_insn_hash != NULL)
60ecc450 6213 {
25e880b1 6214 edge_iterator ei;
6215 edge e;
b2c5602e 6216
34154e27 6217 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
b2c5602e 6218 {
8bb2625b 6219 rtx_insn *insn, *first = NULL, *note = NULL;
c009a3ec 6220 basic_block bb = e->src;
2a588794 6221
c009a3ec 6222 /* Scan from the beginning until we reach the first epilogue insn. */
25e880b1 6223 FOR_BB_INSNS (bb, insn)
5c0913b4 6224 {
25e880b1 6225 if (NOTE_P (insn))
6226 {
6227 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6228 {
6229 note = insn;
c009a3ec 6230 if (first != NULL)
25e880b1 6231 break;
6232 }
6233 }
c009a3ec 6234 else if (first == NULL && contains (insn, epilogue_insn_hash))
25e880b1 6235 {
c009a3ec 6236 first = insn;
25e880b1 6237 if (note != NULL)
6238 break;
6239 }
12d1c03c 6240 }
c009a3ec 6241
6242 if (note)
6243 {
6244 /* If the function has a single basic block, and no real
48e1416a 6245 epilogue insns (e.g. sibcall with no cleanup), the
c009a3ec 6246 epilogue note can get scheduled before the prologue
6247 note. If we have frame related prologue insns, having
6248 them scanned during the epilogue will result in a crash.
6249 In this case re-order the epilogue note to just before
6250 the last insn in the block. */
6251 if (first == NULL)
6252 first = BB_END (bb);
6253
6254 if (PREV_INSN (first) != note)
6255 reorder_insns (note, note, PREV_INSN (first));
6256 }
b2c5602e 6257 }
6258 }
b2c5602e 6259}
a7b0c170 6260
9631926a 6261/* Returns the name of function declared by FNDECL. */
6262const char *
6263fndecl_name (tree fndecl)
6264{
6265 if (fndecl == NULL)
6266 return "(nofn)";
6267 return lang_hooks.decl_printable_name (fndecl, 2);
6268}
6269
4a020a8c 6270/* Returns the name of function FN. */
6271const char *
6272function_name (struct function *fn)
6273{
9631926a 6274 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6275 return fndecl_name (fndecl);
4a020a8c 6276}
6277
35901471 6278/* Returns the name of the current function. */
6279const char *
6280current_function_name (void)
6281{
4a020a8c 6282 return function_name (cfun);
35901471 6283}
77fce4cd 6284\f
6285
2a1990e9 6286static unsigned int
77fce4cd 6287rest_of_handle_check_leaf_regs (void)
6288{
6289#ifdef LEAF_REGISTERS
d5bf7b64 6290 crtl->uses_only_leaf_regs
77fce4cd 6291 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6292#endif
2a1990e9 6293 return 0;
77fce4cd 6294}
6295
35df6eb4 6296/* Insert a TYPE into the used types hash table of CFUN. */
1a4c44c5 6297
35df6eb4 6298static void
6299used_types_insert_helper (tree type, struct function *func)
f6e59711 6300{
35df6eb4 6301 if (type != NULL && func != NULL)
f6e59711 6302 {
f6e59711 6303 if (func->used_types_hash == NULL)
8f359205 6304 func->used_types_hash = hash_set<tree>::create_ggc (37);
6305
6306 func->used_types_hash->add (type);
f6e59711 6307 }
6308}
6309
35df6eb4 6310/* Given a type, insert it into the used types hash table in cfun.  */
6311void
6312used_types_insert (tree t)
6313{
6314 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
bd564c29 6315 if (TYPE_NAME (t))
6316 break;
6317 else
6318 t = TREE_TYPE (t);
26ee9e7a 6319 if (TREE_CODE (t) == ERROR_MARK)
6320 return;
bd564c29 6321 if (TYPE_NAME (t) == NULL_TREE
6322 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6323 t = TYPE_MAIN_VARIANT (t);
35df6eb4 6324 if (debug_info_level > DINFO_LEVEL_NONE)
1a4c44c5 6325 {
6326 if (cfun)
6327 used_types_insert_helper (t, cfun);
6328 else
f1f41a6c 6329 {
6330 /* So this might be a type referenced by a global variable.
6331 Record that type so that we can later decide to emit its
6332 debug information. */
6333 vec_safe_push (types_used_by_cur_var_decl, t);
6334 }
1a4c44c5 6335 }
6336}
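
/* Hedged sketch of what the loop above does (illustrative only): for a
   parameter declared as

     struct S { int i; };
     void f (struct S **p);

   passing the type of `p' to used_types_insert strips the two unnamed
   POINTER_TYPE layers and stops at the named type `struct S'; its main
   variant is then recorded in cfun->used_types_hash (or queued on
   types_used_by_cur_var_decl when there is no cfun), so that debug info
   for `struct S' can be emitted later.  The names are illustrative.  */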
6337
6338/* Helper to hash a struct types_used_by_vars_entry. */
6339
6340static hashval_t
6341hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6342{
6343 gcc_assert (entry && entry->var_decl && entry->type);
6344
6345 return iterative_hash_object (entry->type,
6346 iterative_hash_object (entry->var_decl, 0));
6347}
6348
6349/* Hash function of the types_used_by_vars_entry hash table. */
6350
6351hashval_t
2ef51f0e 6352used_type_hasher::hash (types_used_by_vars_entry *entry)
1a4c44c5 6353{
1a4c44c5 6354 return hash_types_used_by_vars_entry (entry);
6355}
6356
6357/* Equality function of the types_used_by_vars_entry hash table. */
6358
2ef51f0e 6359bool
6360used_type_hasher::equal (types_used_by_vars_entry *e1,
6361 types_used_by_vars_entry *e2)
1a4c44c5 6362{
1a4c44c5 6363 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6364}
6365
6366/* Inserts an entry into the types_used_by_vars_hash hash table. */
6367
6368void
6369types_used_by_var_decl_insert (tree type, tree var_decl)
6370{
6371 if (type != NULL && var_decl != NULL)
6372 {
2ef51f0e 6373 types_used_by_vars_entry **slot;
1a4c44c5 6374 struct types_used_by_vars_entry e;
6375 e.var_decl = var_decl;
6376 e.type = type;
6377 if (types_used_by_vars_hash == NULL)
2ef51f0e 6378 types_used_by_vars_hash
6379 = hash_table<used_type_hasher>::create_ggc (37);
6380
6381 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
1a4c44c5 6382 if (*slot == NULL)
6383 {
6384 struct types_used_by_vars_entry *entry;
25a27413 6385 entry = ggc_alloc<types_used_by_vars_entry> ();
1a4c44c5 6386 entry->type = type;
6387 entry->var_decl = var_decl;
6388 *slot = entry;
6389 }
6390 }
35df6eb4 6391}
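
/* Hedged illustration (not part of the original sources): for a
   file-scope variable such as

     struct S { int i; };
     struct S s;

   a front end can record the pair (`struct S', `s') here, so that debug
   info for `struct S' is emitted even though no function body uses the
   type directly.  The names are purely illustrative.  */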
6392
cbe8bda8 6393namespace {
6394
6395const pass_data pass_data_leaf_regs =
6396{
6397 RTL_PASS, /* type */
6398 "*leaf_regs", /* name */
6399 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 6400 TV_NONE, /* tv_id */
6401 0, /* properties_required */
6402 0, /* properties_provided */
6403 0, /* properties_destroyed */
6404 0, /* todo_flags_start */
6405 0, /* todo_flags_finish */
77fce4cd 6406};
6407
cbe8bda8 6408class pass_leaf_regs : public rtl_opt_pass
6409{
6410public:
9af5ce0c 6411 pass_leaf_regs (gcc::context *ctxt)
6412 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
cbe8bda8 6413 {}
6414
6415 /* opt_pass methods: */
65b0537f 6416 virtual unsigned int execute (function *)
6417 {
6418 return rest_of_handle_check_leaf_regs ();
6419 }
cbe8bda8 6420
6421}; // class pass_leaf_regs
6422
6423} // anon namespace
6424
6425rtl_opt_pass *
6426make_pass_leaf_regs (gcc::context *ctxt)
6427{
6428 return new pass_leaf_regs (ctxt);
6429}
6430
3072d30e 6431static unsigned int
6432rest_of_handle_thread_prologue_and_epilogue (void)
6433{
e80af455 6434 /* prepare_shrink_wrap is sensitive to the block structure of the control
6435 flow graph, so clean it up first. */
3072d30e 6436 if (optimize)
e80af455 6437 cleanup_cfg (0);
990495a7 6438
3072d30e 6439 /* On some machines, the prologue and epilogue code, or parts thereof,
6440 can be represented as RTL. Doing so lets us schedule insns between
6441 it and the rest of the code and also allows delayed branch
6442 scheduling to operate in the epilogue. */
3072d30e 6443 thread_prologue_and_epilogue_insns ();
990495a7 6444
0849803d 6445 /* Some non-cold blocks may now be only reachable from cold blocks.
6446 Fix that up. */
6447 fixup_partitions ();
6448
6a5f2336 6449 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6450 see PR57320. */
e80af455 6451 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6a5f2336 6452
990495a7 6453 /* The stack usage info is finalized during prologue expansion. */
8c0dd614 6454 if (flag_stack_usage_info)
990495a7 6455 output_stack_usage ();
6456
3072d30e 6457 return 0;
6458}
6459
cbe8bda8 6460namespace {
6461
6462const pass_data pass_data_thread_prologue_and_epilogue =
6463{
6464 RTL_PASS, /* type */
6465 "pro_and_epilogue", /* name */
6466 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 6467 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6468 0, /* properties_required */
6469 0, /* properties_provided */
6470 0, /* properties_destroyed */
8b88439e 6471 0, /* todo_flags_start */
6472 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
3072d30e 6473};
cbe8bda8 6474
6475class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6476{
6477public:
9af5ce0c 6478 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6479 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
cbe8bda8 6480 {}
6481
6482 /* opt_pass methods: */
65b0537f 6483 virtual unsigned int execute (function *)
6484 {
6485 return rest_of_handle_thread_prologue_and_epilogue ();
6486 }
cbe8bda8 6487
6488}; // class pass_thread_prologue_and_epilogue
6489
6490} // anon namespace
6491
6492rtl_opt_pass *
6493make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6494{
6495 return new pass_thread_prologue_and_epilogue (ctxt);
6496}
9dc6d5bb 6497\f
6498
6499/* This mini-pass fixes fall-out from SSA in asm statements that have
48e1416a 6500 in-out constraints. Say you start with
9dc6d5bb 6501
6502 orig = inout;
6503 asm ("": "+mr" (inout));
6504 use (orig);
6505
6506 which is transformed very early to use explicit output and match operands:
6507
6508 orig = inout;
6509 asm ("": "=mr" (inout) : "0" (inout));
6510 use (orig);
6511
6512 Or, after SSA and copyprop,
6513
6514 asm ("": "=mr" (inout_2) : "0" (inout_1));
6515 use (inout_1);
6516
6517 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6518 they represent two separate values, so they will get different pseudo
6519 registers during expansion. Then, since the two operands need to match
6520 per the constraints, but use different pseudo registers, reload can
6521 only register a reload for these operands. But reloads can only be
6522 satisfied by hardregs, not by memory, so we need a register for this
6523 reload, just because we are presented with non-matching operands.
6524 So, even though we allow memory for this operand, no memory can be
6525 used for it, just because the two operands don't match. This can
6526 cause reload failures on register-starved targets.
6527
6528 So it's a symptom of reload not being able to use memory for reloads
6529 or, alternatively, it's also a symptom of both operands not coming into
6530 reload as matching (in which case the pseudo could go to memory just
6531 fine, as the alternative allows it, and no reload would be necessary).
6532 We fix the latter problem here, by transforming
6533
6534 asm ("": "=mr" (inout_2) : "0" (inout_1));
6535
6536 back to
6537
6538 inout_2 = inout_1;
6539 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6540
6541static void
8bb2625b 6542match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
9dc6d5bb 6543{
6544 int i;
6545 bool changed = false;
6546 rtx op = SET_SRC (p_sets[0]);
6547 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6548 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
2457c754 6549 bool *output_matched = XALLOCAVEC (bool, noutputs);
9dc6d5bb 6550
3f982e5a 6551 memset (output_matched, 0, noutputs * sizeof (bool));
9dc6d5bb 6552 for (i = 0; i < ninputs; i++)
6553 {
8bb2625b 6554 rtx input, output;
6555 rtx_insn *insns;
9dc6d5bb 6556 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6557 char *end;
d069e0d3 6558 int match, j;
9dc6d5bb 6559
fbcb9be4 6560 if (*constraint == '%')
6561 constraint++;
6562
9dc6d5bb 6563 match = strtoul (constraint, &end, 10);
6564 if (end == constraint)
6565 continue;
6566
6567 gcc_assert (match < noutputs);
6568 output = SET_DEST (p_sets[match]);
6569 input = RTVEC_ELT (inputs, i);
d069e0d3 6570 /* Only do the transformation for pseudos. */
6571 if (! REG_P (output)
6572 || rtx_equal_p (output, input)
9dc6d5bb 6573 || (GET_MODE (input) != VOIDmode
6574 && GET_MODE (input) != GET_MODE (output)))
6575 continue;
6576
d069e0d3 6577 /* We can't do anything if the output is also used as input,
6578 as we're going to overwrite it. */
6579 for (j = 0; j < ninputs; j++)
6580 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6581 break;
6582 if (j != ninputs)
6583 continue;
6584
3f982e5a 6585 /* Avoid changing the same input several times. For
6586 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6587 change `in' only once (to out1), rather than changing it
6588 first to out1 and afterwards to out2. */
6589 if (i > 0)
6590 {
6591 for (j = 0; j < noutputs; j++)
6592 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6593 break;
6594 if (j != noutputs)
6595 continue;
6596 }
6597 output_matched[match] = true;
6598
9dc6d5bb 6599 start_sequence ();
d069e0d3 6600 emit_move_insn (output, input);
9dc6d5bb 6601 insns = get_insns ();
6602 end_sequence ();
9dc6d5bb 6603 emit_insn_before (insns, insn);
d069e0d3 6604
6605 /* Now replace all mentions of the input with output. We can't
f0b5f617 6606 just replace the occurrence in inputs[i], as the register might
d069e0d3 6607 also be used in some other input (or even in an address of an
6608 output), which would mean possibly increasing the number of
6609 inputs by one (namely 'output' in addition), which might pose
6610 too complicated a problem for reload to solve. E.g. this situation:
6611
6612 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6613
c7684b8e 6614 Here 'input' is used in two occurrences as input (once for the
d069e0d3 6615 input operand, once for the address in the second output operand).
f0b5f617 6616 If we replaced only the occurrence of the input operand (to
d069e0d3 6617 make the operands match), we would be left with this:
6618
6619 output = input
6620 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6621
6622 Now we suddenly have two different input values (containing the same
6623 value, but different pseudos) where we formerly had only one.
6624 With more complicated asms this might lead to reload failures
6625 which wouldn't have happened without this pass. So, iterate over
c7684b8e 6626 all operands and replace all occurrences of the register used. */
d069e0d3 6627 for (j = 0; j < noutputs; j++)
f211ad17 6628 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
d069e0d3 6629 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6630 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6631 input, output);
6632 for (j = 0; j < ninputs; j++)
6633 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6634 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6635 input, output);
6636
9dc6d5bb 6637 changed = true;
6638 }
6639
6640 if (changed)
6641 df_insn_rescan (insn);
6642}
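
/* Hedged illustration (not from the original sources): source code such as

     int x = 1;
     asm ("incl %0" : "+r" (x));

   is rewritten early into an "=r" output with a matching "0" input, as
   described in the comment before this function.  If SSA leaves the two
   operands as different pseudos, the move emitted above restores the
   match, so reload never needs a scratch hard register just to satisfy
   the constraint.  The x86 mnemonic is only an example.  */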
6643
bdb8985a 6644/* Add the decl D to the local_decls list of FUN. */
6645
6646void
6647add_local_decl (struct function *fun, tree d)
6648{
53e9c5c4 6649 gcc_assert (VAR_P (d));
bdb8985a 6650 vec_safe_push (fun->local_decls, d);
6651}
6652
65b0537f 6653namespace {
6654
6655const pass_data pass_data_match_asm_constraints =
6656{
6657 RTL_PASS, /* type */
6658 "asmcons", /* name */
6659 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 6660 TV_NONE, /* tv_id */
6661 0, /* properties_required */
6662 0, /* properties_provided */
6663 0, /* properties_destroyed */
6664 0, /* todo_flags_start */
6665 0, /* todo_flags_finish */
6666};
6667
6668class pass_match_asm_constraints : public rtl_opt_pass
6669{
6670public:
6671 pass_match_asm_constraints (gcc::context *ctxt)
6672 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6673 {}
6674
6675 /* opt_pass methods: */
6676 virtual unsigned int execute (function *);
6677
6678}; // class pass_match_asm_constraints
6679
6680unsigned
6681pass_match_asm_constraints::execute (function *fun)
9dc6d5bb 6682{
6683 basic_block bb;
8bb2625b 6684 rtx_insn *insn;
6685 rtx pat, *p_sets;
9dc6d5bb 6686 int noutputs;
6687
18d50ae6 6688 if (!crtl->has_asm_statement)
9dc6d5bb 6689 return 0;
6690
6691 df_set_flags (DF_DEFER_INSN_RESCAN);
65b0537f 6692 FOR_EACH_BB_FN (bb, fun)
9dc6d5bb 6693 {
6694 FOR_BB_INSNS (bb, insn)
6695 {
6696 if (!INSN_P (insn))
6697 continue;
6698
6699 pat = PATTERN (insn);
6700 if (GET_CODE (pat) == PARALLEL)
6701 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6702 else if (GET_CODE (pat) == SET)
6703 p_sets = &PATTERN (insn), noutputs = 1;
6704 else
6705 continue;
6706
6707 if (GET_CODE (*p_sets) == SET
6708 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6709 match_asm_constraints_1 (insn, p_sets, noutputs);
6710 }
6711 }
6712
6713 return TODO_df_finish;
6714}
6715
cbe8bda8 6716} // anon namespace
6717
6718rtl_opt_pass *
6719make_pass_match_asm_constraints (gcc::context *ctxt)
6720{
6721 return new pass_match_asm_constraints (ctxt);
6722}
6723
35901471 6724
1f3233d1 6725#include "gt-function.h"