gcc/function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "rtl-error.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "tree.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "stringpool.h"
47 #include "flags.h"
48 #include "except.h"
49 #include "hard-reg-set.h"
50 #include "function.h"
51 #include "rtl.h"
52 #include "insn-config.h"
53 #include "expmed.h"
54 #include "dojump.h"
55 #include "explow.h"
56 #include "calls.h"
57 #include "emit-rtl.h"
58 #include "stmt.h"
59 #include "expr.h"
60 #include "insn-codes.h"
61 #include "optabs.h"
62 #include "libfuncs.h"
63 #include "regs.h"
64 #include "recog.h"
65 #include "output.h"
66 #include "tm_p.h"
67 #include "langhooks.h"
68 #include "target.h"
69 #include "common/common-target.h"
70 #include "gimple-expr.h"
71 #include "gimplify.h"
72 #include "tree-pass.h"
73 #include "predict.h"
74 #include "dominance.h"
75 #include "cfg.h"
76 #include "cfgrtl.h"
77 #include "cfganal.h"
78 #include "cfgbuild.h"
79 #include "cfgcleanup.h"
80 #include "basic-block.h"
81 #include "df.h"
82 #include "params.h"
83 #include "bb-reorder.h"
84 #include "shrink-wrap.h"
85 #include "toplev.h"
86 #include "rtl-iter.h"
87 #include "tree-chkp.h"
88 #include "rtl-chkp.h"
89
90 /* So we can assign to cfun in this file. */
91 #undef cfun
92
93 #ifndef STACK_ALIGNMENT_NEEDED
94 #define STACK_ALIGNMENT_NEEDED 1
95 #endif
96
97 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
98
99 /* Round a value down to the largest multiple of the required alignment
100 that does not exceed it. Avoid using division in case the value is
101 negative. Assume the alignment is a power of two. */
102 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
103
104 /* Similar, but round up to the smallest multiple of the alignment
105 that is not less than the value. */
106 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
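/* Editorial sketch (not part of the original file): a few worked values
   showing how the two rounding macros behave for a power-of-two alignment
   of 16, including negative values, which is why division is avoided.  */
#if 0
static void
rounding_macro_examples (void)
{
  gcc_assert (FLOOR_ROUND (37, 16) == 32);	/* round down */
  gcc_assert (CEIL_ROUND (37, 16) == 48);	/* round up */
  gcc_assert (FLOOR_ROUND (-20, 16) == -32);
  gcc_assert (CEIL_ROUND (-20, 16) == -16);
}
#endif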
107
108 /* Nonzero once virtual register instantiation has been done.
109 assign_stack_local uses frame_pointer_rtx when this is nonzero.
110 calls.c:emit_library_call_value_1 uses it to set up
111 post-instantiation libcalls. */
112 int virtuals_instantiated;
113
114 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
115 static GTY(()) int funcdef_no;
116
117 /* These variables hold pointers to functions to create and destroy
118 target specific, per-function data structures. */
119 struct machine_function * (*init_machine_status) (void);
120
121 /* The currently compiled function. */
122 struct function *cfun = 0;
123
124 /* These hashes record the prologue and epilogue insns. */
125
126 struct insn_cache_hasher : ggc_cache_hasher<rtx>
127 {
128 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
129 static bool equal (rtx a, rtx b) { return a == b; }
130 };
131
132 static GTY((cache))
133 hash_table<insn_cache_hasher> *prologue_insn_hash;
134 static GTY((cache))
135 hash_table<insn_cache_hasher> *epilogue_insn_hash;
136 \f
137
138 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
139 vec<tree, va_gc> *types_used_by_cur_var_decl;
140
141 /* Forward declarations. */
142
143 static struct temp_slot *find_temp_slot_from_address (rtx);
144 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
145 static void pad_below (struct args_size *, machine_mode, tree);
146 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
147 static int all_blocks (tree, tree *);
148 static tree *get_block_vector (tree, int *);
149 extern tree debug_find_var_in_block_tree (tree, tree);
150 /* We always define `record_insns' even if it's not used so that we
151 can always export `prologue_epilogue_contains'. */
152 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
153 ATTRIBUTE_UNUSED;
154 static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
155 static void prepare_function_start (void);
156 static void do_clobber_return_reg (rtx, void *);
157 static void do_use_return_reg (rtx, void *);
158 \f
159 /* Stack of nested functions. */
160 /* Keep track of the cfun stack. */
161
162 typedef struct function *function_p;
163
164 static vec<function_p> function_context_stack;
165
166 /* Save the current context for compilation of a nested function.
167 This is called from language-specific code. */
168
169 void
170 push_function_context (void)
171 {
172 if (cfun == 0)
173 allocate_struct_function (NULL, false);
174
175 function_context_stack.safe_push (cfun);
176 set_cfun (NULL);
177 }
178
179 /* Restore the last saved context, at the end of a nested function.
180 This function is called from language-specific code. */
181
182 void
183 pop_function_context (void)
184 {
185 struct function *p = function_context_stack.pop ();
186 set_cfun (p);
187 current_function_decl = p->decl;
188
189 /* Reset variables that have known state during rtx generation. */
190 virtuals_instantiated = 0;
191 generating_concat_p = 1;
192 }
193
194 /* Clear out all parts of the state in F that can safely be discarded
195 after the function has been parsed, but not compiled, to let
196 garbage collection reclaim the memory. */
197
198 void
199 free_after_parsing (struct function *f)
200 {
201 f->language = 0;
202 }
203
204 /* Clear out all parts of the state in F that can safely be discarded
205 after the function has been compiled, to let garbage collection
206 reclaim the memory. */
207
208 void
209 free_after_compilation (struct function *f)
210 {
211 prologue_insn_hash = NULL;
212 epilogue_insn_hash = NULL;
213
214 free (crtl->emit.regno_pointer_align);
215
216 memset (crtl, 0, sizeof (struct rtl_data));
217 f->eh = NULL;
218 f->machine = NULL;
219 f->cfg = NULL;
220
221 regno_reg_rtx = NULL;
222 }
223 \f
224 /* Return size needed for stack frame based on slots so far allocated.
225 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
226 the caller may have to do that. */
227
228 HOST_WIDE_INT
229 get_frame_size (void)
230 {
231 if (FRAME_GROWS_DOWNWARD)
232 return -frame_offset;
233 else
234 return frame_offset;
235 }
236
237 /* Issue an error message and return TRUE if frame OFFSET overflows in
238 the signed target pointer arithmetic for function FUNC. Otherwise
239 return FALSE. */
240
241 bool
242 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
243 {
244 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
245
246 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
247 /* Leave room for the fixed part of the frame. */
248 - 64 * UNITS_PER_WORD)
249 {
250 error_at (DECL_SOURCE_LOCATION (func),
251 "total size of local objects too large");
252 return TRUE;
253 }
254
255 return FALSE;
256 }
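/* Editorial note: with a 32-bit Pmode and 8-byte words the check above
   rejects frames larger than 2^31 - 64*8 = 2147483136 bytes, i.e. it keeps
   512 bytes of headroom for the fixed part of the frame.  */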
257
258 /* Return stack slot alignment in bits for TYPE and MODE. */
259
260 static unsigned int
261 get_stack_local_alignment (tree type, machine_mode mode)
262 {
263 unsigned int alignment;
264
265 if (mode == BLKmode)
266 alignment = BIGGEST_ALIGNMENT;
267 else
268 alignment = GET_MODE_ALIGNMENT (mode);
269
270 /* Allow the front end to (possibly) increase the alignment of this
271 stack slot. */
272 if (! type)
273 type = lang_hooks.types.type_for_mode (mode, 0);
274
275 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
276 }
277
278 /* Determine whether it is possible to fit a stack slot of size SIZE and
279 alignment ALIGNMENT into an area in the stack frame that starts at
280 frame offset START and has a length of LENGTH. If so, store the frame
281 offset to be used for the stack slot in *POFFSET and return true;
282 return false otherwise. This function will extend the frame size when
283 given a start/length pair that lies at the end of the frame. */
284
285 static bool
286 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
287 HOST_WIDE_INT size, unsigned int alignment,
288 HOST_WIDE_INT *poffset)
289 {
290 HOST_WIDE_INT this_frame_offset;
291 int frame_off, frame_alignment, frame_phase;
292
293 /* Calculate how many bytes the start of local variables is off from
294 stack alignment. */
295 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
296 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
297 frame_phase = frame_off ? frame_alignment - frame_off : 0;
298
299 /* Round the frame offset to the specified alignment. */
300
301 /* We must be careful here, since FRAME_OFFSET might be negative and
302 division with a negative dividend isn't as well defined as we might
303 like. So we instead assume that ALIGNMENT is a power of two and
304 use logical operations which are unambiguous. */
305 if (FRAME_GROWS_DOWNWARD)
306 this_frame_offset
307 = (FLOOR_ROUND (start + length - size - frame_phase,
308 (unsigned HOST_WIDE_INT) alignment)
309 + frame_phase);
310 else
311 this_frame_offset
312 = (CEIL_ROUND (start - frame_phase,
313 (unsigned HOST_WIDE_INT) alignment)
314 + frame_phase);
315
316 /* See if it fits. If this space is at the edge of the frame,
317 consider extending the frame to make it fit. Our caller relies on
318 this when allocating a new slot. */
319 if (frame_offset == start && this_frame_offset < frame_offset)
320 frame_offset = this_frame_offset;
321 else if (this_frame_offset < start)
322 return false;
323 else if (start + length == frame_offset
324 && this_frame_offset + size > start + length)
325 frame_offset = this_frame_offset + size;
326 else if (this_frame_offset + size > start + length)
327 return false;
328
329 *poffset = this_frame_offset;
330 return true;
331 }
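/* Worked example (editorial note): on a FRAME_GROWS_DOWNWARD target with
   frame_phase 0, asking for a 4-byte slot with 8-byte alignment in a free
   area starting at -20 with length 12 gives
     this_frame_offset = FLOOR_ROUND (-20 + 12 - 4, 8) = -16,
   which lies inside the area, so *POFFSET becomes -16; the caller then puts
   the leftover pieces [-20, -16) and [-12, -8) back on the free list.  */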
332
333 /* Create a new frame_space structure describing free space in the stack
334 frame beginning at START and ending at END, and chain it into the
335 function's frame_space_list. */
336
337 static void
338 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
339 {
340 struct frame_space *space = ggc_alloc<frame_space> ();
341 space->next = crtl->frame_space_list;
342 crtl->frame_space_list = space;
343 space->start = start;
344 space->length = end - start;
345 }
346
347 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
348 with machine mode MODE.
349
350 ALIGN controls the amount of alignment for the address of the slot:
351 0 means according to MODE,
352 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
353 -2 means use BITS_PER_UNIT,
354 positive specifies alignment boundary in bits.
355
356 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
357 alignment and ASLK_RECORD_PAD bit set if we should remember
358 extra space we allocated for alignment purposes. When we are
359 called from assign_stack_temp_for_type, it is not set so we don't
360 track the same stack slot in two independent lists.
361
362 We do not round to stack_boundary here. */
363
364 rtx
365 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
366 int align, int kind)
367 {
368 rtx x, addr;
369 int bigend_correction = 0;
370 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
371 unsigned int alignment, alignment_in_bits;
372
373 if (align == 0)
374 {
375 alignment = get_stack_local_alignment (NULL, mode);
376 alignment /= BITS_PER_UNIT;
377 }
378 else if (align == -1)
379 {
380 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
381 size = CEIL_ROUND (size, alignment);
382 }
383 else if (align == -2)
384 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
385 else
386 alignment = align / BITS_PER_UNIT;
387
388 alignment_in_bits = alignment * BITS_PER_UNIT;
389
390 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
391 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
392 {
393 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
394 alignment = alignment_in_bits / BITS_PER_UNIT;
395 }
396
397 if (SUPPORTS_STACK_ALIGNMENT)
398 {
399 if (crtl->stack_alignment_estimated < alignment_in_bits)
400 {
401 if (!crtl->stack_realign_processed)
402 crtl->stack_alignment_estimated = alignment_in_bits;
403 else
404 {
405 /* If stack is realigned and stack alignment value
406 hasn't been finalized, it is OK not to increase
407 stack_alignment_estimated. The bigger alignment
408 requirement is recorded in stack_alignment_needed
409 below. */
410 gcc_assert (!crtl->stack_realign_finalized);
411 if (!crtl->stack_realign_needed)
412 {
413 /* It is OK to reduce the alignment as long as the
414 requested size is 0 or the estimated stack
415 alignment >= mode alignment. */
416 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
417 || size == 0
418 || (crtl->stack_alignment_estimated
419 >= GET_MODE_ALIGNMENT (mode)));
420 alignment_in_bits = crtl->stack_alignment_estimated;
421 alignment = alignment_in_bits / BITS_PER_UNIT;
422 }
423 }
424 }
425 }
426
427 if (crtl->stack_alignment_needed < alignment_in_bits)
428 crtl->stack_alignment_needed = alignment_in_bits;
429 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
430 crtl->max_used_stack_slot_alignment = alignment_in_bits;
431
432 if (mode != BLKmode || size != 0)
433 {
434 if (kind & ASLK_RECORD_PAD)
435 {
436 struct frame_space **psp;
437
438 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
439 {
440 struct frame_space *space = *psp;
441 if (!try_fit_stack_local (space->start, space->length, size,
442 alignment, &slot_offset))
443 continue;
444 *psp = space->next;
445 if (slot_offset > space->start)
446 add_frame_space (space->start, slot_offset);
447 if (slot_offset + size < space->start + space->length)
448 add_frame_space (slot_offset + size,
449 space->start + space->length);
450 goto found_space;
451 }
452 }
453 }
454 else if (!STACK_ALIGNMENT_NEEDED)
455 {
456 slot_offset = frame_offset;
457 goto found_space;
458 }
459
460 old_frame_offset = frame_offset;
461
462 if (FRAME_GROWS_DOWNWARD)
463 {
464 frame_offset -= size;
465 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
466
467 if (kind & ASLK_RECORD_PAD)
468 {
469 if (slot_offset > frame_offset)
470 add_frame_space (frame_offset, slot_offset);
471 if (slot_offset + size < old_frame_offset)
472 add_frame_space (slot_offset + size, old_frame_offset);
473 }
474 }
475 else
476 {
477 frame_offset += size;
478 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
479
480 if (kind & ASLK_RECORD_PAD)
481 {
482 if (slot_offset > old_frame_offset)
483 add_frame_space (old_frame_offset, slot_offset);
484 if (slot_offset + size < frame_offset)
485 add_frame_space (slot_offset + size, frame_offset);
486 }
487 }
488
489 found_space:
490 /* On a big-endian machine, if we are allocating more space than we will use,
491 use the least significant bytes of those that are allocated. */
492 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
493 bigend_correction = size - GET_MODE_SIZE (mode);
494
495 /* If we have already instantiated virtual registers, return the actual
496 address relative to the frame pointer. */
497 if (virtuals_instantiated)
498 addr = plus_constant (Pmode, frame_pointer_rtx,
499 trunc_int_for_mode
500 (slot_offset + bigend_correction
501 + STARTING_FRAME_OFFSET, Pmode));
502 else
503 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
504 trunc_int_for_mode
505 (slot_offset + bigend_correction,
506 Pmode));
507
508 x = gen_rtx_MEM (mode, addr);
509 set_mem_align (x, alignment_in_bits);
510 MEM_NOTRAP_P (x) = 1;
511
512 stack_slot_list
513 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
514
515 if (frame_offset_overflow (frame_offset, current_function_decl))
516 frame_offset = 0;
517
518 return x;
519 }
520
521 /* Wrap up assign_stack_local_1, passing ASLK_RECORD_PAD as the last argument. */
522
523 rtx
524 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
525 {
526 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
527 }
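/* Editorial sketch (not part of the original file): typical requests for a
   frame slot; the modes and sizes shown are only examples.  */
#if 0
static void
assign_stack_local_examples (void)
{
  /* A DImode spill slot with the default, mode-derived alignment.  */
  rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

  /* A 32-byte BLKmode scratch area; an ALIGN of -1 requests
     BIGGEST_ALIGNMENT and also rounds the size up to that alignment.  */
  rtx blk = assign_stack_local (BLKmode, 32, -1);

  (void) slot; (void) blk;
}
#endif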
528 \f
529 /* In order to evaluate some expressions, such as function calls returning
530 structures in memory, we need to temporarily allocate stack locations.
531 We record each allocated temporary in the following structure.
532
533 Associated with each temporary slot is a nesting level. When we pop up
534 one level, all temporaries associated with the previous level are freed.
535 Normally, all temporaries are freed after the execution of the statement
536 in which they were created. However, if we are inside a ({...}) grouping,
537 the result may be in a temporary and hence must be preserved. If the
538 result could be in a temporary, we preserve it if we can determine which
539 one it is in. If we cannot determine which temporary may contain the
540 result, all temporaries are preserved. A temporary is preserved by
541 pretending it was allocated at the previous nesting level. */
542
543 struct GTY(()) temp_slot {
544 /* Points to next temporary slot. */
545 struct temp_slot *next;
546 /* Points to previous temporary slot. */
547 struct temp_slot *prev;
548 /* The rtx used to reference the slot. */
549 rtx slot;
550 /* The size, in units, of the slot. */
551 HOST_WIDE_INT size;
552 /* The type of the object in the slot, or zero if it doesn't correspond
553 to a type. We use this to determine whether a slot can be reused.
554 It can be reused if objects of the type of the new slot will always
555 conflict with objects of the type of the old slot. */
556 tree type;
557 /* The alignment (in bits) of the slot. */
558 unsigned int align;
559 /* Nonzero if this temporary is currently in use. */
560 char in_use;
561 /* Nesting level at which this slot is being used. */
562 int level;
563 /* The offset of the slot from the frame_pointer, including extra space
564 for alignment. This info is for combine_temp_slots. */
565 HOST_WIDE_INT base_offset;
566 /* The size of the slot, including extra space for alignment. This
567 info is for combine_temp_slots. */
568 HOST_WIDE_INT full_size;
569 };
570
571 /* Entry for the below hash table. */
572 struct GTY((for_user)) temp_slot_address_entry {
573 hashval_t hash;
574 rtx address;
575 struct temp_slot *temp_slot;
576 };
577
578 struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
579 {
580 static hashval_t hash (temp_slot_address_entry *);
581 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
582 };
583
584 /* A table of addresses that represent a stack slot. The table is a mapping
585 from address RTXen to a temp slot. */
586 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
587 static size_t n_temp_slots_in_use;
588
589 /* Removes temporary slot TEMP from LIST. */
590
591 static void
592 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
593 {
594 if (temp->next)
595 temp->next->prev = temp->prev;
596 if (temp->prev)
597 temp->prev->next = temp->next;
598 else
599 *list = temp->next;
600
601 temp->prev = temp->next = NULL;
602 }
603
604 /* Inserts temporary slot TEMP to LIST. */
605
606 static void
607 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
608 {
609 temp->next = *list;
610 if (*list)
611 (*list)->prev = temp;
612 temp->prev = NULL;
613 *list = temp;
614 }
615
616 /* Returns the list of used temp slots at LEVEL. */
617
618 static struct temp_slot **
619 temp_slots_at_level (int level)
620 {
621 if (level >= (int) vec_safe_length (used_temp_slots))
622 vec_safe_grow_cleared (used_temp_slots, level + 1);
623
624 return &(*used_temp_slots)[level];
625 }
626
627 /* Returns the maximal temporary slot level. */
628
629 static int
630 max_slot_level (void)
631 {
632 if (!used_temp_slots)
633 return -1;
634
635 return used_temp_slots->length () - 1;
636 }
637
638 /* Moves temporary slot TEMP to LEVEL. */
639
640 static void
641 move_slot_to_level (struct temp_slot *temp, int level)
642 {
643 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
644 insert_slot_to_list (temp, temp_slots_at_level (level));
645 temp->level = level;
646 }
647
648 /* Make temporary slot TEMP available. */
649
650 static void
651 make_slot_available (struct temp_slot *temp)
652 {
653 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
654 insert_slot_to_list (temp, &avail_temp_slots);
655 temp->in_use = 0;
656 temp->level = -1;
657 n_temp_slots_in_use--;
658 }
659
660 /* Compute the hash value for an address -> temp slot mapping.
661 The value is cached on the mapping entry. */
662 static hashval_t
663 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
664 {
665 int do_not_record = 0;
666 return hash_rtx (t->address, GET_MODE (t->address),
667 &do_not_record, NULL, false);
668 }
669
670 /* Return the hash value for an address -> temp slot mapping. */
671 hashval_t
672 temp_address_hasher::hash (temp_slot_address_entry *t)
673 {
674 return t->hash;
675 }
676
677 /* Compare two address -> temp slot mapping entries. */
678 bool
679 temp_address_hasher::equal (temp_slot_address_entry *t1,
680 temp_slot_address_entry *t2)
681 {
682 return exp_equiv_p (t1->address, t2->address, 0, true);
683 }
684
685 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
686 static void
687 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
688 {
689 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
690 t->address = address;
691 t->temp_slot = temp_slot;
692 t->hash = temp_slot_address_compute_hash (t);
693 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
694 }
695
696 /* Remove an address -> temp slot mapping entry if the temp slot is
697 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
698 int
699 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
700 {
701 const struct temp_slot_address_entry *t = *slot;
702 if (! t->temp_slot->in_use)
703 temp_slot_address_table->clear_slot (slot);
704 return 1;
705 }
706
707 /* Remove all mappings of addresses to unused temp slots. */
708 static void
709 remove_unused_temp_slot_addresses (void)
710 {
711 /* Use quicker clearing if there aren't any active temp slots. */
712 if (n_temp_slots_in_use)
713 temp_slot_address_table->traverse
714 <void *, remove_unused_temp_slot_addresses_1> (NULL);
715 else
716 temp_slot_address_table->empty ();
717 }
718
719 /* Find the temp slot corresponding to the object at address X. */
720
721 static struct temp_slot *
722 find_temp_slot_from_address (rtx x)
723 {
724 struct temp_slot *p;
725 struct temp_slot_address_entry tmp, *t;
726
727 /* First try the easy way:
728 See if X exists in the address -> temp slot mapping. */
729 tmp.address = x;
730 tmp.temp_slot = NULL;
731 tmp.hash = temp_slot_address_compute_hash (&tmp);
732 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
733 if (t)
734 return t->temp_slot;
735
736 /* If we have a sum involving a register, see if it points to a temp
737 slot. */
738 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
739 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
740 return p;
741 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
742 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
743 return p;
744
745 /* Last resort: Address is a virtual stack var address. */
746 if (GET_CODE (x) == PLUS
747 && XEXP (x, 0) == virtual_stack_vars_rtx
748 && CONST_INT_P (XEXP (x, 1)))
749 {
750 int i;
751 for (i = max_slot_level (); i >= 0; i--)
752 for (p = *temp_slots_at_level (i); p; p = p->next)
753 {
754 if (INTVAL (XEXP (x, 1)) >= p->base_offset
755 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
756 return p;
757 }
758 }
759
760 return NULL;
761 }
762 \f
763 /* Allocate a temporary stack slot and record it for possible later
764 reuse.
765
766 MODE is the machine mode to be given to the returned rtx.
767
768 SIZE is the size in units of the space required. We do no rounding here
769 since assign_stack_local will do any required rounding.
770
771 TYPE is the type that will be used for the stack slot. */
772
773 rtx
774 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
775 tree type)
776 {
777 unsigned int align;
778 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
779 rtx slot;
780
781 /* If SIZE is -1 it means that somebody tried to allocate a temporary
782 of a variable size. */
783 gcc_assert (size != -1);
784
785 align = get_stack_local_alignment (type, mode);
786
787 /* Try to find an available, already-allocated temporary of the proper
788 mode which meets the size and alignment requirements. Choose the
789 smallest one with the closest alignment.
790
791 If assign_stack_temp is called outside of the tree->rtl expansion,
792 we cannot reuse the stack slots (that may still refer to
793 VIRTUAL_STACK_VARS_REGNUM). */
794 if (!virtuals_instantiated)
795 {
796 for (p = avail_temp_slots; p; p = p->next)
797 {
798 if (p->align >= align && p->size >= size
799 && GET_MODE (p->slot) == mode
800 && objects_must_conflict_p (p->type, type)
801 && (best_p == 0 || best_p->size > p->size
802 || (best_p->size == p->size && best_p->align > p->align)))
803 {
804 if (p->align == align && p->size == size)
805 {
806 selected = p;
807 cut_slot_from_list (selected, &avail_temp_slots);
808 best_p = 0;
809 break;
810 }
811 best_p = p;
812 }
813 }
814 }
815
816 /* Make our best, if any, the one to use. */
817 if (best_p)
818 {
819 selected = best_p;
820 cut_slot_from_list (selected, &avail_temp_slots);
821
822 /* If there are enough aligned bytes left over, make them into a new
823 temp_slot so that the extra bytes don't get wasted. Do this only
824 for BLKmode slots, so that we can be sure of the alignment. */
825 if (GET_MODE (best_p->slot) == BLKmode)
826 {
827 int alignment = best_p->align / BITS_PER_UNIT;
828 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
829
830 if (best_p->size - rounded_size >= alignment)
831 {
832 p = ggc_alloc<temp_slot> ();
833 p->in_use = 0;
834 p->size = best_p->size - rounded_size;
835 p->base_offset = best_p->base_offset + rounded_size;
836 p->full_size = best_p->full_size - rounded_size;
837 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
838 p->align = best_p->align;
839 p->type = best_p->type;
840 insert_slot_to_list (p, &avail_temp_slots);
841
842 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
843 stack_slot_list);
844
845 best_p->size = rounded_size;
846 best_p->full_size = rounded_size;
847 }
848 }
849 }
850
851 /* If we still didn't find one, make a new temporary. */
852 if (selected == 0)
853 {
854 HOST_WIDE_INT frame_offset_old = frame_offset;
855
856 p = ggc_alloc<temp_slot> ();
857
858 /* We are passing an explicit alignment request to assign_stack_local.
859 One side effect of that is assign_stack_local will not round SIZE
860 to ensure the frame offset remains suitably aligned.
861
862 So for requests which depended on the rounding of SIZE, we go ahead
863 and round it now. We also make sure ALIGNMENT is at least
864 BIGGEST_ALIGNMENT. */
865 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
866 p->slot = assign_stack_local_1 (mode,
867 (mode == BLKmode
868 ? CEIL_ROUND (size,
869 (int) align
870 / BITS_PER_UNIT)
871 : size),
872 align, 0);
873
874 p->align = align;
875
876 /* The following slot size computation is necessary because we don't
877 know the actual size of the temporary slot until assign_stack_local
878 has performed all the frame alignment and size rounding for the
879 requested temporary. Note that extra space added for alignment
880 can be either above or below this stack slot depending on which
881 way the frame grows. We include the extra space if and only if it
882 is above this slot. */
883 if (FRAME_GROWS_DOWNWARD)
884 p->size = frame_offset_old - frame_offset;
885 else
886 p->size = size;
887
888 /* Now define the fields used by combine_temp_slots. */
889 if (FRAME_GROWS_DOWNWARD)
890 {
891 p->base_offset = frame_offset;
892 p->full_size = frame_offset_old - frame_offset;
893 }
894 else
895 {
896 p->base_offset = frame_offset_old;
897 p->full_size = frame_offset - frame_offset_old;
898 }
899
900 selected = p;
901 }
902
903 p = selected;
904 p->in_use = 1;
905 p->type = type;
906 p->level = temp_slot_level;
907 n_temp_slots_in_use++;
908
909 pp = temp_slots_at_level (p->level);
910 insert_slot_to_list (p, pp);
911 insert_temp_slot_address (XEXP (p->slot, 0), p);
912
913 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
914 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
915 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
916
917 /* If we know the alias set for the memory that will be used, use
918 it. If there's no TYPE, then we don't know anything about the
919 alias set for the memory. */
920 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
921 set_mem_align (slot, align);
922
923 /* If a type is specified, set the relevant flags. */
924 if (type != 0)
925 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
926 MEM_NOTRAP_P (slot) = 1;
927
928 return slot;
929 }
930
931 /* Allocate a temporary stack slot and record it for possible later
932 reuse. The first two arguments are the same as in the preceding function. */
933
934 rtx
935 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
936 {
937 return assign_stack_temp_for_type (mode, size, NULL_TREE);
938 }
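/* Editorial sketch (not part of the original file): a typical request for a
   temporary, e.g. for a call that returns a small structure in memory.
   TYPE stands for whatever tree type the caller has at hand.  */
#if 0
static rtx
assign_stack_temp_example (tree type)
{
  return assign_stack_temp_for_type (TYPE_MODE (type),
				     int_size_in_bytes (type), type);
}
#endif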
939 \f
940 /* Assign a temporary.
941 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
942 and so that should be used in error messages. In either case, we
943 allocate a temporary of the given type.
944 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
945 it is 0 if a register is OK.
946 DONT_PROMOTE is 1 if we should not promote values in register
947 to wider modes. */
948
949 rtx
950 assign_temp (tree type_or_decl, int memory_required,
951 int dont_promote ATTRIBUTE_UNUSED)
952 {
953 tree type, decl;
954 machine_mode mode;
955 #ifdef PROMOTE_MODE
956 int unsignedp;
957 #endif
958
959 if (DECL_P (type_or_decl))
960 decl = type_or_decl, type = TREE_TYPE (decl);
961 else
962 decl = NULL, type = type_or_decl;
963
964 mode = TYPE_MODE (type);
965 #ifdef PROMOTE_MODE
966 unsignedp = TYPE_UNSIGNED (type);
967 #endif
968
969 if (mode == BLKmode || memory_required)
970 {
971 HOST_WIDE_INT size = int_size_in_bytes (type);
972 rtx tmp;
973
974 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
975 problems with allocating the stack space. */
976 if (size == 0)
977 size = 1;
978
979 /* Unfortunately, we don't yet know how to allocate variable-sized
980 temporaries. However, sometimes we can find a fixed upper limit on
981 the size, so try that instead. */
982 else if (size == -1)
983 size = max_int_size_in_bytes (type);
984
985 /* The size of the temporary may be too large to fit into an integer. */
986 /* ??? Not sure this should happen except for user silliness, so limit
987 this to things that aren't compiler-generated temporaries. The
988 rest of the time we'll die in assign_stack_temp_for_type. */
989 if (decl && size == -1
990 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
991 {
992 error ("size of variable %q+D is too large", decl);
993 size = 1;
994 }
995
996 tmp = assign_stack_temp_for_type (mode, size, type);
997 return tmp;
998 }
999
1000 #ifdef PROMOTE_MODE
1001 if (! dont_promote)
1002 mode = promote_mode (type, mode, &unsignedp);
1003 #endif
1004
1005 return gen_reg_rtx (mode);
1006 }
1007 \f
1008 /* Combine temporary stack slots which are adjacent on the stack.
1009
1010 This allows for better use of already allocated stack space. This is only
1011 done for BLKmode slots because we can be sure that we won't have alignment
1012 problems in this case. */
1013
1014 static void
1015 combine_temp_slots (void)
1016 {
1017 struct temp_slot *p, *q, *next, *next_q;
1018 int num_slots;
1019
1020 /* We can't combine slots, because the information about which slot
1021 is in which alias set will be lost. */
1022 if (flag_strict_aliasing)
1023 return;
1024
1025 /* If there are a lot of temp slots, don't do anything unless
1026 high levels of optimization are enabled. */
1027 if (! flag_expensive_optimizations)
1028 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1029 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1030 return;
1031
1032 for (p = avail_temp_slots; p; p = next)
1033 {
1034 int delete_p = 0;
1035
1036 next = p->next;
1037
1038 if (GET_MODE (p->slot) != BLKmode)
1039 continue;
1040
1041 for (q = p->next; q; q = next_q)
1042 {
1043 int delete_q = 0;
1044
1045 next_q = q->next;
1046
1047 if (GET_MODE (q->slot) != BLKmode)
1048 continue;
1049
1050 if (p->base_offset + p->full_size == q->base_offset)
1051 {
1052 /* Q comes after P; combine Q into P. */
1053 p->size += q->size;
1054 p->full_size += q->full_size;
1055 delete_q = 1;
1056 }
1057 else if (q->base_offset + q->full_size == p->base_offset)
1058 {
1059 /* P comes after Q; combine P into Q. */
1060 q->size += p->size;
1061 q->full_size += p->full_size;
1062 delete_p = 1;
1063 break;
1064 }
1065 if (delete_q)
1066 cut_slot_from_list (q, &avail_temp_slots);
1067 }
1068
1069 /* Either delete P or advance past it. */
1070 if (delete_p)
1071 cut_slot_from_list (p, &avail_temp_slots);
1072 }
1073 }
1074 \f
1075 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1076 slot that previously was known by OLD_RTX. */
1077
1078 void
1079 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1080 {
1081 struct temp_slot *p;
1082
1083 if (rtx_equal_p (old_rtx, new_rtx))
1084 return;
1085
1086 p = find_temp_slot_from_address (old_rtx);
1087
1088 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1089 NEW_RTX is a register, see if one operand of the PLUS is a
1090 temporary location. If so, NEW_RTX points into it. Otherwise,
1091 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1092 in common between them; if so, try a recursive call on those
1093 values. */
1094 if (p == 0)
1095 {
1096 if (GET_CODE (old_rtx) != PLUS)
1097 return;
1098
1099 if (REG_P (new_rtx))
1100 {
1101 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1102 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1103 return;
1104 }
1105 else if (GET_CODE (new_rtx) != PLUS)
1106 return;
1107
1108 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1109 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1110 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1111 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1112 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1113 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1114 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1115 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1116
1117 return;
1118 }
1119
1120 /* Otherwise add an alias for the temp's address. */
1121 insert_temp_slot_address (new_rtx, p);
1122 }
1123
1124 /* If X could be a reference to a temporary slot, mark that slot as
1125 belonging to the level one higher than the current level. If X
1126 matched one of our slots, just mark that one. Otherwise, we can't
1127 easily predict which it is, so upgrade all of them.
1128
1129 This is called when an ({...}) construct occurs and a statement
1130 returns a value in memory. */
1131
1132 void
1133 preserve_temp_slots (rtx x)
1134 {
1135 struct temp_slot *p = 0, *next;
1136
1137 if (x == 0)
1138 return;
1139
1140 /* If X is a register that is being used as a pointer, see if we have
1141 a temporary slot we know it points to. */
1142 if (REG_P (x) && REG_POINTER (x))
1143 p = find_temp_slot_from_address (x);
1144
1145 /* If X is not in memory or is at a constant address, it cannot be in
1146 a temporary slot. */
1147 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1148 return;
1149
1150 /* First see if we can find a match. */
1151 if (p == 0)
1152 p = find_temp_slot_from_address (XEXP (x, 0));
1153
1154 if (p != 0)
1155 {
1156 if (p->level == temp_slot_level)
1157 move_slot_to_level (p, temp_slot_level - 1);
1158 return;
1159 }
1160
1161 /* Otherwise, preserve all non-kept slots at this level. */
1162 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1163 {
1164 next = p->next;
1165 move_slot_to_level (p, temp_slot_level - 1);
1166 }
1167 }
1168
1169 /* Free all temporaries used so far. This is normally called at the
1170 end of generating code for a statement. */
1171
1172 void
1173 free_temp_slots (void)
1174 {
1175 struct temp_slot *p, *next;
1176 bool some_available = false;
1177
1178 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1179 {
1180 next = p->next;
1181 make_slot_available (p);
1182 some_available = true;
1183 }
1184
1185 if (some_available)
1186 {
1187 remove_unused_temp_slot_addresses ();
1188 combine_temp_slots ();
1189 }
1190 }
1191
1192 /* Push deeper into the nesting level for stack temporaries. */
1193
1194 void
1195 push_temp_slots (void)
1196 {
1197 temp_slot_level++;
1198 }
1199
1200 /* Pop a temporary nesting level. All slots in use in the current level
1201 are freed. */
1202
1203 void
1204 pop_temp_slots (void)
1205 {
1206 free_temp_slots ();
1207 temp_slot_level--;
1208 }
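/* Editorial sketch (not part of the original file): the usual bracketing
   pattern in the expanders.  EXP stands for some tree whose value might end
   up in a temporary slot.  */
#if 0
static rtx
temp_slot_bracketing_example (tree exp)
{
  push_temp_slots ();
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* Keep the slot (if any) holding VAL alive past the pop below by moving
     it to the enclosing level.  */
  preserve_temp_slots (val);
  pop_temp_slots ();
  return val;
}
#endif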
1209
1210 /* Initialize temporary slots. */
1211
1212 void
1213 init_temp_slots (void)
1214 {
1215 /* We have not allocated any temporaries yet. */
1216 avail_temp_slots = 0;
1217 vec_alloc (used_temp_slots, 0);
1218 temp_slot_level = 0;
1219 n_temp_slots_in_use = 0;
1220
1221 /* Set up the table to map addresses to temp slots. */
1222 if (! temp_slot_address_table)
1223 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1224 else
1225 temp_slot_address_table->empty ();
1226 }
1227 \f
1228 /* Functions and data structures to keep track of the values hard regs
1229 had at the start of the function. */
1230
1231 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1232 and has_hard_reg_initial_val. */
1233 typedef struct GTY(()) initial_value_pair {
1234 rtx hard_reg;
1235 rtx pseudo;
1236 } initial_value_pair;
1237 /* ??? This could be a VEC but there is currently no way to define an
1238 opaque VEC type. This could be worked around by defining struct
1239 initial_value_pair in function.h. */
1240 typedef struct GTY(()) initial_value_struct {
1241 int num_entries;
1242 int max_entries;
1243 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1244 } initial_value_struct;
1245
1246 /* If a pseudo represents an initial hard reg (or expression), return
1247 it, else return NULL_RTX. */
1248
1249 rtx
1250 get_hard_reg_initial_reg (rtx reg)
1251 {
1252 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1253 int i;
1254
1255 if (ivs == 0)
1256 return NULL_RTX;
1257
1258 for (i = 0; i < ivs->num_entries; i++)
1259 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1260 return ivs->entries[i].hard_reg;
1261
1262 return NULL_RTX;
1263 }
1264
1265 /* Make sure that there's a pseudo register of mode MODE that stores the
1266 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1267
1268 rtx
1269 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1270 {
1271 struct initial_value_struct *ivs;
1272 rtx rv;
1273
1274 rv = has_hard_reg_initial_val (mode, regno);
1275 if (rv)
1276 return rv;
1277
1278 ivs = crtl->hard_reg_initial_vals;
1279 if (ivs == 0)
1280 {
1281 ivs = ggc_alloc<initial_value_struct> ();
1282 ivs->num_entries = 0;
1283 ivs->max_entries = 5;
1284 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1285 crtl->hard_reg_initial_vals = ivs;
1286 }
1287
1288 if (ivs->num_entries >= ivs->max_entries)
1289 {
1290 ivs->max_entries += 5;
1291 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1292 ivs->max_entries);
1293 }
1294
1295 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1296 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1297
1298 return ivs->entries[ivs->num_entries++].pseudo;
1299 }
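/* Editorial sketch (not part of the original file): some back ends use this
   to recover, say, the return address that was live in a link register on
   entry.  LR_REGNUM is a stand-in for the target's real register number.  */
#if 0
  rtx entry_lr = get_hard_reg_initial_val (Pmode, LR_REGNUM);
#endif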
1300
1301 /* See if get_hard_reg_initial_val has been used to create a pseudo
1302 for the initial value of hard register REGNO in mode MODE. Return
1303 the associated pseudo if so, otherwise return NULL. */
1304
1305 rtx
1306 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1307 {
1308 struct initial_value_struct *ivs;
1309 int i;
1310
1311 ivs = crtl->hard_reg_initial_vals;
1312 if (ivs != 0)
1313 for (i = 0; i < ivs->num_entries; i++)
1314 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1315 && REGNO (ivs->entries[i].hard_reg) == regno)
1316 return ivs->entries[i].pseudo;
1317
1318 return NULL_RTX;
1319 }
1320
1321 unsigned int
1322 emit_initial_value_sets (void)
1323 {
1324 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1325 int i;
1326 rtx_insn *seq;
1327
1328 if (ivs == 0)
1329 return 0;
1330
1331 start_sequence ();
1332 for (i = 0; i < ivs->num_entries; i++)
1333 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1334 seq = get_insns ();
1335 end_sequence ();
1336
1337 emit_insn_at_entry (seq);
1338 return 0;
1339 }
1340
1341 /* Return the hardreg-pseudoreg initial values pair entry I and
1342 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1343 bool
1344 initial_value_entry (int i, rtx *hreg, rtx *preg)
1345 {
1346 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1347 if (!ivs || i >= ivs->num_entries)
1348 return false;
1349
1350 *hreg = ivs->entries[i].hard_reg;
1351 *preg = ivs->entries[i].pseudo;
1352 return true;
1353 }
1354 \f
1355 /* These routines are responsible for converting virtual register references
1356 to the actual hard register references once RTL generation is complete.
1357
1358 The following four variables are used for communication between the
1359 routines. They contain the offsets of the virtual registers from their
1360 respective hard registers. */
1361
1362 static int in_arg_offset;
1363 static int var_offset;
1364 static int dynamic_offset;
1365 static int out_arg_offset;
1366 static int cfa_offset;
1367
1368 /* In most machines, the stack pointer register is equivalent to the bottom
1369 of the stack. */
1370
1371 #ifndef STACK_POINTER_OFFSET
1372 #define STACK_POINTER_OFFSET 0
1373 #endif
1374
1375 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1376 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1377 #endif
1378
1379 /* If not defined, pick an appropriate default for the offset of dynamically
1380 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1381 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1382
1383 #ifndef STACK_DYNAMIC_OFFSET
1384
1385 /* The bottom of the stack points to the actual arguments. If
1386 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1387 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1388 stack space for register parameters is not pushed by the caller, but
1389 rather is part of the fixed stack areas and hence not included in
1390 `crtl->outgoing_args_size'. Nevertheless, we must allow
1391 for it when allocating dynamic stack objects. */
1392
1393 #ifdef INCOMING_REG_PARM_STACK_SPACE
1394 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1395 ((ACCUMULATE_OUTGOING_ARGS \
1396 ? (crtl->outgoing_args_size \
1397 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1398 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1399 : 0) + (STACK_POINTER_OFFSET))
1400 #else
1401 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1402 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1403 + (STACK_POINTER_OFFSET))
1404 #endif
1405 #endif
1406
1407 \f
1408 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1409 is a virtual register, return the equivalent hard register and set the
1410 offset indirectly through the pointer. Otherwise, return 0. */
1411
1412 static rtx
1413 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1414 {
1415 rtx new_rtx;
1416 HOST_WIDE_INT offset;
1417
1418 if (x == virtual_incoming_args_rtx)
1419 {
1420 if (stack_realign_drap)
1421 {
1422 /* Replace virtual_incoming_args_rtx with internal arg
1423 pointer if DRAP is used to realign stack. */
1424 new_rtx = crtl->args.internal_arg_pointer;
1425 offset = 0;
1426 }
1427 else
1428 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1429 }
1430 else if (x == virtual_stack_vars_rtx)
1431 new_rtx = frame_pointer_rtx, offset = var_offset;
1432 else if (x == virtual_stack_dynamic_rtx)
1433 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1434 else if (x == virtual_outgoing_args_rtx)
1435 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1436 else if (x == virtual_cfa_rtx)
1437 {
1438 #ifdef FRAME_POINTER_CFA_OFFSET
1439 new_rtx = frame_pointer_rtx;
1440 #else
1441 new_rtx = arg_pointer_rtx;
1442 #endif
1443 offset = cfa_offset;
1444 }
1445 else if (x == virtual_preferred_stack_boundary_rtx)
1446 {
1447 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1448 offset = 0;
1449 }
1450 else
1451 return NULL_RTX;
1452
1453 *poffset = offset;
1454 return new_rtx;
1455 }
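/* Editorial example: once the offsets have been computed, a use of
   (plus:P virtual_stack_vars_rtx (const_int 8)) is rewritten as
   (plus:P frame_pointer_rtx (const_int (8 + var_offset))), and likewise for
   the other virtual registers and their corresponding offsets.  */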
1456
1457 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1458 registers present inside of *LOC. The expression is simplified,
1459 as much as possible, but is not to be considered "valid" in any sense
1460 implied by the target. Return true if any change is made. */
1461
1462 static bool
1463 instantiate_virtual_regs_in_rtx (rtx *loc)
1464 {
1465 if (!*loc)
1466 return false;
1467 bool changed = false;
1468 subrtx_ptr_iterator::array_type array;
1469 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1470 {
1471 rtx *loc = *iter;
1472 if (rtx x = *loc)
1473 {
1474 rtx new_rtx;
1475 HOST_WIDE_INT offset;
1476 switch (GET_CODE (x))
1477 {
1478 case REG:
1479 new_rtx = instantiate_new_reg (x, &offset);
1480 if (new_rtx)
1481 {
1482 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1483 changed = true;
1484 }
1485 iter.skip_subrtxes ();
1486 break;
1487
1488 case PLUS:
1489 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1490 if (new_rtx)
1491 {
1492 XEXP (x, 0) = new_rtx;
1493 *loc = plus_constant (GET_MODE (x), x, offset, true);
1494 changed = true;
1495 iter.skip_subrtxes ();
1496 break;
1497 }
1498
1499 /* FIXME -- from old code */
1500 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1501 we can commute the PLUS and SUBREG because pointers into the
1502 frame are well-behaved. */
1503 break;
1504
1505 default:
1506 break;
1507 }
1508 }
1509 }
1510 return changed;
1511 }
1512
1513 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1514 matches the predicate for insn CODE operand OPERAND. */
1515
1516 static int
1517 safe_insn_predicate (int code, int operand, rtx x)
1518 {
1519 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1520 }
1521
1522 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1523 registers present inside of insn. The result will be a valid insn. */
1524
1525 static void
1526 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1527 {
1528 HOST_WIDE_INT offset;
1529 int insn_code, i;
1530 bool any_change = false;
1531 rtx set, new_rtx, x;
1532 rtx_insn *seq;
1533
1534 /* There are some special cases to be handled first. */
1535 set = single_set (insn);
1536 if (set)
1537 {
1538 /* We're allowed to assign to a virtual register. This is interpreted
1539 to mean that the underlying register gets assigned the inverse
1540 transformation. This is used, for example, in the handling of
1541 non-local gotos. */
1542 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1543 if (new_rtx)
1544 {
1545 start_sequence ();
1546
1547 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1548 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1549 gen_int_mode (-offset, GET_MODE (new_rtx)));
1550 x = force_operand (x, new_rtx);
1551 if (x != new_rtx)
1552 emit_move_insn (new_rtx, x);
1553
1554 seq = get_insns ();
1555 end_sequence ();
1556
1557 emit_insn_before (seq, insn);
1558 delete_insn (insn);
1559 return;
1560 }
1561
1562 /* Handle a straight copy from a virtual register by generating a
1563 new add insn. The difference between this and falling through
1564 to the generic case is avoiding a new pseudo and eliminating a
1565 move insn in the initial rtl stream. */
1566 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1567 if (new_rtx && offset != 0
1568 && REG_P (SET_DEST (set))
1569 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1570 {
1571 start_sequence ();
1572
1573 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1574 gen_int_mode (offset,
1575 GET_MODE (SET_DEST (set))),
1576 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1577 if (x != SET_DEST (set))
1578 emit_move_insn (SET_DEST (set), x);
1579
1580 seq = get_insns ();
1581 end_sequence ();
1582
1583 emit_insn_before (seq, insn);
1584 delete_insn (insn);
1585 return;
1586 }
1587
1588 extract_insn (insn);
1589 insn_code = INSN_CODE (insn);
1590
1591 /* Handle a plus involving a virtual register by determining if the
1592 operands remain valid if they're modified in place. */
1593 if (GET_CODE (SET_SRC (set)) == PLUS
1594 && recog_data.n_operands >= 3
1595 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1596 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1597 && CONST_INT_P (recog_data.operand[2])
1598 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1599 {
1600 offset += INTVAL (recog_data.operand[2]);
1601
1602 /* If the sum is zero, then replace with a plain move. */
1603 if (offset == 0
1604 && REG_P (SET_DEST (set))
1605 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1606 {
1607 start_sequence ();
1608 emit_move_insn (SET_DEST (set), new_rtx);
1609 seq = get_insns ();
1610 end_sequence ();
1611
1612 emit_insn_before (seq, insn);
1613 delete_insn (insn);
1614 return;
1615 }
1616
1617 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1618
1619 /* Using validate_change and apply_change_group here leaves
1620 recog_data in an invalid state. Since we know exactly what
1621 we want to check, do those two by hand. */
1622 if (safe_insn_predicate (insn_code, 1, new_rtx)
1623 && safe_insn_predicate (insn_code, 2, x))
1624 {
1625 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1626 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1627 any_change = true;
1628
1629 /* Fall through into the regular operand fixup loop in
1630 order to take care of operands other than 1 and 2. */
1631 }
1632 }
1633 }
1634 else
1635 {
1636 extract_insn (insn);
1637 insn_code = INSN_CODE (insn);
1638 }
1639
1640 /* In the general case, we expect virtual registers to appear only in
1641 operands, and then only as either bare registers or inside memories. */
1642 for (i = 0; i < recog_data.n_operands; ++i)
1643 {
1644 x = recog_data.operand[i];
1645 switch (GET_CODE (x))
1646 {
1647 case MEM:
1648 {
1649 rtx addr = XEXP (x, 0);
1650
1651 if (!instantiate_virtual_regs_in_rtx (&addr))
1652 continue;
1653
1654 start_sequence ();
1655 x = replace_equiv_address (x, addr, true);
1656 /* It may happen that the address with the virtual reg
1657 was valid (e.g. based on the virtual stack reg, which might
1658 be acceptable to the predicates with all offsets), whereas
1659 the address now isn't anymore, for instance when the address
1660 is still offsetted, but the base reg isn't virtual-stack-reg
1661 anymore. Below we would do a force_reg on the whole operand,
1662 but this insn might actually only accept memory. Hence,
1663 before doing that last resort, try to reload the address into
1664 a register, so this operand stays a MEM. */
1665 if (!safe_insn_predicate (insn_code, i, x))
1666 {
1667 addr = force_reg (GET_MODE (addr), addr);
1668 x = replace_equiv_address (x, addr, true);
1669 }
1670 seq = get_insns ();
1671 end_sequence ();
1672 if (seq)
1673 emit_insn_before (seq, insn);
1674 }
1675 break;
1676
1677 case REG:
1678 new_rtx = instantiate_new_reg (x, &offset);
1679 if (new_rtx == NULL)
1680 continue;
1681 if (offset == 0)
1682 x = new_rtx;
1683 else
1684 {
1685 start_sequence ();
1686
1687 /* Careful, special mode predicates may have stuff in
1688 insn_data[insn_code].operand[i].mode that isn't useful
1689 to us for computing a new value. */
1690 /* ??? Recognize address_operand and/or "p" constraints
1691 to see if (plus new offset) is a valid address before we put
1692 this through expand_simple_binop. */
1693 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1694 gen_int_mode (offset, GET_MODE (x)),
1695 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1696 seq = get_insns ();
1697 end_sequence ();
1698 emit_insn_before (seq, insn);
1699 }
1700 break;
1701
1702 case SUBREG:
1703 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1704 if (new_rtx == NULL)
1705 continue;
1706 if (offset != 0)
1707 {
1708 start_sequence ();
1709 new_rtx = expand_simple_binop
1710 (GET_MODE (new_rtx), PLUS, new_rtx,
1711 gen_int_mode (offset, GET_MODE (new_rtx)),
1712 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1713 seq = get_insns ();
1714 end_sequence ();
1715 emit_insn_before (seq, insn);
1716 }
1717 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1718 GET_MODE (new_rtx), SUBREG_BYTE (x));
1719 gcc_assert (x);
1720 break;
1721
1722 default:
1723 continue;
1724 }
1725
1726 /* At this point, X contains the new value for the operand.
1727 Validate the new value vs the insn predicate. Note that
1728 asm insns will have insn_code -1 here. */
1729 if (!safe_insn_predicate (insn_code, i, x))
1730 {
1731 start_sequence ();
1732 if (REG_P (x))
1733 {
1734 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1735 x = copy_to_reg (x);
1736 }
1737 else
1738 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1739 seq = get_insns ();
1740 end_sequence ();
1741 if (seq)
1742 emit_insn_before (seq, insn);
1743 }
1744
1745 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1746 any_change = true;
1747 }
1748
1749 if (any_change)
1750 {
1751 /* Propagate operand changes into the duplicates. */
1752 for (i = 0; i < recog_data.n_dups; ++i)
1753 *recog_data.dup_loc[i]
1754 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1755
1756 /* Force re-recognition of the instruction for validation. */
1757 INSN_CODE (insn) = -1;
1758 }
1759
1760 if (asm_noperands (PATTERN (insn)) >= 0)
1761 {
1762 if (!check_asm_operands (PATTERN (insn)))
1763 {
1764 error_for_asm (insn, "impossible constraint in %<asm%>");
1765 /* For asm goto, instead of fixing up all the edges
1766 just clear the template and clear input operands
1767 (asm goto doesn't have any output operands). */
1768 if (JUMP_P (insn))
1769 {
1770 rtx asm_op = extract_asm_operands (PATTERN (insn));
1771 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1772 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1773 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1774 }
1775 else
1776 delete_insn (insn);
1777 }
1778 }
1779 else
1780 {
1781 if (recog_memoized (insn) < 0)
1782 fatal_insn_not_found (insn);
1783 }
1784 }
1785
1786 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1787 do any instantiation required. */
1788
1789 void
1790 instantiate_decl_rtl (rtx x)
1791 {
1792 rtx addr;
1793
1794 if (x == 0)
1795 return;
1796
1797 /* If this is a CONCAT, recurse for the pieces. */
1798 if (GET_CODE (x) == CONCAT)
1799 {
1800 instantiate_decl_rtl (XEXP (x, 0));
1801 instantiate_decl_rtl (XEXP (x, 1));
1802 return;
1803 }
1804
1805 /* If this is not a MEM, no need to do anything. Similarly if the
1806 address is a constant or a register that is not a virtual register. */
1807 if (!MEM_P (x))
1808 return;
1809
1810 addr = XEXP (x, 0);
1811 if (CONSTANT_P (addr)
1812 || (REG_P (addr)
1813 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1814 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1815 return;
1816
1817 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1818 }
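/* For illustration: a local variable given a stack slot typically has a
   DECL_RTL of the form (mem:SI (plus (reg virtual-stack-vars) (const_int 8))),
   and the rewrite above replaces the virtual register with the frame pointer
   (or its eventual replacement) plus var_offset, so that debug output refers
   to a real location.  */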
1819
1820 /* Helper for instantiate_decls called via walk_tree: Process all decls
1821 in the given DECL_VALUE_EXPR. */
1822
1823 static tree
1824 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1825 {
1826 tree t = *tp;
1827 if (! EXPR_P (t))
1828 {
1829 *walk_subtrees = 0;
1830 if (DECL_P (t))
1831 {
1832 if (DECL_RTL_SET_P (t))
1833 instantiate_decl_rtl (DECL_RTL (t));
1834 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1835 && DECL_INCOMING_RTL (t))
1836 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1837 if ((TREE_CODE (t) == VAR_DECL
1838 || TREE_CODE (t) == RESULT_DECL)
1839 && DECL_HAS_VALUE_EXPR_P (t))
1840 {
1841 tree v = DECL_VALUE_EXPR (t);
1842 walk_tree (&v, instantiate_expr, NULL, NULL);
1843 }
1844 }
1845 }
1846 return NULL;
1847 }
1848
1849 /* Subroutine of instantiate_decls: Process all decls in the given
1850 BLOCK node and all its subblocks. */
1851
1852 static void
1853 instantiate_decls_1 (tree let)
1854 {
1855 tree t;
1856
1857 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1858 {
1859 if (DECL_RTL_SET_P (t))
1860 instantiate_decl_rtl (DECL_RTL (t));
1861 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1862 {
1863 tree v = DECL_VALUE_EXPR (t);
1864 walk_tree (&v, instantiate_expr, NULL, NULL);
1865 }
1866 }
1867
1868 /* Process all subblocks. */
1869 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1870 instantiate_decls_1 (t);
1871 }
1872
1873 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1874 all virtual registers in their DECL_RTL's. */
1875
1876 static void
1877 instantiate_decls (tree fndecl)
1878 {
1879 tree decl;
1880 unsigned ix;
1881
1882 /* Process all parameters of the function. */
1883 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1884 {
1885 instantiate_decl_rtl (DECL_RTL (decl));
1886 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1887 if (DECL_HAS_VALUE_EXPR_P (decl))
1888 {
1889 tree v = DECL_VALUE_EXPR (decl);
1890 walk_tree (&v, instantiate_expr, NULL, NULL);
1891 }
1892 }
1893
1894 if ((decl = DECL_RESULT (fndecl))
1895 && TREE_CODE (decl) == RESULT_DECL)
1896 {
1897 if (DECL_RTL_SET_P (decl))
1898 instantiate_decl_rtl (DECL_RTL (decl));
1899 if (DECL_HAS_VALUE_EXPR_P (decl))
1900 {
1901 tree v = DECL_VALUE_EXPR (decl);
1902 walk_tree (&v, instantiate_expr, NULL, NULL);
1903 }
1904 }
1905
1906 /* Process the saved static chain if it exists. */
1907 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1908 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1909 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1910
1911 /* Now process all variables defined in the function or its subblocks. */
1912 instantiate_decls_1 (DECL_INITIAL (fndecl));
1913
1914 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1915 if (DECL_RTL_SET_P (decl))
1916 instantiate_decl_rtl (DECL_RTL (decl));
1917 vec_free (cfun->local_decls);
1918 }
1919
1920 /* Pass through the INSNS of function FNDECL and convert virtual register
1921 references to hard register references. */
1922
1923 static unsigned int
1924 instantiate_virtual_regs (void)
1925 {
1926 rtx_insn *insn;
1927
1928 /* Compute the offsets to use for this function. */
1929 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1930 var_offset = STARTING_FRAME_OFFSET;
1931 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1932 out_arg_offset = STACK_POINTER_OFFSET;
1933 #ifdef FRAME_POINTER_CFA_OFFSET
1934 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1935 #else
1936 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1937 #endif
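/* For reference, instantiate_new_reg maps each virtual register onto a base
   register plus one of the offsets computed above, roughly:
       virtual-incoming-args -> incoming argument pointer + in_arg_offset
       virtual-stack-vars -> frame pointer + var_offset
       virtual-stack-dynamic -> stack pointer + dynamic_offset
       virtual-outgoing-args -> stack pointer + out_arg_offset
       virtual-cfa -> frame or argument pointer + cfa_offset, per the
       #ifdef just above.  */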
1938
1939 /* Initialize recognition, indicating that volatile is OK. */
1940 init_recog ();
1941
1942 /* Scan through all the insns, instantiating every virtual register still
1943 present. */
1944 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1945 if (INSN_P (insn))
1946 {
1947 /* These patterns in the instruction stream can never be recognized.
1948 Fortunately, they shouldn't contain virtual registers either. */
1949 if (GET_CODE (PATTERN (insn)) == USE
1950 || GET_CODE (PATTERN (insn)) == CLOBBER
1951 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1952 continue;
1953 else if (DEBUG_INSN_P (insn))
1954 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1955 else
1956 instantiate_virtual_regs_in_insn (insn);
1957
1958 if (insn->deleted ())
1959 continue;
1960
1961 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1962
1963 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1964 if (CALL_P (insn))
1965 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1966 }
1967
1968 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1969 instantiate_decls (current_function_decl);
1970
1971 targetm.instantiate_decls ();
1972
1973 /* Indicate that, from now on, assign_stack_local should use
1974 frame_pointer_rtx. */
1975 virtuals_instantiated = 1;
1976
1977 return 0;
1978 }
1979
1980 namespace {
1981
1982 const pass_data pass_data_instantiate_virtual_regs =
1983 {
1984 RTL_PASS, /* type */
1985 "vregs", /* name */
1986 OPTGROUP_NONE, /* optinfo_flags */
1987 TV_NONE, /* tv_id */
1988 0, /* properties_required */
1989 0, /* properties_provided */
1990 0, /* properties_destroyed */
1991 0, /* todo_flags_start */
1992 0, /* todo_flags_finish */
1993 };
1994
1995 class pass_instantiate_virtual_regs : public rtl_opt_pass
1996 {
1997 public:
1998 pass_instantiate_virtual_regs (gcc::context *ctxt)
1999 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2000 {}
2001
2002 /* opt_pass methods: */
2003 virtual unsigned int execute (function *)
2004 {
2005 return instantiate_virtual_regs ();
2006 }
2007
2008 }; // class pass_instantiate_virtual_regs
2009
2010 } // anon namespace
2011
2012 rtl_opt_pass *
2013 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2014 {
2015 return new pass_instantiate_virtual_regs (ctxt);
2016 }
2017
2018 \f
2019 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2020 This means a type for which function calls must pass an address to the
2021 function or get an address back from the function.
2022 EXP may be a type node or an expression (whose type is tested). */
2023
2024 int
2025 aggregate_value_p (const_tree exp, const_tree fntype)
2026 {
2027 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2028 int i, regno, nregs;
2029 rtx reg;
2030
2031 if (fntype)
2032 switch (TREE_CODE (fntype))
2033 {
2034 case CALL_EXPR:
2035 {
2036 tree fndecl = get_callee_fndecl (fntype);
2037 if (fndecl)
2038 fntype = TREE_TYPE (fndecl);
2039 else if (CALL_EXPR_FN (fntype))
2040 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2041 else
2042 /* For internal functions, assume nothing needs to be
2043 returned in memory. */
2044 return 0;
2045 }
2046 break;
2047 case FUNCTION_DECL:
2048 fntype = TREE_TYPE (fntype);
2049 break;
2050 case FUNCTION_TYPE:
2051 case METHOD_TYPE:
2052 break;
2053 case IDENTIFIER_NODE:
2054 fntype = NULL_TREE;
2055 break;
2056 default:
2057 /* We don't expect other tree types here. */
2058 gcc_unreachable ();
2059 }
2060
2061 if (VOID_TYPE_P (type))
2062 return 0;
2063
2064 /* If a record should be passed the same as its first (and only) member,
2065 don't pass it as an aggregate. */
2066 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2067 return aggregate_value_p (first_field (type), fntype);
2068
2069 /* If the front end has decided that this needs to be passed by
2070 reference, do so. */
2071 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2072 && DECL_BY_REFERENCE (exp))
2073 return 1;
2074
2075 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2076 if (fntype && TREE_ADDRESSABLE (fntype))
2077 return 1;
2078
2079 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2080 and thus can't be returned in registers. */
2081 if (TREE_ADDRESSABLE (type))
2082 return 1;
2083
2084 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2085 return 1;
2086
2087 if (targetm.calls.return_in_memory (type, fntype))
2088 return 1;
2089
2090 /* Make sure we have suitable call-clobbered regs to return
2091 the value in; if not, we must return it in memory. */
2092 reg = hard_function_value (type, 0, fntype, 0);
2093
2094 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2095 it is OK. */
2096 if (!REG_P (reg))
2097 return 0;
2098
2099 regno = REGNO (reg);
2100 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2101 for (i = 0; i < nregs; i++)
2102 if (! call_used_regs[regno + i])
2103 return 1;
2104
2105 return 0;
2106 }
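/* For example, a C++ class with a nontrivial copy constructor is marked
   TREE_ADDRESSABLE and is therefore always returned in memory, whereas a
   plain 'int' normally is not; for other aggregates the answer comes from
   the target's return_in_memory hook and the register check above.  */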
2107 \f
2108 /* Return true if we should assign DECL a pseudo register; false if it
2109 should live on the local stack. */
2110
2111 bool
2112 use_register_for_decl (const_tree decl)
2113 {
2114 if (!targetm.calls.allocate_stack_slots_for_args ())
2115 return true;
2116
2117 /* Honor volatile. */
2118 if (TREE_SIDE_EFFECTS (decl))
2119 return false;
2120
2121 /* Honor addressability. */
2122 if (TREE_ADDRESSABLE (decl))
2123 return false;
2124
2125 /* Decl is implicitly addressable by bound stores and loads
2126 if it is an aggregate holding bounds. */
2127 if (chkp_function_instrumented_p (current_function_decl)
2128 && TREE_TYPE (decl)
2129 && !BOUNDED_P (decl)
2130 && chkp_type_has_pointer (TREE_TYPE (decl)))
2131 return false;
2132
2133 /* Only register-like things go in registers. */
2134 if (DECL_MODE (decl) == BLKmode)
2135 return false;
2136
2137 /* If -ffloat-store specified, don't put explicit float variables
2138 into registers. */
2139 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2140 propagates values across these stores, and it probably shouldn't. */
2141 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2142 return false;
2143
2144 /* If we're not interested in tracking debugging information for
2145 this decl, then we can certainly put it in a register. */
2146 if (DECL_IGNORED_P (decl))
2147 return true;
2148
2149 if (optimize)
2150 return true;
2151
2152 if (!DECL_REGISTER (decl))
2153 return false;
2154
2155 switch (TREE_CODE (TREE_TYPE (decl)))
2156 {
2157 case RECORD_TYPE:
2158 case UNION_TYPE:
2159 case QUAL_UNION_TYPE:
2160 /* When not optimizing, disregard register keyword for variables with
2161 types containing methods, otherwise the methods won't be callable
2162 from the debugger. */
2163 if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
2164 return false;
2165 break;
2166 default:
2167 break;
2168 }
2169
2170 return true;
2171 }
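/* For example, a 'volatile int' local or a local whose address is taken is
   kept on the stack by the checks above, while an ordinary scalar local is
   given a pseudo register when optimizing or when no debug information is
   wanted for it.  */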
2172
2173 /* Return true if TYPE should be passed by invisible reference. */
2174
2175 bool
2176 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
2177 tree type, bool named_arg)
2178 {
2179 if (type)
2180 {
2181 /* If this type contains non-trivial constructors, then it is
2182 forbidden for the middle-end to create any new copies. */
2183 if (TREE_ADDRESSABLE (type))
2184 return true;
2185
2186 /* GCC post 3.4 passes *all* variable-sized types by reference. */
2187 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2188 return true;
2189
2190 /* If a record type should be passed the same as its first (and only)
2191 member, use the type and mode of that member. */
2192 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2193 {
2194 type = TREE_TYPE (first_field (type));
2195 mode = TYPE_MODE (type);
2196 }
2197 }
2198
2199 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2200 type, named_arg);
2201 }
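/* For example, an argument of variably sized type, or of a C++ class type
   with a nontrivial copy constructor (TREE_ADDRESSABLE), is always passed by
   invisible reference; anything else is left to the target hook, which on
   some ABIs requests it for large aggregates as well.  */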
2202
2203 /* Return true if TYPE, which is passed by reference, should be callee
2204 copied instead of caller copied. */
2205
2206 bool
2207 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
2208 tree type, bool named_arg)
2209 {
2210 if (type && TREE_ADDRESSABLE (type))
2211 return false;
2212 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2213 named_arg);
2214 }
2215
2216 /* Structures to communicate between the subroutines of assign_parms.
2217 The first holds data persistent across all parameters, the second
2218 is cleared out for each parameter. */
2219
2220 struct assign_parm_data_all
2221 {
2222 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2223 should become a job of the target or otherwise be encapsulated. */
2224 CUMULATIVE_ARGS args_so_far_v;
2225 cumulative_args_t args_so_far;
2226 struct args_size stack_args_size;
2227 tree function_result_decl;
2228 tree orig_fnargs;
2229 rtx_insn *first_conversion_insn;
2230 rtx_insn *last_conversion_insn;
2231 HOST_WIDE_INT pretend_args_size;
2232 HOST_WIDE_INT extra_pretend_bytes;
2233 int reg_parm_stack_space;
2234 };
2235
2236 struct assign_parm_data_one
2237 {
2238 tree nominal_type;
2239 tree passed_type;
2240 rtx entry_parm;
2241 rtx stack_parm;
2242 machine_mode nominal_mode;
2243 machine_mode passed_mode;
2244 machine_mode promoted_mode;
2245 struct locate_and_pad_arg_data locate;
2246 int partial;
2247 BOOL_BITFIELD named_arg : 1;
2248 BOOL_BITFIELD passed_pointer : 1;
2249 BOOL_BITFIELD on_stack : 1;
2250 BOOL_BITFIELD loaded_in_reg : 1;
2251 };
2252
2253 struct bounds_parm_data
2254 {
2255 assign_parm_data_one parm_data;
2256 tree bounds_parm;
2257 tree ptr_parm;
2258 rtx ptr_entry;
2259 int bound_no;
2260 };
2261
2262 /* A subroutine of assign_parms. Initialize ALL. */
2263
2264 static void
2265 assign_parms_initialize_all (struct assign_parm_data_all *all)
2266 {
2267 tree fntype ATTRIBUTE_UNUSED;
2268
2269 memset (all, 0, sizeof (*all));
2270
2271 fntype = TREE_TYPE (current_function_decl);
2272
2273 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2274 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2275 #else
2276 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2277 current_function_decl, -1);
2278 #endif
2279 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2280
2281 #ifdef INCOMING_REG_PARM_STACK_SPACE
2282 all->reg_parm_stack_space
2283 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2284 #endif
2285 }
2286
2287 /* If ARGS contains entries with complex types, split each such entry into
2288 two entries of the component type, one for the real part and one for the
2289 imaginary part. The entries in ARGS are updated in place. */
2290
2291 static void
2292 split_complex_args (vec<tree> *args)
2293 {
2294 unsigned i;
2295 tree p;
2296
2297 FOR_EACH_VEC_ELT (*args, i, p)
2298 {
2299 tree type = TREE_TYPE (p);
2300 if (TREE_CODE (type) == COMPLEX_TYPE
2301 && targetm.calls.split_complex_arg (type))
2302 {
2303 tree decl;
2304 tree subtype = TREE_TYPE (type);
2305 bool addressable = TREE_ADDRESSABLE (p);
2306
2307 /* Rewrite the PARM_DECL's type with its component. */
2308 p = copy_node (p);
2309 TREE_TYPE (p) = subtype;
2310 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2311 DECL_MODE (p) = VOIDmode;
2312 DECL_SIZE (p) = NULL;
2313 DECL_SIZE_UNIT (p) = NULL;
2314 /* If this arg must go in memory, put it in a pseudo here.
2315 We can't allow it to go in memory as per normal parms,
2316 because the usual place might not have the imag part
2317 adjacent to the real part. */
2318 DECL_ARTIFICIAL (p) = addressable;
2319 DECL_IGNORED_P (p) = addressable;
2320 TREE_ADDRESSABLE (p) = 0;
2321 layout_decl (p, 0);
2322 (*args)[i] = p;
2323
2324 /* Build a second synthetic decl. */
2325 decl = build_decl (EXPR_LOCATION (p),
2326 PARM_DECL, NULL_TREE, subtype);
2327 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2328 DECL_ARTIFICIAL (decl) = addressable;
2329 DECL_IGNORED_P (decl) = addressable;
2330 layout_decl (decl, 0);
2331 args->safe_insert (++i, decl);
2332 }
2333 }
2334 }
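/* For example, on a target whose split_complex_arg hook accepts it, a single
   '_Complex double' PARM_DECL is rewritten here into two DFmode PARM_DECLs:
   the original decl, narrowed to the real part, followed by a synthetic decl
   for the imaginary part.  */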
2335
2336 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2337 the hidden struct return argument, and (abi willing) complex args.
2338 Return the new parameter list. */
2339
2340 static vec<tree>
2341 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2342 {
2343 tree fndecl = current_function_decl;
2344 tree fntype = TREE_TYPE (fndecl);
2345 vec<tree> fnargs = vNULL;
2346 tree arg;
2347
2348 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2349 fnargs.safe_push (arg);
2350
2351 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2352
2353 /* If struct value address is treated as the first argument, make it so. */
2354 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2355 && ! cfun->returns_pcc_struct
2356 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2357 {
2358 tree type = build_pointer_type (TREE_TYPE (fntype));
2359 tree decl;
2360
2361 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2362 PARM_DECL, get_identifier (".result_ptr"), type);
2363 DECL_ARG_TYPE (decl) = type;
2364 DECL_ARTIFICIAL (decl) = 1;
2365 DECL_NAMELESS (decl) = 1;
2366 TREE_CONSTANT (decl) = 1;
2367
2368 DECL_CHAIN (decl) = all->orig_fnargs;
2369 all->orig_fnargs = decl;
2370 fnargs.safe_insert (0, decl);
2371
2372 all->function_result_decl = decl;
2373
2374 /* If the function is instrumented, then the bounds of the
2375 passed structure address become the second argument. */
2376 if (chkp_function_instrumented_p (fndecl))
2377 {
2378 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2379 PARM_DECL, get_identifier (".result_bnd"),
2380 pointer_bounds_type_node);
2381 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2382 DECL_ARTIFICIAL (decl) = 1;
2383 DECL_NAMELESS (decl) = 1;
2384 TREE_CONSTANT (decl) = 1;
2385
2386 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2387 DECL_CHAIN (all->orig_fnargs) = decl;
2388 fnargs.safe_insert (1, decl);
2389 }
2390 }
2391
2392 /* If the target wants to split complex arguments into scalars, do so. */
2393 if (targetm.calls.split_complex_arg)
2394 split_complex_args (&fnargs);
2395
2396 return fnargs;
2397 }
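/* For example, for 'struct big f (int x)' returning in memory on a target
   with no struct-value register, the augmented list becomes
   { .result_ptr, x }, and an extra .result_bnd entry follows .result_ptr
   when the function is instrumented.  */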
2398
2399 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2400 data for the parameter. Incorporate ABI specifics such as pass-by-
2401 reference and type promotion. */
2402
2403 static void
2404 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2405 struct assign_parm_data_one *data)
2406 {
2407 tree nominal_type, passed_type;
2408 machine_mode nominal_mode, passed_mode, promoted_mode;
2409 int unsignedp;
2410
2411 memset (data, 0, sizeof (*data));
2412
2413 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2414 if (!cfun->stdarg)
2415 data->named_arg = 1; /* No variadic parms. */
2416 else if (DECL_CHAIN (parm))
2417 data->named_arg = 1; /* Not the last non-variadic parm. */
2418 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2419 data->named_arg = 1; /* Only variadic ones are unnamed. */
2420 else
2421 data->named_arg = 0; /* Treat as variadic. */
2422
2423 nominal_type = TREE_TYPE (parm);
2424 passed_type = DECL_ARG_TYPE (parm);
2425
2426 /* Look out for errors propagating this far. Also, if the parameter's
2427 type is void then its value doesn't matter. */
2428 if (TREE_TYPE (parm) == error_mark_node
2429 /* This can happen after weird syntax errors
2430 or if an enum type is defined among the parms. */
2431 || TREE_CODE (parm) != PARM_DECL
2432 || passed_type == NULL
2433 || VOID_TYPE_P (nominal_type))
2434 {
2435 nominal_type = passed_type = void_type_node;
2436 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2437 goto egress;
2438 }
2439
2440 /* Find mode of arg as it is passed, and mode of arg as it should be
2441 during execution of this function. */
2442 passed_mode = TYPE_MODE (passed_type);
2443 nominal_mode = TYPE_MODE (nominal_type);
2444
2445 /* If the parm is to be passed as a transparent union or record, use the
2446 type of the first field for the tests below. We have already verified
2447 that the modes are the same. */
2448 if ((TREE_CODE (passed_type) == UNION_TYPE
2449 || TREE_CODE (passed_type) == RECORD_TYPE)
2450 && TYPE_TRANSPARENT_AGGR (passed_type))
2451 passed_type = TREE_TYPE (first_field (passed_type));
2452
2453 /* See if this arg was passed by invisible reference. */
2454 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2455 passed_type, data->named_arg))
2456 {
2457 passed_type = nominal_type = build_pointer_type (passed_type);
2458 data->passed_pointer = true;
2459 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2460 }
2461
2462 /* Find mode as it is passed by the ABI. */
2463 unsignedp = TYPE_UNSIGNED (passed_type);
2464 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2465 TREE_TYPE (current_function_decl), 0);
2466
2467 egress:
2468 data->nominal_type = nominal_type;
2469 data->passed_type = passed_type;
2470 data->nominal_mode = nominal_mode;
2471 data->passed_mode = passed_mode;
2472 data->promoted_mode = promoted_mode;
2473 }
2474
2475 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2476
2477 static void
2478 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2479 struct assign_parm_data_one *data, bool no_rtl)
2480 {
2481 int varargs_pretend_bytes = 0;
2482
2483 targetm.calls.setup_incoming_varargs (all->args_so_far,
2484 data->promoted_mode,
2485 data->passed_type,
2486 &varargs_pretend_bytes, no_rtl);
2487
2488 /* If the back-end has requested extra stack space, record how much is
2489 needed. Do not change pretend_args_size otherwise since it may be
2490 nonzero from an earlier partial argument. */
2491 if (varargs_pretend_bytes > 0)
2492 all->pretend_args_size = varargs_pretend_bytes;
2493 }
2494
2495 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2496 the incoming location of the current parameter. */
2497
2498 static void
2499 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2500 struct assign_parm_data_one *data)
2501 {
2502 HOST_WIDE_INT pretend_bytes = 0;
2503 rtx entry_parm;
2504 bool in_regs;
2505
2506 if (data->promoted_mode == VOIDmode)
2507 {
2508 data->entry_parm = data->stack_parm = const0_rtx;
2509 return;
2510 }
2511
2512 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2513 data->promoted_mode,
2514 data->passed_type,
2515 data->named_arg);
2516
2517 if (entry_parm == 0)
2518 data->promoted_mode = data->passed_mode;
2519
2520 /* Determine parm's home in the stack, in case it arrives in the stack
2521 or we should pretend it did. Compute the stack position and rtx where
2522 the argument arrives and its size.
2523
2524 There is one complexity here: If this was a parameter that would
2525 have been passed in registers, but wasn't only because it is
2526 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2527 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2528 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2529 as it was the previous time. */
2530 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2531 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2532 in_regs = true;
2533 #endif
2534 if (!in_regs && !data->named_arg)
2535 {
2536 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2537 {
2538 rtx tem;
2539 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2540 data->promoted_mode,
2541 data->passed_type, true);
2542 in_regs = tem != NULL;
2543 }
2544 }
2545
2546 /* If this parameter was passed both in registers and in the stack, use
2547 the copy on the stack. */
2548 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2549 data->passed_type))
2550 entry_parm = 0;
2551
2552 if (entry_parm)
2553 {
2554 int partial;
2555
2556 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2557 data->promoted_mode,
2558 data->passed_type,
2559 data->named_arg);
2560 data->partial = partial;
2561
2562 /* The caller might already have allocated stack space for the
2563 register parameters. */
2564 if (partial != 0 && all->reg_parm_stack_space == 0)
2565 {
2566 /* Part of this argument is passed in registers and part
2567 is passed on the stack. Ask the prologue code to extend
2568 the stack part so that we can recreate the full value.
2569
2570 PRETEND_BYTES is the size of the registers we need to store.
2571 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2572 stack space that the prologue should allocate.
2573
2574 Internally, gcc assumes that the argument pointer is aligned
2575 to STACK_BOUNDARY bits. This is used both for alignment
2576 optimizations (see init_emit) and to locate arguments that are
2577 aligned to more than PARM_BOUNDARY bits. We must preserve this
2578 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2579 a stack boundary. */
2580
2581 /* We assume at most one partial arg, and it must be the first
2582 argument on the stack. */
2583 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2584
2585 pretend_bytes = partial;
2586 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
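/* For instance, if 4 bytes of the argument arrived in registers and
   STACK_BYTES is 8, CEIL_ROUND (4, 8) == 8, so the prologue is asked to
   allocate 8 bytes of pretend argument space.  */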
2587
2588 /* We want to align relative to the actual stack pointer, so
2589 don't include this in the stack size until later. */
2590 all->extra_pretend_bytes = all->pretend_args_size;
2591 }
2592 }
2593
2594 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2595 all->reg_parm_stack_space,
2596 entry_parm ? data->partial : 0, current_function_decl,
2597 &all->stack_args_size, &data->locate);
2598
2599 /* Update parm_stack_boundary if this parameter is passed in the
2600 stack. */
2601 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2602 crtl->parm_stack_boundary = data->locate.boundary;
2603
2604 /* Adjust offsets to include the pretend args. */
2605 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2606 data->locate.slot_offset.constant += pretend_bytes;
2607 data->locate.offset.constant += pretend_bytes;
2608
2609 data->entry_parm = entry_parm;
2610 }
2611
2612 /* A subroutine of assign_parms. If there is actually space on the stack
2613 for this parm, count it in stack_args_size and return true. */
2614
2615 static bool
2616 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2617 struct assign_parm_data_one *data)
2618 {
2619 /* Bounds are never passed on the stack to keep compatibility
2620 with non-instrumented code. */
2621 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2622 return false;
2623 /* Trivially true if we've no incoming register. */
2624 else if (data->entry_parm == NULL)
2625 ;
2626 /* Also true if we're partially in registers and partially not,
2627 since we've arranged to drop the entire argument on the stack. */
2628 else if (data->partial != 0)
2629 ;
2630 /* Also true if the target says that it's passed in both registers
2631 and on the stack. */
2632 else if (GET_CODE (data->entry_parm) == PARALLEL
2633 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2634 ;
2635 /* Also true if the target says that there's stack allocated for
2636 all register parameters. */
2637 else if (all->reg_parm_stack_space > 0)
2638 ;
2639 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2640 else
2641 return false;
2642
2643 all->stack_args_size.constant += data->locate.size.constant;
2644 if (data->locate.size.var)
2645 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2646
2647 return true;
2648 }
2649
2650 /* A subroutine of assign_parms. Given that this parameter is allocated
2651 stack space by the ABI, find it. */
2652
2653 static void
2654 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2655 {
2656 rtx offset_rtx, stack_parm;
2657 unsigned int align, boundary;
2658
2659 /* If we're passing this arg using a reg, make its stack home the
2660 aligned stack slot. */
2661 if (data->entry_parm)
2662 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2663 else
2664 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2665
2666 stack_parm = crtl->args.internal_arg_pointer;
2667 if (offset_rtx != const0_rtx)
2668 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2669 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2670
2671 if (!data->passed_pointer)
2672 {
2673 set_mem_attributes (stack_parm, parm, 1);
2674 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2675 while the promoted mode's size is needed. */
2676 if (data->promoted_mode != BLKmode
2677 && data->promoted_mode != DECL_MODE (parm))
2678 {
2679 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2680 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2681 {
2682 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2683 data->promoted_mode);
2684 if (offset)
2685 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2686 }
2687 }
2688 }
2689
2690 boundary = data->locate.boundary;
2691 align = BITS_PER_UNIT;
2692
2693 /* If we're padding upward, we know that the alignment of the slot
2694 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2695 intentionally forcing upward padding. Otherwise we have to come
2696 up with a guess at the alignment based on OFFSET_RTX. */
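/* For instance, an offset of 12 bytes gives 12 * BITS_PER_UNIT == 96;
   OR-ing in a 64-bit boundary and taking align & -align keeps only the
   lowest set bit, 32, as the conservative alignment guess.  */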
2697 if (data->locate.where_pad != downward || data->entry_parm)
2698 align = boundary;
2699 else if (CONST_INT_P (offset_rtx))
2700 {
2701 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2702 align = align & -align;
2703 }
2704 set_mem_align (stack_parm, align);
2705
2706 if (data->entry_parm)
2707 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2708
2709 data->stack_parm = stack_parm;
2710 }
2711
2712 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2713 always valid and contiguous. */
2714
2715 static void
2716 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2717 {
2718 rtx entry_parm = data->entry_parm;
2719 rtx stack_parm = data->stack_parm;
2720
2721 /* If this parm was passed part in regs and part in memory, pretend it
2722 arrived entirely in memory by pushing the register-part onto the stack.
2723 In the special case of a DImode or DFmode that is split, we could put
2724 it together in a pseudoreg directly, but for now that's not worth
2725 bothering with. */
2726 if (data->partial != 0)
2727 {
2728 /* Handle calls that pass values in multiple non-contiguous
2729 locations. The Irix 6 ABI has examples of this. */
2730 if (GET_CODE (entry_parm) == PARALLEL)
2731 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2732 data->passed_type,
2733 int_size_in_bytes (data->passed_type));
2734 else
2735 {
2736 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2737 move_block_from_reg (REGNO (entry_parm),
2738 validize_mem (copy_rtx (stack_parm)),
2739 data->partial / UNITS_PER_WORD);
2740 }
2741
2742 entry_parm = stack_parm;
2743 }
2744
2745 /* If we didn't decide this parm came in a register, by default it came
2746 on the stack. */
2747 else if (entry_parm == NULL)
2748 entry_parm = stack_parm;
2749
2750 /* When an argument is passed in multiple locations, we can't make use
2751 of this information, but we can save some copying if the whole argument
2752 is passed in a single register. */
2753 else if (GET_CODE (entry_parm) == PARALLEL
2754 && data->nominal_mode != BLKmode
2755 && data->passed_mode != BLKmode)
2756 {
2757 size_t i, len = XVECLEN (entry_parm, 0);
2758
2759 for (i = 0; i < len; i++)
2760 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2761 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2762 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2763 == data->passed_mode)
2764 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2765 {
2766 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2767 break;
2768 }
2769 }
2770
2771 data->entry_parm = entry_parm;
2772 }
2773
2774 /* A subroutine of assign_parms. Reconstitute any values which were
2775 passed in multiple registers and would fit in a single register. */
2776
2777 static void
2778 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2779 {
2780 rtx entry_parm = data->entry_parm;
2781
2782 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2783 This can be done with register operations rather than on the
2784 stack, even if we will store the reconstituted parameter on the
2785 stack later. */
2786 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2787 {
2788 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2789 emit_group_store (parmreg, entry_parm, data->passed_type,
2790 GET_MODE_SIZE (GET_MODE (entry_parm)));
2791 entry_parm = parmreg;
2792 }
2793
2794 data->entry_parm = entry_parm;
2795 }
2796
2797 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2798 always valid and properly aligned. */
2799
2800 static void
2801 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2802 {
2803 rtx stack_parm = data->stack_parm;
2804
2805 /* If we can't trust the parm stack slot to be aligned enough for its
2806 ultimate type, don't use that slot after entry. We'll make another
2807 stack slot, if we need one. */
2808 if (stack_parm
2809 && ((STRICT_ALIGNMENT
2810 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2811 || (data->nominal_type
2812 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2813 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2814 stack_parm = NULL;
2815
2816 /* If parm was passed in memory, and we need to convert it on entry,
2817 don't store it back in that same slot. */
2818 else if (data->entry_parm == stack_parm
2819 && data->nominal_mode != BLKmode
2820 && data->nominal_mode != data->passed_mode)
2821 stack_parm = NULL;
2822
2823 /* If stack protection is in effect for this function, don't leave any
2824 pointers in their passed stack slots. */
2825 else if (crtl->stack_protect_guard
2826 && (flag_stack_protect == 2
2827 || data->passed_pointer
2828 || POINTER_TYPE_P (data->nominal_type)))
2829 stack_parm = NULL;
2830
2831 data->stack_parm = stack_parm;
2832 }
2833
2834 /* A subroutine of assign_parms. Return true if the current parameter
2835 should be stored as a BLKmode in the current frame. */
2836
2837 static bool
2838 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2839 {
2840 if (data->nominal_mode == BLKmode)
2841 return true;
2842 if (GET_MODE (data->entry_parm) == BLKmode)
2843 return true;
2844
2845 #ifdef BLOCK_REG_PADDING
2846 /* Only assign_parm_setup_block knows how to deal with register arguments
2847 that are padded at the least significant end. */
2848 if (REG_P (data->entry_parm)
2849 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2850 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2851 == (BYTES_BIG_ENDIAN ? upward : downward)))
2852 return true;
2853 #endif
2854
2855 return false;
2856 }
2857
2858 /* A subroutine of assign_parms. Arrange for the parameter to be
2859 present and valid in DATA->STACK_PARM. */
2860
2861 static void
2862 assign_parm_setup_block (struct assign_parm_data_all *all,
2863 tree parm, struct assign_parm_data_one *data)
2864 {
2865 rtx entry_parm = data->entry_parm;
2866 rtx stack_parm = data->stack_parm;
2867 HOST_WIDE_INT size;
2868 HOST_WIDE_INT size_stored;
2869
2870 if (GET_CODE (entry_parm) == PARALLEL)
2871 entry_parm = emit_group_move_into_temps (entry_parm);
2872
2873 size = int_size_in_bytes (data->passed_type);
2874 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2875 if (stack_parm == 0)
2876 {
2877 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2878 stack_parm = assign_stack_local (BLKmode, size_stored,
2879 DECL_ALIGN (parm));
2880 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2881 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2882 set_mem_attributes (stack_parm, parm, 1);
2883 }
2884
2885 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2886 calls that pass values in multiple non-contiguous locations. */
2887 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2888 {
2889 rtx mem;
2890
2891 /* Note that we will be storing an integral number of words.
2892 So we have to be careful to ensure that we allocate an
2893 integral number of words. We do this above when we call
2894 assign_stack_local if space was not allocated in the argument
2895 list. If it was, this will not work if PARM_BOUNDARY is not
2896 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2897 if it becomes a problem. An exception is when BLKmode arrives
2898 with arguments not conforming to word_mode. */
2899
2900 if (data->stack_parm == 0)
2901 ;
2902 else if (GET_CODE (entry_parm) == PARALLEL)
2903 ;
2904 else
2905 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2906
2907 mem = validize_mem (copy_rtx (stack_parm));
2908
2909 /* Handle values in multiple non-contiguous locations. */
2910 if (GET_CODE (entry_parm) == PARALLEL)
2911 {
2912 push_to_sequence2 (all->first_conversion_insn,
2913 all->last_conversion_insn);
2914 emit_group_store (mem, entry_parm, data->passed_type, size);
2915 all->first_conversion_insn = get_insns ();
2916 all->last_conversion_insn = get_last_insn ();
2917 end_sequence ();
2918 }
2919
2920 else if (size == 0)
2921 ;
2922
2923 /* If SIZE is that of a mode no bigger than a word, just use
2924 that mode's store operation. */
2925 else if (size <= UNITS_PER_WORD)
2926 {
2927 machine_mode mode
2928 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2929
2930 if (mode != BLKmode
2931 #ifdef BLOCK_REG_PADDING
2932 && (size == UNITS_PER_WORD
2933 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2934 != (BYTES_BIG_ENDIAN ? upward : downward)))
2935 #endif
2936 )
2937 {
2938 rtx reg;
2939
2940 /* We are really truncating a word_mode value containing
2941 SIZE bytes into a value of mode MODE. If such an
2942 operation requires no actual instructions, we can refer
2943 to the value directly in mode MODE, otherwise we must
2944 start with the register in word_mode and explicitly
2945 convert it. */
2946 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2947 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2948 else
2949 {
2950 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2951 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2952 }
2953 emit_move_insn (change_address (mem, mode, 0), reg);
2954 }
2955
2956 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2957 machine must be aligned to the left before storing
2958 to memory. Note that the previous test doesn't
2959 handle all cases (e.g. SIZE == 3). */
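/* For instance, with SIZE == 3 on a 64-bit big-endian target, the value is
   shifted left by (8 - 3) * 8 == 40 bits below, so its three significant
   bytes land in the lowest-addressed, most significant bytes of the stored
   word.  */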
2960 else if (size != UNITS_PER_WORD
2961 #ifdef BLOCK_REG_PADDING
2962 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2963 == downward)
2964 #else
2965 && BYTES_BIG_ENDIAN
2966 #endif
2967 )
2968 {
2969 rtx tem, x;
2970 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2971 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2972
2973 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2974 tem = change_address (mem, word_mode, 0);
2975 emit_move_insn (tem, x);
2976 }
2977 else
2978 move_block_from_reg (REGNO (entry_parm), mem,
2979 size_stored / UNITS_PER_WORD);
2980 }
2981 else
2982 move_block_from_reg (REGNO (entry_parm), mem,
2983 size_stored / UNITS_PER_WORD);
2984 }
2985 else if (data->stack_parm == 0)
2986 {
2987 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2988 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2989 BLOCK_OP_NORMAL);
2990 all->first_conversion_insn = get_insns ();
2991 all->last_conversion_insn = get_last_insn ();
2992 end_sequence ();
2993 }
2994
2995 data->stack_parm = stack_parm;
2996 SET_DECL_RTL (parm, stack_parm);
2997 }
2998
2999 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3000 parameter. Get it there. Perform all ABI specified conversions. */
3001
3002 static void
3003 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3004 struct assign_parm_data_one *data)
3005 {
3006 rtx parmreg, validated_mem;
3007 rtx equiv_stack_parm;
3008 machine_mode promoted_nominal_mode;
3009 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3010 bool did_conversion = false;
3011 bool need_conversion, moved;
3012
3013 /* Store the parm in a pseudoregister during the function, but we may
3014 need to do it in a wider mode. Using 2 here makes the result
3015 consistent with promote_decl_mode and thus expand_expr_real_1. */
3016 promoted_nominal_mode
3017 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3018 TREE_TYPE (current_function_decl), 2);
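/* For example, on a target whose ABI promotes sub-word integers, a 'short'
   parameter has nominal_mode HImode but promoted_nominal_mode SImode, so
   the pseudo created below is SImode even though the declared type is
   narrower.  */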
3019
3020 parmreg = gen_reg_rtx (promoted_nominal_mode);
3021
3022 if (!DECL_ARTIFICIAL (parm))
3023 mark_user_reg (parmreg);
3024
3025 /* If this was an item that we received a pointer to,
3026 set DECL_RTL appropriately. */
3027 if (data->passed_pointer)
3028 {
3029 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3030 set_mem_attributes (x, parm, 1);
3031 SET_DECL_RTL (parm, x);
3032 }
3033 else
3034 SET_DECL_RTL (parm, parmreg);
3035
3036 assign_parm_remove_parallels (data);
3037
3038 /* Copy the value into the register, thus bridging between
3039 assign_parm_find_data_types and expand_expr_real_1. */
3040
3041 equiv_stack_parm = data->stack_parm;
3042 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3043
3044 need_conversion = (data->nominal_mode != data->passed_mode
3045 || promoted_nominal_mode != data->promoted_mode);
3046 moved = false;
3047
3048 if (need_conversion
3049 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3050 && data->nominal_mode == data->passed_mode
3051 && data->nominal_mode == GET_MODE (data->entry_parm))
3052 {
3053 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3054 mode, by the caller. We now have to convert it to
3055 NOMINAL_MODE, if different. However, PARMREG may be in
3056 a different mode than NOMINAL_MODE if it is being stored
3057 promoted.
3058
3059 If ENTRY_PARM is a hard register, it might be in a register
3060 not valid for operating in its mode (e.g., an odd-numbered
3061 register for a DFmode). In that case, moves are the only
3062 thing valid, so we can't do a convert from there. This
3063 occurs when the calling sequence allows such misaligned
3064 usage.
3065
3066 In addition, the conversion may involve a call, which could
3067 clobber parameters which haven't been copied to pseudo
3068 registers yet.
3069
3070 First, we try to emit an insn which performs the necessary
3071 conversion. We verify that this insn does not clobber any
3072 hard registers. */
3073
3074 enum insn_code icode;
3075 rtx op0, op1;
3076
3077 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3078 unsignedp);
3079
3080 op0 = parmreg;
3081 op1 = validated_mem;
3082 if (icode != CODE_FOR_nothing
3083 && insn_operand_matches (icode, 0, op0)
3084 && insn_operand_matches (icode, 1, op1))
3085 {
3086 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3087 rtx_insn *insn, *insns;
3088 rtx t = op1;
3089 HARD_REG_SET hardregs;
3090
3091 start_sequence ();
3092 /* If op1 is a hard register that is likely spilled, first
3093 force it into a pseudo, otherwise the combiner might extend
3094 its lifetime too much. */
3095 if (GET_CODE (t) == SUBREG)
3096 t = SUBREG_REG (t);
3097 if (REG_P (t)
3098 && HARD_REGISTER_P (t)
3099 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3100 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3101 {
3102 t = gen_reg_rtx (GET_MODE (op1));
3103 emit_move_insn (t, op1);
3104 }
3105 else
3106 t = op1;
3107 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3108 data->passed_mode, unsignedp);
3109 emit_insn (pat);
3110 insns = get_insns ();
3111
3112 moved = true;
3113 CLEAR_HARD_REG_SET (hardregs);
3114 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3115 {
3116 if (INSN_P (insn))
3117 note_stores (PATTERN (insn), record_hard_reg_sets,
3118 &hardregs);
3119 if (!hard_reg_set_empty_p (hardregs))
3120 moved = false;
3121 }
3122
3123 end_sequence ();
3124
3125 if (moved)
3126 {
3127 emit_insn (insns);
3128 if (equiv_stack_parm != NULL_RTX)
3129 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3130 equiv_stack_parm);
3131 }
3132 }
3133 }
3134
3135 if (moved)
3136 /* Nothing to do. */
3137 ;
3138 else if (need_conversion)
3139 {
3140 /* We did not have an insn to convert directly, or the sequence
3141 generated appeared unsafe. We must first copy the parm to a
3142 pseudo reg, and save the conversion until after all
3143 parameters have been moved. */
3144
3145 int save_tree_used;
3146 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3147
3148 emit_move_insn (tempreg, validated_mem);
3149
3150 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3151 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3152
3153 if (GET_CODE (tempreg) == SUBREG
3154 && GET_MODE (tempreg) == data->nominal_mode
3155 && REG_P (SUBREG_REG (tempreg))
3156 && data->nominal_mode == data->passed_mode
3157 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3158 && GET_MODE_SIZE (GET_MODE (tempreg))
3159 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3160 {
3161 /* The argument is already sign/zero extended, so note it
3162 into the subreg. */
3163 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3164 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3165 }
3166
3167 /* TREE_USED gets set erroneously during expand_assignment. */
3168 save_tree_used = TREE_USED (parm);
3169 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3170 TREE_USED (parm) = save_tree_used;
3171 all->first_conversion_insn = get_insns ();
3172 all->last_conversion_insn = get_last_insn ();
3173 end_sequence ();
3174
3175 did_conversion = true;
3176 }
3177 else
3178 emit_move_insn (parmreg, validated_mem);
3179
3180 /* If we were passed a pointer but the actual value can safely live
3181 in a register, retrieve it and use it directly. */
3182 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3183 {
3184 /* We can't use nominal_mode, because it will have been set to
3185 Pmode above. We must use the actual mode of the parm. */
3186 if (use_register_for_decl (parm))
3187 {
3188 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3189 mark_user_reg (parmreg);
3190 }
3191 else
3192 {
3193 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3194 TYPE_MODE (TREE_TYPE (parm)),
3195 TYPE_ALIGN (TREE_TYPE (parm)));
3196 parmreg
3197 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3198 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3199 align);
3200 set_mem_attributes (parmreg, parm, 1);
3201 }
3202
3203 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3204 {
3205 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3206 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3207
3208 push_to_sequence2 (all->first_conversion_insn,
3209 all->last_conversion_insn);
3210 emit_move_insn (tempreg, DECL_RTL (parm));
3211 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3212 emit_move_insn (parmreg, tempreg);
3213 all->first_conversion_insn = get_insns ();
3214 all->last_conversion_insn = get_last_insn ();
3215 end_sequence ();
3216
3217 did_conversion = true;
3218 }
3219 else
3220 emit_move_insn (parmreg, DECL_RTL (parm));
3221
3222 SET_DECL_RTL (parm, parmreg);
3223
3224 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3225 now the parm. */
3226 data->stack_parm = NULL;
3227 }
3228
3229 /* Mark the register as eliminable if we did no conversion and it was
3230 copied from memory at a fixed offset, and the arg pointer was not
3231 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3232 offset formed an invalid address, such memory-equivalences as we
3233 make here would screw up life analysis for it. */
3234 if (data->nominal_mode == data->passed_mode
3235 && !did_conversion
3236 && data->stack_parm != 0
3237 && MEM_P (data->stack_parm)
3238 && data->locate.offset.var == 0
3239 && reg_mentioned_p (virtual_incoming_args_rtx,
3240 XEXP (data->stack_parm, 0)))
3241 {
3242 rtx_insn *linsn = get_last_insn ();
3243 rtx_insn *sinsn;
3244 rtx set;
3245
3246 /* Mark complex types separately. */
3247 if (GET_CODE (parmreg) == CONCAT)
3248 {
3249 machine_mode submode
3250 = GET_MODE_INNER (GET_MODE (parmreg));
3251 int regnor = REGNO (XEXP (parmreg, 0));
3252 int regnoi = REGNO (XEXP (parmreg, 1));
3253 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3254 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3255 GET_MODE_SIZE (submode));
3256
3257 /* Scan backwards for the set of the real and
3258 imaginary parts. */
3259 for (sinsn = linsn; sinsn != 0;
3260 sinsn = prev_nonnote_insn (sinsn))
3261 {
3262 set = single_set (sinsn);
3263 if (set == 0)
3264 continue;
3265
3266 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3267 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3268 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3269 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3270 }
3271 }
3272 else
3273 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3274 }
3275
3276 /* For a pointer data type, suggest a pointer register. */
3277 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3278 mark_reg_pointer (parmreg,
3279 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3280 }
3281
3282 /* A subroutine of assign_parms. Allocate stack space to hold the current
3283 parameter. Get it there. Perform all ABI specified conversions. */
3284
3285 static void
3286 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3287 struct assign_parm_data_one *data)
3288 {
3289 /* Value must be stored in the stack slot STACK_PARM during function
3290 execution. */
3291 bool to_conversion = false;
3292
3293 assign_parm_remove_parallels (data);
3294
3295 if (data->promoted_mode != data->nominal_mode)
3296 {
3297 /* Conversion is required. */
3298 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3299
3300 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3301
3302 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3303 to_conversion = true;
3304
3305 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3306 TYPE_UNSIGNED (TREE_TYPE (parm)));
3307
3308 if (data->stack_parm)
3309 {
3310 int offset = subreg_lowpart_offset (data->nominal_mode,
3311 GET_MODE (data->stack_parm));
3312 /* ??? This may need a big-endian conversion on sparc64. */
3313 data->stack_parm
3314 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3315 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3316 set_mem_offset (data->stack_parm,
3317 MEM_OFFSET (data->stack_parm) + offset);
3318 }
3319 }
3320
3321 if (data->entry_parm != data->stack_parm)
3322 {
3323 rtx src, dest;
3324
3325 if (data->stack_parm == 0)
3326 {
3327 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3328 GET_MODE (data->entry_parm),
3329 TYPE_ALIGN (data->passed_type));
3330 data->stack_parm
3331 = assign_stack_local (GET_MODE (data->entry_parm),
3332 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3333 align);
3334 set_mem_attributes (data->stack_parm, parm, 1);
3335 }
3336
3337 dest = validize_mem (copy_rtx (data->stack_parm));
3338 src = validize_mem (copy_rtx (data->entry_parm));
3339
3340 if (MEM_P (src))
3341 {
3342 /* Use a block move to handle potentially misaligned entry_parm. */
3343 if (!to_conversion)
3344 push_to_sequence2 (all->first_conversion_insn,
3345 all->last_conversion_insn);
3346 to_conversion = true;
3347
3348 emit_block_move (dest, src,
3349 GEN_INT (int_size_in_bytes (data->passed_type)),
3350 BLOCK_OP_NORMAL);
3351 }
3352 else
3353 emit_move_insn (dest, src);
3354 }
3355
3356 if (to_conversion)
3357 {
3358 all->first_conversion_insn = get_insns ();
3359 all->last_conversion_insn = get_last_insn ();
3360 end_sequence ();
3361 }
3362
3363 SET_DECL_RTL (parm, data->stack_parm);
3364 }
3365
3366 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3367 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3368
3369 static void
3370 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3371 vec<tree> fnargs)
3372 {
3373 tree parm;
3374 tree orig_fnargs = all->orig_fnargs;
3375 unsigned i = 0;
3376
3377 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3378 {
3379 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3380 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3381 {
3382 rtx tmp, real, imag;
3383 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3384
3385 real = DECL_RTL (fnargs[i]);
3386 imag = DECL_RTL (fnargs[i + 1]);
3387 if (inner != GET_MODE (real))
3388 {
3389 real = gen_lowpart_SUBREG (inner, real);
3390 imag = gen_lowpart_SUBREG (inner, imag);
3391 }
3392
3393 if (TREE_ADDRESSABLE (parm))
3394 {
3395 rtx rmem, imem;
3396 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3397 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3398 DECL_MODE (parm),
3399 TYPE_ALIGN (TREE_TYPE (parm)));
3400
3401 /* split_complex_arg put the real and imag parts in
3402 pseudos. Move them to memory. */
3403 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3404 set_mem_attributes (tmp, parm, 1);
3405 rmem = adjust_address_nv (tmp, inner, 0);
3406 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3407 push_to_sequence2 (all->first_conversion_insn,
3408 all->last_conversion_insn);
3409 emit_move_insn (rmem, real);
3410 emit_move_insn (imem, imag);
3411 all->first_conversion_insn = get_insns ();
3412 all->last_conversion_insn = get_last_insn ();
3413 end_sequence ();
3414 }
3415 else
3416 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3417 SET_DECL_RTL (parm, tmp);
3418
3419 real = DECL_INCOMING_RTL (fnargs[i]);
3420 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3421 if (inner != GET_MODE (real))
3422 {
3423 real = gen_lowpart_SUBREG (inner, real);
3424 imag = gen_lowpart_SUBREG (inner, imag);
3425 }
3426 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3427 set_decl_incoming_rtl (parm, tmp, false);
3428 i++;
3429 }
3430 }
3431 }
3432
3433 /* Load the bounds of PARM from the bounds table. */
3434 static void
3435 assign_parm_load_bounds (struct assign_parm_data_one *data,
3436 tree parm,
3437 rtx entry,
3438 unsigned bound_no)
3439 {
3440 bitmap_iterator bi;
3441 unsigned i, offs = 0;
3442 int bnd_no = -1;
3443 rtx slot = NULL, ptr = NULL;
3444
3445 if (parm)
3446 {
3447 bitmap slots;
3448 bitmap_obstack_initialize (NULL);
3449 slots = BITMAP_ALLOC (NULL);
3450 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3451 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3452 {
3453 if (bound_no)
3454 bound_no--;
3455 else
3456 {
3457 bnd_no = i;
3458 break;
3459 }
3460 }
3461 BITMAP_FREE (slots);
3462 bitmap_obstack_release (NULL);
3463 }
3464
3465 /* We may have bounds not associated with any pointer. */
3466 if (bnd_no != -1)
3467 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3468
3469 /* Find associated pointer. */
3470 if (bnd_no == -1)
3471 {
3472 /* If the bounds are not associated with any pointer,
3473 then they are passed in a register or a special slot. */
3474 gcc_assert (data->entry_parm);
3475 ptr = const0_rtx;
3476 }
3477 else if (MEM_P (entry))
3478 slot = adjust_address (entry, Pmode, offs);
3479 else if (REG_P (entry))
3480 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3481 else if (GET_CODE (entry) == PARALLEL)
3482 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3483 else
3484 gcc_unreachable ();
3485 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3486 data->entry_parm);
3487 }
3488
3489 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3490
3491 static void
3492 assign_bounds (vec<bounds_parm_data> &bndargs,
3493 struct assign_parm_data_all &all)
3494 {
3495 unsigned i, pass, handled = 0;
3496 bounds_parm_data *pbdata;
3497
3498 if (!bndargs.exists ())
3499 return;
3500
3501 /* We make several passes to store input bounds. First we handle bounds
3502 passed in registers. After that we load bounds passed in special
3503 slots. Finally we load bounds from the Bounds Table. */
3504 for (pass = 0; pass < 3; pass++)
3505 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3506 {
3507 /* Pass 0 => regs only. */
3508 if (pass == 0
3509 && (!pbdata->parm_data.entry_parm
3510 || GET_CODE (pbdata->parm_data.entry_parm) != REG))
3511 continue;
3512 /* Pass 1 => slots only. */
3513 else if (pass == 1
3514 && (!pbdata->parm_data.entry_parm
3515 || GET_CODE (pbdata->parm_data.entry_parm) == REG))
3516 continue;
3517 /* Pass 2 => BT only. */
3518 else if (pass == 2
3519 && pbdata->parm_data.entry_parm)
3520 continue;
3521
3522 if (!pbdata->parm_data.entry_parm
3523 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3524 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3525 pbdata->ptr_entry, pbdata->bound_no);
3526
3527 set_decl_incoming_rtl (pbdata->bounds_parm,
3528 pbdata->parm_data.entry_parm, false);
3529
3530 if (assign_parm_setup_block_p (&pbdata->parm_data))
3531 assign_parm_setup_block (&all, pbdata->bounds_parm,
3532 &pbdata->parm_data);
3533 else if (pbdata->parm_data.passed_pointer
3534 || use_register_for_decl (pbdata->bounds_parm))
3535 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3536 &pbdata->parm_data);
3537 else
3538 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3539 &pbdata->parm_data);
3540
3541 /* Count handled bounds to make sure we miss nothing. */
3542 handled++;
3543 }
3544
3545 gcc_assert (handled == bndargs.length ());
3546
3547 bndargs.release ();
3548 }
3549
3550 /* Assign RTL expressions to the function's parameters. This may involve
3551 copying them into registers and using those registers as the DECL_RTL. */
3552
3553 static void
3554 assign_parms (tree fndecl)
3555 {
3556 struct assign_parm_data_all all;
3557 tree parm;
3558 vec<tree> fnargs;
3559 unsigned i, bound_no = 0;
3560 tree last_arg = NULL;
3561 rtx last_arg_entry = NULL;
3562 vec<bounds_parm_data> bndargs = vNULL;
3563 bounds_parm_data bdata;
3564
3565 crtl->args.internal_arg_pointer
3566 = targetm.calls.internal_arg_pointer ();
3567
3568 assign_parms_initialize_all (&all);
3569 fnargs = assign_parms_augmented_arg_list (&all);
3570
3571 FOR_EACH_VEC_ELT (fnargs, i, parm)
3572 {
3573 struct assign_parm_data_one data;
3574
3575 /* Extract the type of PARM; adjust it according to ABI. */
3576 assign_parm_find_data_types (&all, parm, &data);
3577
3578 /* Early out for errors and void parameters. */
3579 if (data.passed_mode == VOIDmode)
3580 {
3581 SET_DECL_RTL (parm, const0_rtx);
3582 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3583 continue;
3584 }
3585
3586 /* Estimate stack alignment from parameter alignment. */
3587 if (SUPPORTS_STACK_ALIGNMENT)
3588 {
3589 unsigned int align
3590 = targetm.calls.function_arg_boundary (data.promoted_mode,
3591 data.passed_type);
3592 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3593 align);
3594 if (TYPE_ALIGN (data.nominal_type) > align)
3595 align = MINIMUM_ALIGNMENT (data.nominal_type,
3596 TYPE_MODE (data.nominal_type),
3597 TYPE_ALIGN (data.nominal_type));
3598 if (crtl->stack_alignment_estimated < align)
3599 {
3600 gcc_assert (!crtl->stack_realign_processed);
3601 crtl->stack_alignment_estimated = align;
3602 }
3603 }
3604
3605 /* Find out where the parameter arrives in this function. */
3606 assign_parm_find_entry_rtl (&all, &data);
3607
3608 /* Find out where stack space for this parameter might be. */
3609 if (assign_parm_is_stack_parm (&all, &data))
3610 {
3611 assign_parm_find_stack_rtl (parm, &data);
3612 assign_parm_adjust_entry_rtl (&data);
3613 }
3614 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3615 {
3616 /* Remember where the last non-bounds arg was passed in case
3617 we have to load associated bounds for it from the Bounds
3618 Table. */
3619 last_arg = parm;
3620 last_arg_entry = data.entry_parm;
3621 bound_no = 0;
3622 }
3623 /* Record permanently how this parm was passed. */
3624 if (data.passed_pointer)
3625 {
3626 rtx incoming_rtl
3627 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3628 data.entry_parm);
3629 set_decl_incoming_rtl (parm, incoming_rtl, true);
3630 }
3631 else
3632 set_decl_incoming_rtl (parm, data.entry_parm, false);
3633
3634 /* Bounds should be loaded in a particular order to
3635 have registers allocated correctly. Collect info about
3636 input bounds and load them later. */
3637 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3638 {
3639 /* Expect bounds in instrumented functions only. */
3640 gcc_assert (chkp_function_instrumented_p (fndecl));
3641
3642 bdata.parm_data = data;
3643 bdata.bounds_parm = parm;
3644 bdata.ptr_parm = last_arg;
3645 bdata.ptr_entry = last_arg_entry;
3646 bdata.bound_no = bound_no;
3647 bndargs.safe_push (bdata);
3648 }
3649 else
3650 {
3651 assign_parm_adjust_stack_rtl (&data);
3652
3653 if (assign_parm_setup_block_p (&data))
3654 assign_parm_setup_block (&all, parm, &data);
3655 else if (data.passed_pointer || use_register_for_decl (parm))
3656 assign_parm_setup_reg (&all, parm, &data);
3657 else
3658 assign_parm_setup_stack (&all, parm, &data);
3659 }
3660
3661 if (cfun->stdarg && !DECL_CHAIN (parm))
3662 {
3663 int pretend_bytes = 0;
3664
3665 assign_parms_setup_varargs (&all, &data, false);
3666
3667 if (chkp_function_instrumented_p (fndecl))
3668 {
3669 /* We expect this is the last parm. Otherwise it is wrong
3670 to assign bounds right now. */
3671 gcc_assert (i == (fnargs.length () - 1));
3672 assign_bounds (bndargs, all);
3673 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3674 data.promoted_mode,
3675 data.passed_type,
3676 &pretend_bytes,
3677 false);
3678 }
3679 }
3680
3681 /* Update info on where next arg arrives in registers. */
3682 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3683 data.passed_type, data.named_arg);
3684
3685 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3686 bound_no++;
3687 }
3688
3689 assign_bounds (bndargs, all);
3690
3691 if (targetm.calls.split_complex_arg)
3692 assign_parms_unsplit_complex (&all, fnargs);
3693
3694 fnargs.release ();
3695
3696 /* Output all parameter conversion instructions (possibly including calls)
3697 now that all parameters have been copied out of hard registers. */
3698 emit_insn (all.first_conversion_insn);
3699
3700 /* Estimate reload stack alignment from scalar return mode. */
3701 if (SUPPORTS_STACK_ALIGNMENT)
3702 {
3703 if (DECL_RESULT (fndecl))
3704 {
3705 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3706 machine_mode mode = TYPE_MODE (type);
3707
3708 if (mode != BLKmode
3709 && mode != VOIDmode
3710 && !AGGREGATE_TYPE_P (type))
3711 {
3712 unsigned int align = GET_MODE_ALIGNMENT (mode);
3713 if (crtl->stack_alignment_estimated < align)
3714 {
3715 gcc_assert (!crtl->stack_realign_processed);
3716 crtl->stack_alignment_estimated = align;
3717 }
3718 }
3719 }
3720 }
3721
3722 /* If we are receiving a struct value address as the first argument, set up
3723 the RTL for the function result. As this might require code to convert
3724 the transmitted address to Pmode, we do this here to ensure that possible
3725 preliminary conversions of the address have been emitted already. */
3726 if (all.function_result_decl)
3727 {
3728 tree result = DECL_RESULT (current_function_decl);
3729 rtx addr = DECL_RTL (all.function_result_decl);
3730 rtx x;
3731
3732 if (DECL_BY_REFERENCE (result))
3733 {
3734 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3735 x = addr;
3736 }
3737 else
3738 {
3739 SET_DECL_VALUE_EXPR (result,
3740 build1 (INDIRECT_REF, TREE_TYPE (result),
3741 all.function_result_decl));
3742 addr = convert_memory_address (Pmode, addr);
3743 x = gen_rtx_MEM (DECL_MODE (result), addr);
3744 set_mem_attributes (x, result, 1);
3745 }
3746
3747 DECL_HAS_VALUE_EXPR_P (result) = 1;
3748
3749 SET_DECL_RTL (result, x);
3750 }
3751
3752 /* We have aligned all the args, so add space for the pretend args. */
3753 crtl->args.pretend_args_size = all.pretend_args_size;
3754 all.stack_args_size.constant += all.extra_pretend_bytes;
3755 crtl->args.size = all.stack_args_size.constant;
3756
3757 /* Adjust function incoming argument size for alignment and
3758 minimum length. */
3759
3760 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3761 crtl->args.size = CEIL_ROUND (crtl->args.size,
3762 PARM_BOUNDARY / BITS_PER_UNIT);
3763
3764 if (ARGS_GROW_DOWNWARD)
3765 {
3766 crtl->args.arg_offset_rtx
3767 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3768 : expand_expr (size_diffop (all.stack_args_size.var,
3769 size_int (-all.stack_args_size.constant)),
3770 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3771 }
3772 else
3773 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3774
3775 /* See how many bytes, if any, of its args a function should try to pop
3776 on return. */
3777
3778 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3779 TREE_TYPE (fndecl),
3780 crtl->args.size);
3781
3782 /* For stdarg.h functions, save info about
3783 regs and stack space used by the named args. */
3784
3785 crtl->args.info = all.args_so_far_v;
3786
3787 /* Set the rtx used for the function return value. Put this in its
3788 own variable so any optimizers that need this information don't have
3789 to include tree.h. Do this here so it gets done when an inlined
3790 function gets output. */
3791
3792 crtl->return_rtx
3793 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3794 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3795
3796 /* If scalar return value was computed in a pseudo-reg, or was a named
3797 return value that got dumped to the stack, copy that to the hard
3798 return register. */
3799 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3800 {
3801 tree decl_result = DECL_RESULT (fndecl);
3802 rtx decl_rtl = DECL_RTL (decl_result);
3803
3804 if (REG_P (decl_rtl)
3805 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3806 : DECL_REGISTER (decl_result))
3807 {
3808 rtx real_decl_rtl;
3809
3810 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3811 fndecl, true);
3812 if (chkp_function_instrumented_p (fndecl))
3813 crtl->return_bnd
3814 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3815 fndecl, true);
3816 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3817 /* The delay slot scheduler assumes that crtl->return_rtx
3818 holds the hard register containing the return value, not a
3819 temporary pseudo. */
3820 crtl->return_rtx = real_decl_rtl;
3821 }
3822 }
3823 }
3824
3825 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3826 For all seen types, gimplify their sizes. */
3827
3828 static tree
3829 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3830 {
3831 tree t = *tp;
3832
3833 *walk_subtrees = 0;
3834 if (TYPE_P (t))
3835 {
3836 if (POINTER_TYPE_P (t))
3837 *walk_subtrees = 1;
3838 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3839 && !TYPE_SIZES_GIMPLIFIED (t))
3840 {
3841 gimplify_type_sizes (t, (gimple_seq *) data);
3842 *walk_subtrees = 1;
3843 }
3844 }
3845
3846 return NULL;
3847 }
3848
3849 /* Gimplify the parameter list for current_function_decl. This involves
3850 evaluating SAVE_EXPRs of variable sized parameters and generating code
3851 to implement callee-copies reference parameters. Returns a sequence of
3852 statements to add to the beginning of the function. */
3853
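/* Rough sketch of what gimplify_parameters below produces for a
   callee-copied reference parameter PARM (illustrative pseudo-GIMPLE,
   not exact compiler output):

     // constant-sized type: copy into a fresh local VAR_DECL
     local = PARM;

     // variable-sized type: allocate space for the copy first
     addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (PARM),
                                         DECL_ALIGN (PARM));
     *addr = PARM;            // "local" is then *addr

   Afterwards DECL_VALUE_EXPR (PARM) refers to the local copy, so later
   uses of PARM read the copy rather than the caller's object.  */
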
3854 gimple_seq
3855 gimplify_parameters (void)
3856 {
3857 struct assign_parm_data_all all;
3858 tree parm;
3859 gimple_seq stmts = NULL;
3860 vec<tree> fnargs;
3861 unsigned i;
3862
3863 assign_parms_initialize_all (&all);
3864 fnargs = assign_parms_augmented_arg_list (&all);
3865
3866 FOR_EACH_VEC_ELT (fnargs, i, parm)
3867 {
3868 struct assign_parm_data_one data;
3869
3870 /* Extract the type of PARM; adjust it according to ABI. */
3871 assign_parm_find_data_types (&all, parm, &data);
3872
3873 /* Early out for errors and void parameters. */
3874 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3875 continue;
3876
3877 /* Update info on where next arg arrives in registers. */
3878 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3879 data.passed_type, data.named_arg);
3880
3881 /* ??? Once upon a time variable_size stuffed parameter list
3882 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3883 turned out to be less than manageable in the gimple world.
3884 Now we have to hunt them down ourselves. */
3885 walk_tree_without_duplicates (&data.passed_type,
3886 gimplify_parm_type, &stmts);
3887
3888 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3889 {
3890 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3891 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3892 }
3893
3894 if (data.passed_pointer)
3895 {
3896 tree type = TREE_TYPE (data.passed_type);
3897 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3898 type, data.named_arg))
3899 {
3900 tree local, t;
3901
3902 /* For constant-sized objects, this is trivial; for
3903 variable-sized objects, we have to play games. */
3904 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3905 && !(flag_stack_check == GENERIC_STACK_CHECK
3906 && compare_tree_int (DECL_SIZE_UNIT (parm),
3907 STACK_CHECK_MAX_VAR_SIZE) > 0))
3908 {
3909 local = create_tmp_var (type, get_name (parm));
3910 DECL_IGNORED_P (local) = 0;
3911 /* If PARM was addressable, move that flag over
3912 to the local copy, as its address will be taken,
3913 not the PARM's. Keep the parm marked addressable,
3914 as we'll query that flag during gimplification. */
3915 if (TREE_ADDRESSABLE (parm))
3916 TREE_ADDRESSABLE (local) = 1;
3917 else if (TREE_CODE (type) == COMPLEX_TYPE
3918 || TREE_CODE (type) == VECTOR_TYPE)
3919 DECL_GIMPLE_REG_P (local) = 1;
3920 }
3921 else
3922 {
3923 tree ptr_type, addr;
3924
3925 ptr_type = build_pointer_type (type);
3926 addr = create_tmp_reg (ptr_type, get_name (parm));
3927 DECL_IGNORED_P (addr) = 0;
3928 local = build_fold_indirect_ref (addr);
3929
3930 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3931 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3932 size_int (DECL_ALIGN (parm)));
3933
3934 /* The call has been built for a variable-sized object. */
3935 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3936 t = fold_convert (ptr_type, t);
3937 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3938 gimplify_and_add (t, &stmts);
3939 }
3940
3941 gimplify_assign (local, parm, &stmts);
3942
3943 SET_DECL_VALUE_EXPR (parm, local);
3944 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3945 }
3946 }
3947 }
3948
3949 fnargs.release ();
3950
3951 return stmts;
3952 }
3953 \f
3954 /* Compute the size and offset from the start of the stacked arguments for a
3955 parm passed in mode PASSED_MODE and with type TYPE.
3956
3957 INITIAL_OFFSET_PTR points to the current offset into the stacked
3958 arguments.
3959
3960 The starting offset and size for this parm are returned in
3961 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3962 nonzero, the offset is that of a stack slot, which is returned in
3963 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3964 padding required from the initial offset ptr to the stack slot.
3965
3966 IN_REGS is nonzero if the argument will be passed in registers. It will
3967 never be set if REG_PARM_STACK_SPACE is not defined.
3968
3969 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3970 for arguments which are passed in registers.
3971
3972 FNDECL is the function in which the argument was defined.
3973
3974 There are two types of rounding that are done. The first, controlled by
3975 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3976 argument list to be aligned to the specific boundary (in bits). This
3977 rounding affects the initial and starting offsets, but not the argument
3978 size.
3979
3980 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3981 optionally rounds the size of the parm to PARM_BOUNDARY. The
3982 initial offset is not affected by this rounding, while the size always
3983 is and the starting offset may be. */
3984
3985 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3986 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3987 callers pass in the total size of args so far as
3988 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3989
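/* Worked example for the upward-growing case (illustrative numbers,
   assuming PARM_BOUNDARY == 32, a 32-bit argument boundary and round
   boundary, and STACK_POINTER_OFFSET == 0): for a 3-byte BLKmode
   argument arriving at byte offset 6,
     - pad_to_arg_alignment rounds the offset up to 8, so
       LOCATE->SLOT_OFFSET is 8;
     - with downward padding, pad_below bumps LOCATE->OFFSET to 9 so the
       3 bytes occupy the top of their 4-byte slot;
     - the size is rounded up to 4, so the next argument starts at 12.  */
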
3990 void
3991 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3992 int reg_parm_stack_space, int partial,
3993 tree fndecl ATTRIBUTE_UNUSED,
3994 struct args_size *initial_offset_ptr,
3995 struct locate_and_pad_arg_data *locate)
3996 {
3997 tree sizetree;
3998 enum direction where_pad;
3999 unsigned int boundary, round_boundary;
4000 int part_size_in_regs;
4001
4002 /* If we have found a stack parm before we reach the end of the
4003 area reserved for registers, skip that area. */
4004 if (! in_regs)
4005 {
4006 if (reg_parm_stack_space > 0)
4007 {
4008 if (initial_offset_ptr->var)
4009 {
4010 initial_offset_ptr->var
4011 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4012 ssize_int (reg_parm_stack_space));
4013 initial_offset_ptr->constant = 0;
4014 }
4015 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4016 initial_offset_ptr->constant = reg_parm_stack_space;
4017 }
4018 }
4019
4020 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4021
4022 sizetree
4023 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4024 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4025 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4026 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4027 type);
4028 locate->where_pad = where_pad;
4029
4030 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4031 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4032 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4033
4034 locate->boundary = boundary;
4035
4036 if (SUPPORTS_STACK_ALIGNMENT)
4037 {
4038 /* stack_alignment_estimated can't change after stack has been
4039 realigned. */
4040 if (crtl->stack_alignment_estimated < boundary)
4041 {
4042 if (!crtl->stack_realign_processed)
4043 crtl->stack_alignment_estimated = boundary;
4044 else
4045 {
4046 /* If stack is realigned and stack alignment value
4047 hasn't been finalized, it is OK not to increase
4048 stack_alignment_estimated. The bigger alignment
4049 requirement is recorded in stack_alignment_needed
4050 below. */
4051 gcc_assert (!crtl->stack_realign_finalized
4052 && crtl->stack_realign_needed);
4053 }
4054 }
4055 }
4056
4057 /* Remember if the outgoing parameter requires extra alignment on the
4058 calling function side. */
4059 if (crtl->stack_alignment_needed < boundary)
4060 crtl->stack_alignment_needed = boundary;
4061 if (crtl->preferred_stack_boundary < boundary)
4062 crtl->preferred_stack_boundary = boundary;
4063
4064 if (ARGS_GROW_DOWNWARD)
4065 {
4066 locate->slot_offset.constant = -initial_offset_ptr->constant;
4067 if (initial_offset_ptr->var)
4068 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4069 initial_offset_ptr->var);
4070
4071 {
4072 tree s2 = sizetree;
4073 if (where_pad != none
4074 && (!tree_fits_uhwi_p (sizetree)
4075 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4076 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4077 SUB_PARM_SIZE (locate->slot_offset, s2);
4078 }
4079
4080 locate->slot_offset.constant += part_size_in_regs;
4081
4082 if (!in_regs || reg_parm_stack_space > 0)
4083 pad_to_arg_alignment (&locate->slot_offset, boundary,
4084 &locate->alignment_pad);
4085
4086 locate->size.constant = (-initial_offset_ptr->constant
4087 - locate->slot_offset.constant);
4088 if (initial_offset_ptr->var)
4089 locate->size.var = size_binop (MINUS_EXPR,
4090 size_binop (MINUS_EXPR,
4091 ssize_int (0),
4092 initial_offset_ptr->var),
4093 locate->slot_offset.var);
4094
4095 /* Pad_below needs the pre-rounded size to know how much to pad
4096 below. */
4097 locate->offset = locate->slot_offset;
4098 if (where_pad == downward)
4099 pad_below (&locate->offset, passed_mode, sizetree);
4100
4101 }
4102 else
4103 {
4104 if (!in_regs || reg_parm_stack_space > 0)
4105 pad_to_arg_alignment (initial_offset_ptr, boundary,
4106 &locate->alignment_pad);
4107 locate->slot_offset = *initial_offset_ptr;
4108
4109 #ifdef PUSH_ROUNDING
4110 if (passed_mode != BLKmode)
4111 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4112 #endif
4113
4114 /* Pad_below needs the pre-rounded size to know how much to pad below
4115 so this must be done before rounding up. */
4116 locate->offset = locate->slot_offset;
4117 if (where_pad == downward)
4118 pad_below (&locate->offset, passed_mode, sizetree);
4119
4120 if (where_pad != none
4121 && (!tree_fits_uhwi_p (sizetree)
4122 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4123 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4124
4125 ADD_PARM_SIZE (locate->size, sizetree);
4126
4127 locate->size.constant -= part_size_in_regs;
4128 }
4129
4130 #ifdef FUNCTION_ARG_OFFSET
4131 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4132 #endif
4133 }
4134
4135 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4136 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4137
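/* Example (illustrative, assuming PARM_BOUNDARY == 32 and
   STACK_POINTER_OFFSET == 0): a constant offset of 6 bytes rounded to a
   64-bit BOUNDARY becomes 8 when arguments grow upward (CEIL_ROUND) or 0
   when they grow downward (FLOOR_ROUND); because BOUNDARY exceeds
   PARM_BOUNDARY, the adjustment is also recorded in *ALIGNMENT_PAD.  */
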
4138 static void
4139 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4140 struct args_size *alignment_pad)
4141 {
4142 tree save_var = NULL_TREE;
4143 HOST_WIDE_INT save_constant = 0;
4144 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4145 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4146
4147 #ifdef SPARC_STACK_BOUNDARY_HACK
4148 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4149 the real alignment of %sp. However, when it does this, the
4150 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4151 if (SPARC_STACK_BOUNDARY_HACK)
4152 sp_offset = 0;
4153 #endif
4154
4155 if (boundary > PARM_BOUNDARY)
4156 {
4157 save_var = offset_ptr->var;
4158 save_constant = offset_ptr->constant;
4159 }
4160
4161 alignment_pad->var = NULL_TREE;
4162 alignment_pad->constant = 0;
4163
4164 if (boundary > BITS_PER_UNIT)
4165 {
4166 if (offset_ptr->var)
4167 {
4168 tree sp_offset_tree = ssize_int (sp_offset);
4169 tree offset = size_binop (PLUS_EXPR,
4170 ARGS_SIZE_TREE (*offset_ptr),
4171 sp_offset_tree);
4172 tree rounded;
4173 if (ARGS_GROW_DOWNWARD)
4174 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4175 else
4176 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4177
4178 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4179 /* ARGS_SIZE_TREE includes constant term. */
4180 offset_ptr->constant = 0;
4181 if (boundary > PARM_BOUNDARY)
4182 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4183 save_var);
4184 }
4185 else
4186 {
4187 offset_ptr->constant = -sp_offset +
4188 (ARGS_GROW_DOWNWARD
4189 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4190 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
4191
4192 if (boundary > PARM_BOUNDARY)
4193 alignment_pad->constant = offset_ptr->constant - save_constant;
4194 }
4195 }
4196 }
4197
4198 static void
4199 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4200 {
4201 if (passed_mode != BLKmode)
4202 {
4203 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4204 offset_ptr->constant
4205 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4206 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4207 - GET_MODE_SIZE (passed_mode));
4208 }
4209 else
4210 {
4211 if (TREE_CODE (sizetree) != INTEGER_CST
4212 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4213 {
4214 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4215 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4216 /* Add it in. */
4217 ADD_PARM_SIZE (*offset_ptr, s2);
4218 SUB_PARM_SIZE (*offset_ptr, sizetree);
4219 }
4220 }
4221 }
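/* Example for the function above (illustrative): a 1-byte QImode
   argument with PARM_BOUNDARY == 32 gets OFFSET_PTR->constant increased
   by 3, so the byte ends up at the high end of its 4-byte slot; BLKmode
   arguments are adjusted the same way, using the difference between the
   rounded and actual sizes.  */
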
4222 \f
4223
4224 /* True if register REGNO was alive at a place where `setjmp' was
4225 called and was set more than once or is an argument. Such regs may
4226 be clobbered by `longjmp'. */
4227
4228 static bool
4229 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4230 {
4231 /* There appear to be cases where some local vars never reach the
4232 backend but have bogus regnos. */
4233 if (regno >= max_reg_num ())
4234 return false;
4235
4236 return ((REG_N_SETS (regno) > 1
4237 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4238 regno))
4239 && REGNO_REG_SET_P (setjmp_crosses, regno));
4240 }
4241
4242 /* Walk the tree of blocks describing the binding levels within a
4243 function and warn about variables that might be killed by setjmp or
4244 vfork. This is done after flow analysis but before register
4245 allocation, since register allocation will map the pseudo-regs to hard
4246 regs. */
4247
4248 static void
4249 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4250 {
4251 tree decl, sub;
4252
4253 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4254 {
4255 if (TREE_CODE (decl) == VAR_DECL
4256 && DECL_RTL_SET_P (decl)
4257 && REG_P (DECL_RTL (decl))
4258 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4259 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4260 " %<longjmp%> or %<vfork%>", decl);
4261 }
4262
4263 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4264 setjmp_vars_warning (setjmp_crosses, sub);
4265 }
4266
4267 /* Do the appropriate part of setjmp_vars_warning
4268 but for arguments instead of local variables. */
4269
4270 static void
4271 setjmp_args_warning (bitmap setjmp_crosses)
4272 {
4273 tree decl;
4274 for (decl = DECL_ARGUMENTS (current_function_decl);
4275 decl; decl = DECL_CHAIN (decl))
4276 if (DECL_RTL (decl) != 0
4277 && REG_P (DECL_RTL (decl))
4278 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4279 warning (OPT_Wclobbered,
4280 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4281 decl);
4282 }
4283
4284 /* Generate warning messages for variables live across setjmp. */
4285
4286 void
4287 generate_setjmp_warnings (void)
4288 {
4289 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4290
4291 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4292 || bitmap_empty_p (setjmp_crosses))
4293 return;
4294
4295 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4296 setjmp_args_warning (setjmp_crosses);
4297 }
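/* Typical user code these warnings diagnose (illustrative example, not
   part of GCC itself):

     #include <setjmp.h>
     extern jmp_buf env;
     extern void g (void);
     int f (void)
     {
       int x = 1;                 // may live in a call-clobbered register
       if (setjmp (env) == 0)
         {
           x = 2;
           g ();                  // g may longjmp back to the setjmp
         }
       return x;                  // -Wclobbered: x might be clobbered
     }

   Declaring X volatile avoids the problem.  */
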
4298
4299 \f
4300 /* Reverse the order of elements in the fragment chain T of blocks,
4301 and return the new head of the chain (old last element).
4302 In addition to that clear BLOCK_SAME_RANGE flags when needed
4303 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4304 its super fragment origin. */
4305
4306 static tree
4307 block_fragments_nreverse (tree t)
4308 {
4309 tree prev = 0, block, next, prev_super = 0;
4310 tree super = BLOCK_SUPERCONTEXT (t);
4311 if (BLOCK_FRAGMENT_ORIGIN (super))
4312 super = BLOCK_FRAGMENT_ORIGIN (super);
4313 for (block = t; block; block = next)
4314 {
4315 next = BLOCK_FRAGMENT_CHAIN (block);
4316 BLOCK_FRAGMENT_CHAIN (block) = prev;
4317 if ((prev && !BLOCK_SAME_RANGE (prev))
4318 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4319 != prev_super))
4320 BLOCK_SAME_RANGE (block) = 0;
4321 prev_super = BLOCK_SUPERCONTEXT (block);
4322 BLOCK_SUPERCONTEXT (block) = super;
4323 prev = block;
4324 }
4325 t = BLOCK_FRAGMENT_ORIGIN (t);
4326 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4327 != prev_super)
4328 BLOCK_SAME_RANGE (t) = 0;
4329 BLOCK_SUPERCONTEXT (t) = super;
4330 return prev;
4331 }
4332
4333 /* Reverse the order of elements in the chain T of blocks,
4334 and return the new head of the chain (old last element).
4335 Also do the same on subblocks and reverse the order of elements
4336 in BLOCK_FRAGMENT_CHAIN as well. */
4337
4338 static tree
4339 blocks_nreverse_all (tree t)
4340 {
4341 tree prev = 0, block, next;
4342 for (block = t; block; block = next)
4343 {
4344 next = BLOCK_CHAIN (block);
4345 BLOCK_CHAIN (block) = prev;
4346 if (BLOCK_FRAGMENT_CHAIN (block)
4347 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4348 {
4349 BLOCK_FRAGMENT_CHAIN (block)
4350 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4351 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4352 BLOCK_SAME_RANGE (block) = 0;
4353 }
4354 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4355 prev = block;
4356 }
4357 return prev;
4358 }
4359
4360
4361 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4362 and create duplicate blocks. */
4363 /* ??? Need an option to either create block fragments or to create
4364 abstract origin duplicates of a source block. It really depends
4365 on what optimization has been performed. */
4366
4367 void
4368 reorder_blocks (void)
4369 {
4370 tree block = DECL_INITIAL (current_function_decl);
4371
4372 if (block == NULL_TREE)
4373 return;
4374
4375 auto_vec<tree, 10> block_stack;
4376
4377 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4378 clear_block_marks (block);
4379
4380 /* Prune the old trees away, so that they don't get in the way. */
4381 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4382 BLOCK_CHAIN (block) = NULL_TREE;
4383
4384 /* Recreate the block tree from the note nesting. */
4385 reorder_blocks_1 (get_insns (), block, &block_stack);
4386 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4387 }
4388
4389 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4390
4391 void
4392 clear_block_marks (tree block)
4393 {
4394 while (block)
4395 {
4396 TREE_ASM_WRITTEN (block) = 0;
4397 clear_block_marks (BLOCK_SUBBLOCKS (block));
4398 block = BLOCK_CHAIN (block);
4399 }
4400 }
4401
4402 static void
4403 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4404 vec<tree> *p_block_stack)
4405 {
4406 rtx_insn *insn;
4407 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4408
4409 for (insn = insns; insn; insn = NEXT_INSN (insn))
4410 {
4411 if (NOTE_P (insn))
4412 {
4413 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4414 {
4415 tree block = NOTE_BLOCK (insn);
4416 tree origin;
4417
4418 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4419 origin = block;
4420
4421 if (prev_end)
4422 BLOCK_SAME_RANGE (prev_end) = 0;
4423 prev_end = NULL_TREE;
4424
4425 /* If we have seen this block before, that means it now
4426 spans multiple address regions. Create a new fragment. */
4427 if (TREE_ASM_WRITTEN (block))
4428 {
4429 tree new_block = copy_node (block);
4430
4431 BLOCK_SAME_RANGE (new_block) = 0;
4432 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4433 BLOCK_FRAGMENT_CHAIN (new_block)
4434 = BLOCK_FRAGMENT_CHAIN (origin);
4435 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4436
4437 NOTE_BLOCK (insn) = new_block;
4438 block = new_block;
4439 }
4440
4441 if (prev_beg == current_block && prev_beg)
4442 BLOCK_SAME_RANGE (block) = 1;
4443
4444 prev_beg = origin;
4445
4446 BLOCK_SUBBLOCKS (block) = 0;
4447 TREE_ASM_WRITTEN (block) = 1;
4448 /* When there's only one block for the entire function,
4449 current_block == block and we mustn't do this; it
4450 would cause infinite recursion. */
4451 if (block != current_block)
4452 {
4453 tree super;
4454 if (block != origin)
4455 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4456 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4457 (origin))
4458 == current_block);
4459 if (p_block_stack->is_empty ())
4460 super = current_block;
4461 else
4462 {
4463 super = p_block_stack->last ();
4464 gcc_assert (super == current_block
4465 || BLOCK_FRAGMENT_ORIGIN (super)
4466 == current_block);
4467 }
4468 BLOCK_SUPERCONTEXT (block) = super;
4469 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4470 BLOCK_SUBBLOCKS (current_block) = block;
4471 current_block = origin;
4472 }
4473 p_block_stack->safe_push (block);
4474 }
4475 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4476 {
4477 NOTE_BLOCK (insn) = p_block_stack->pop ();
4478 current_block = BLOCK_SUPERCONTEXT (current_block);
4479 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4480 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4481 prev_beg = NULL_TREE;
4482 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4483 ? NOTE_BLOCK (insn) : NULL_TREE;
4484 }
4485 }
4486 else
4487 {
4488 prev_beg = NULL_TREE;
4489 if (prev_end)
4490 BLOCK_SAME_RANGE (prev_end) = 0;
4491 prev_end = NULL_TREE;
4492 }
4493 }
4494 }
4495
4496 /* Reverse the order of elements in the chain T of blocks,
4497 and return the new head of the chain (old last element). */
4498
4499 tree
4500 blocks_nreverse (tree t)
4501 {
4502 tree prev = 0, block, next;
4503 for (block = t; block; block = next)
4504 {
4505 next = BLOCK_CHAIN (block);
4506 BLOCK_CHAIN (block) = prev;
4507 prev = block;
4508 }
4509 return prev;
4510 }
4511
4512 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4513 by modifying the last node in chain 1 to point to chain 2. */
4514
4515 tree
4516 block_chainon (tree op1, tree op2)
4517 {
4518 tree t1;
4519
4520 if (!op1)
4521 return op2;
4522 if (!op2)
4523 return op1;
4524
4525 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4526 continue;
4527 BLOCK_CHAIN (t1) = op2;
4528
4529 #ifdef ENABLE_TREE_CHECKING
4530 {
4531 tree t2;
4532 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4533 gcc_assert (t2 != t1);
4534 }
4535 #endif
4536
4537 return op1;
4538 }
4539
4540 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4541 non-NULL, list them all into VECTOR, in a depth-first preorder
4542 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4543 blocks. */
4544
4545 static int
4546 all_blocks (tree block, tree *vector)
4547 {
4548 int n_blocks = 0;
4549
4550 while (block)
4551 {
4552 TREE_ASM_WRITTEN (block) = 0;
4553
4554 /* Record this block. */
4555 if (vector)
4556 vector[n_blocks] = block;
4557
4558 ++n_blocks;
4559
4560 /* Record the subblocks, and their subblocks... */
4561 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4562 vector ? vector + n_blocks : 0);
4563 block = BLOCK_CHAIN (block);
4564 }
4565
4566 return n_blocks;
4567 }
4568
4569 /* Return a vector containing all the blocks rooted at BLOCK. The
4570 number of elements in the vector is stored in N_BLOCKS_P. The
4571 vector is dynamically allocated; it is the caller's responsibility
4572 to call `free' on the pointer returned. */
4573
4574 static tree *
4575 get_block_vector (tree block, int *n_blocks_p)
4576 {
4577 tree *block_vector;
4578
4579 *n_blocks_p = all_blocks (block, NULL);
4580 block_vector = XNEWVEC (tree, *n_blocks_p);
4581 all_blocks (block, block_vector);
4582
4583 return block_vector;
4584 }
4585
4586 static GTY(()) int next_block_index = 2;
4587
4588 /* Set BLOCK_NUMBER for all the blocks in FN. */
4589
4590 void
4591 number_blocks (tree fn)
4592 {
4593 int i;
4594 int n_blocks;
4595 tree *block_vector;
4596
4597 /* For SDB and XCOFF debugging output, we start numbering the blocks
4598 from 1 within each function, rather than keeping a running
4599 count. */
4600 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4601 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4602 next_block_index = 1;
4603 #endif
4604
4605 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4606
4607 /* The top-level BLOCK isn't numbered at all. */
4608 for (i = 1; i < n_blocks; ++i)
4609 /* We number the blocks from two. */
4610 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4611
4612 free (block_vector);
4613
4614 return;
4615 }
4616
4617 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4618
4619 DEBUG_FUNCTION tree
4620 debug_find_var_in_block_tree (tree var, tree block)
4621 {
4622 tree t;
4623
4624 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4625 if (t == var)
4626 return block;
4627
4628 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4629 {
4630 tree ret = debug_find_var_in_block_tree (var, t);
4631 if (ret)
4632 return ret;
4633 }
4634
4635 return NULL_TREE;
4636 }
4637 \f
4638 /* Keep track of whether we're in a dummy function context. If we are,
4639 we don't want to invoke the set_current_function hook, because we'll
4640 get into trouble if the hook calls target_reinit () recursively or
4641 when the initial initialization is not yet complete. */
4642
4643 static bool in_dummy_function;
4644
4645 /* Invoke the target hook when setting cfun. Update the optimization options
4646 if the function uses different options than the default. */
4647
4648 static void
4649 invoke_set_current_function_hook (tree fndecl)
4650 {
4651 if (!in_dummy_function)
4652 {
4653 tree opts = ((fndecl)
4654 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4655 : optimization_default_node);
4656
4657 if (!opts)
4658 opts = optimization_default_node;
4659
4660 /* Change optimization options if needed. */
4661 if (optimization_current_node != opts)
4662 {
4663 optimization_current_node = opts;
4664 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4665 }
4666
4667 targetm.set_current_function (fndecl);
4668 this_fn_optabs = this_target_optabs;
4669
4670 if (opts != optimization_default_node)
4671 {
4672 init_tree_optimization_optabs (opts);
4673 if (TREE_OPTIMIZATION_OPTABS (opts))
4674 this_fn_optabs = (struct target_optabs *)
4675 TREE_OPTIMIZATION_OPTABS (opts);
4676 }
4677 }
4678 }
4679
4680 /* cfun should never be set directly; use this function. */
4681
4682 void
4683 set_cfun (struct function *new_cfun)
4684 {
4685 if (cfun != new_cfun)
4686 {
4687 cfun = new_cfun;
4688 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4689 }
4690 }
4691
4692 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4693
4694 static vec<function_p> cfun_stack;
4695
4696 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4697 current_function_decl accordingly. */
4698
4699 void
4700 push_cfun (struct function *new_cfun)
4701 {
4702 gcc_assert ((!cfun && !current_function_decl)
4703 || (cfun && current_function_decl == cfun->decl));
4704 cfun_stack.safe_push (cfun);
4705 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4706 set_cfun (new_cfun);
4707 }
4708
4709 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4710
4711 void
4712 pop_cfun (void)
4713 {
4714 struct function *new_cfun = cfun_stack.pop ();
4715 /* When in_dummy_function, we do have a cfun but current_function_decl is
4716 NULL. We also allow pushing NULL cfun and subsequently changing
4717 current_function_decl to something else and have both restored by
4718 pop_cfun. */
4719 gcc_checking_assert (in_dummy_function
4720 || !cfun
4721 || current_function_decl == cfun->decl);
4722 set_cfun (new_cfun);
4723 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4724 }
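/* Typical usage pattern (sketch): a pass that needs to work on another
   function temporarily brackets that work with

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... inspect or emit code for the other function ...
     pop_cfun ();

   so that cfun and current_function_decl are restored on the way out.  */
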
4725
4726 /* Return the current value of funcdef_no and increment it. */
4727 int
4728 get_next_funcdef_no (void)
4729 {
4730 return funcdef_no++;
4731 }
4732
4733 /* Return the current value of funcdef_no. */
4734 int
4735 get_last_funcdef_no (void)
4736 {
4737 return funcdef_no;
4738 }
4739
4740 /* Allocate a function structure for FNDECL and set its contents
4741 to the defaults. Set cfun to the newly-allocated object.
4742 Some of the helper functions invoked during initialization assume
4743 that cfun has already been set. Therefore, assign the new object
4744 directly into cfun and invoke the back end hook explicitly at the
4745 very end, rather than initializing a temporary and calling set_cfun
4746 on it.
4747
4748 ABSTRACT_P is true if this is a function that will never be seen by
4749 the middle-end. Such functions are front-end concepts (like C++
4750 function templates) that do not correspond directly to functions
4751 placed in object files. */
4752
4753 void
4754 allocate_struct_function (tree fndecl, bool abstract_p)
4755 {
4756 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4757
4758 cfun = ggc_cleared_alloc<function> ();
4759
4760 init_eh_for_function ();
4761
4762 if (init_machine_status)
4763 cfun->machine = (*init_machine_status) ();
4764
4765 #ifdef OVERRIDE_ABI_FORMAT
4766 OVERRIDE_ABI_FORMAT (fndecl);
4767 #endif
4768
4769 if (fndecl != NULL_TREE)
4770 {
4771 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4772 cfun->decl = fndecl;
4773 current_function_funcdef_no = get_next_funcdef_no ();
4774 }
4775
4776 invoke_set_current_function_hook (fndecl);
4777
4778 if (fndecl != NULL_TREE)
4779 {
4780 tree result = DECL_RESULT (fndecl);
4781 if (!abstract_p && aggregate_value_p (result, fndecl))
4782 {
4783 #ifdef PCC_STATIC_STRUCT_RETURN
4784 cfun->returns_pcc_struct = 1;
4785 #endif
4786 cfun->returns_struct = 1;
4787 }
4788
4789 cfun->stdarg = stdarg_p (fntype);
4790
4791 /* Assume all registers in stdarg functions need to be saved. */
4792 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4793 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4794
4795 /* ??? This could be set on a per-function basis by the front-end
4796 but is this worth the hassle? */
4797 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4798 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4799
4800 if (!profile_flag && !flag_instrument_function_entry_exit)
4801 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4802 }
4803 }
4804
4805 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4806 instead of just setting it. */
4807
4808 void
4809 push_struct_function (tree fndecl)
4810 {
4811 /* When in_dummy_function we might be in the middle of a pop_cfun and
4812 current_function_decl and cfun may not match. */
4813 gcc_assert (in_dummy_function
4814 || (!cfun && !current_function_decl)
4815 || (cfun && current_function_decl == cfun->decl));
4816 cfun_stack.safe_push (cfun);
4817 current_function_decl = fndecl;
4818 allocate_struct_function (fndecl, false);
4819 }
4820
4821 /* Reset crtl and other non-struct-function variables to defaults as
4822 appropriate for emitting rtl at the start of a function. */
4823
4824 static void
4825 prepare_function_start (void)
4826 {
4827 gcc_assert (!get_last_insn ());
4828 init_temp_slots ();
4829 init_emit ();
4830 init_varasm_status ();
4831 init_expr ();
4832 default_rtl_profile ();
4833
4834 if (flag_stack_usage_info)
4835 {
4836 cfun->su = ggc_cleared_alloc<stack_usage> ();
4837 cfun->su->static_stack_size = -1;
4838 }
4839
4840 cse_not_expected = ! optimize;
4841
4842 /* Caller save not needed yet. */
4843 caller_save_needed = 0;
4844
4845 /* We haven't done register allocation yet. */
4846 reg_renumber = 0;
4847
4848 /* Indicate that we have not instantiated virtual registers yet. */
4849 virtuals_instantiated = 0;
4850
4851 /* Indicate that we want CONCATs now. */
4852 generating_concat_p = 1;
4853
4854 /* Indicate we have no need of a frame pointer yet. */
4855 frame_pointer_needed = 0;
4856 }
4857
4858 void
4859 push_dummy_function (bool with_decl)
4860 {
4861 tree fn_decl, fn_type, fn_result_decl;
4862
4863 gcc_assert (!in_dummy_function);
4864 in_dummy_function = true;
4865
4866 if (with_decl)
4867 {
4868 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4869 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4870 fn_type);
4871 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4872 NULL_TREE, void_type_node);
4873 DECL_RESULT (fn_decl) = fn_result_decl;
4874 }
4875 else
4876 fn_decl = NULL_TREE;
4877
4878 push_struct_function (fn_decl);
4879 }
4880
4881 /* Initialize the rtl expansion mechanism so that we can do simple things
4882 like generate sequences. This is used to provide a context during global
4883 initialization of some passes. You must call expand_dummy_function_end
4884 to exit this context. */
4885
4886 void
4887 init_dummy_function_start (void)
4888 {
4889 push_dummy_function (false);
4890 prepare_function_start ();
4891 }
4892
4893 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4894 and initialize static variables for generating RTL for the statements
4895 of the function. */
4896
4897 void
4898 init_function_start (tree subr)
4899 {
4900 if (subr && DECL_STRUCT_FUNCTION (subr))
4901 set_cfun (DECL_STRUCT_FUNCTION (subr));
4902 else
4903 allocate_struct_function (subr, false);
4904
4905 /* Initialize backend, if needed. */
4906 initialize_rtl ();
4907
4908 prepare_function_start ();
4909 decide_function_section (subr);
4910
4911 /* Warn if this value is an aggregate type,
4912 regardless of which calling convention we are using for it. */
4913 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4914 warning (OPT_Waggregate_return, "function returns an aggregate");
4915 }
4916
4917 /* Expand code to verify the stack_protect_guard. This is invoked at
4918 the end of a function to be protected. */
4919
4920 #ifndef HAVE_stack_protect_test
4921 # define HAVE_stack_protect_test 0
4922 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4923 #endif
4924
4925 void
4926 stack_protect_epilogue (void)
4927 {
4928 tree guard_decl = targetm.stack_protect_guard ();
4929 rtx_code_label *label = gen_label_rtx ();
4930 rtx x, y, tmp;
4931
4932 x = expand_normal (crtl->stack_protect_guard);
4933 y = expand_normal (guard_decl);
4934
4935 /* Allow the target to compare Y with X without leaking either into
4936 a register. */
4937 switch ((int) (HAVE_stack_protect_test != 0))
4938 {
4939 case 1:
4940 tmp = gen_stack_protect_test (x, y, label);
4941 if (tmp)
4942 {
4943 emit_insn (tmp);
4944 break;
4945 }
4946 /* FALLTHRU */
4947
4948 default:
4949 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4950 break;
4951 }
4952
4953 /* The noreturn predictor has been moved to the tree level. The rtl-level
4954 predictors estimate this branch about 20%, which isn't enough to get
4955 things moved out of line. Since this is the only extant case of adding
4956 a noreturn function at the rtl level, it doesn't seem worth doing anything
4957 except adding the prediction by hand. */
4958 tmp = get_last_insn ();
4959 if (JUMP_P (tmp))
4960 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
4961
4962 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4963 free_temp_slots ();
4964 emit_label (label);
4965 }
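/* Conceptually, the sequence emitted above behaves like the following
   C-level sketch (the real expansion is target RTL; the guard symbol and
   failure routine are typically __stack_chk_guard and __stack_chk_fail,
   but both come from target hooks):

     if (saved_canary != guard_value)
       __stack_chk_fail ();       // never returns

   using either the stack_protect_test pattern or a generic
   compare-and-jump.  */
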
4966 \f
4967 /* Start the RTL for a new function, and set variables used for
4968 emitting RTL.
4969 SUBR is the FUNCTION_DECL node.
4970 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4971 the function's parameters, which must be run at any return statement. */
4972
4973 void
4974 expand_function_start (tree subr)
4975 {
4976 /* Make sure volatile mem refs aren't considered
4977 valid operands of arithmetic insns. */
4978 init_recog_no_volatile ();
4979
4980 crtl->profile
4981 = (profile_flag
4982 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4983
4984 crtl->limit_stack
4985 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4986
4987 /* Make the label for return statements to jump to. Do not special
4988 case machines with special return instructions -- they will be
4989 handled later during jump, ifcvt, or epilogue creation. */
4990 return_label = gen_label_rtx ();
4991
4992 /* Initialize rtx used to return the value. */
4993 /* Do this before assign_parms so that we copy the struct value address
4994 before any library calls that assign parms might generate. */
4995
4996 /* Decide whether to return the value in memory or in a register. */
4997 if (aggregate_value_p (DECL_RESULT (subr), subr))
4998 {
4999 /* Returning something that won't go in a register. */
5000 rtx value_address = 0;
5001
5002 #ifdef PCC_STATIC_STRUCT_RETURN
5003 if (cfun->returns_pcc_struct)
5004 {
5005 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5006 value_address = assemble_static_space (size);
5007 }
5008 else
5009 #endif
5010 {
5011 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5012 /* Expect to be passed the address of a place to store the value.
5013 If it is passed as an argument, assign_parms will take care of
5014 it. */
5015 if (sv)
5016 {
5017 value_address = gen_reg_rtx (Pmode);
5018 emit_move_insn (value_address, sv);
5019 }
5020 }
5021 if (value_address)
5022 {
5023 rtx x = value_address;
5024 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
5025 {
5026 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
5027 set_mem_attributes (x, DECL_RESULT (subr), 1);
5028 }
5029 SET_DECL_RTL (DECL_RESULT (subr), x);
5030 }
5031 }
5032 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5033 /* If return mode is void, this decl rtl should not be used. */
5034 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
5035 else
5036 {
5037 /* Compute the return values into a pseudo reg, which we will copy
5038 into the true return register after the cleanups are done. */
5039 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5040 if (TYPE_MODE (return_type) != BLKmode
5041 && targetm.calls.return_in_msb (return_type))
5042 /* expand_function_end will insert the appropriate padding in
5043 this case. Use the return value's natural (unpadded) mode
5044 within the function proper. */
5045 SET_DECL_RTL (DECL_RESULT (subr),
5046 gen_reg_rtx (TYPE_MODE (return_type)));
5047 else
5048 {
5049 /* In order to figure out what mode to use for the pseudo, we
5050 figure out what the mode of the eventual return register will
5051 actually be, and use that. */
5052 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5053
5054 /* Structures that are returned in registers are not
5055 aggregate_value_p, so we may see a PARALLEL or a REG. */
5056 if (REG_P (hard_reg))
5057 SET_DECL_RTL (DECL_RESULT (subr),
5058 gen_reg_rtx (GET_MODE (hard_reg)));
5059 else
5060 {
5061 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5062 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
5063 }
5064 }
5065
5066 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5067 result to the real return register(s). */
5068 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5069
5070 if (chkp_function_instrumented_p (current_function_decl))
5071 {
5072 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5073 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5074 subr, 1);
5075 SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
5076 }
5077 }
5078
5079 /* Initialize rtx for parameters and local variables.
5080 In some cases this requires emitting insns. */
5081 assign_parms (subr);
5082
5083 /* If function gets a static chain arg, store it. */
5084 if (cfun->static_chain_decl)
5085 {
5086 tree parm = cfun->static_chain_decl;
5087 rtx local, chain;
5088 rtx_insn *insn;
5089
5090 local = gen_reg_rtx (Pmode);
5091 chain = targetm.calls.static_chain (current_function_decl, true);
5092
5093 set_decl_incoming_rtl (parm, chain, false);
5094 SET_DECL_RTL (parm, local);
5095 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5096
5097 insn = emit_move_insn (local, chain);
5098
5099 /* Mark the register as eliminable, similar to parameters. */
5100 if (MEM_P (chain)
5101 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5102 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5103
5104 /* If we aren't optimizing, save the static chain onto the stack. */
5105 if (!optimize)
5106 {
5107 tree saved_static_chain_decl
5108 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5109 DECL_NAME (parm), TREE_TYPE (parm));
5110 rtx saved_static_chain_rtx
5111 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5112 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5113 emit_move_insn (saved_static_chain_rtx, chain);
5114 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5115 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5116 }
5117 }
5118
5119 /* If the function receives a non-local goto, then store the
5120 bits we need to restore the frame pointer. */
5121 if (cfun->nonlocal_goto_save_area)
5122 {
5123 tree t_save;
5124 rtx r_save;
5125
5126 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5127 gcc_assert (DECL_RTL_SET_P (var));
5128
5129 t_save = build4 (ARRAY_REF,
5130 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5131 cfun->nonlocal_goto_save_area,
5132 integer_zero_node, NULL_TREE, NULL_TREE);
5133 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5134 gcc_assert (GET_MODE (r_save) == Pmode);
5135
5136 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5137 update_nonlocal_goto_save_area ();
5138 }
5139
5140 /* The following was moved from init_function_start.
5141 The move is supposed to make sdb output more accurate. */
5142 /* Indicate the beginning of the function body,
5143 as opposed to parm setup. */
5144 emit_note (NOTE_INSN_FUNCTION_BEG);
5145
5146 gcc_assert (NOTE_P (get_last_insn ()));
5147
5148 parm_birth_insn = get_last_insn ();
5149
5150 if (crtl->profile)
5151 {
5152 #ifdef PROFILE_HOOK
5153 PROFILE_HOOK (current_function_funcdef_no);
5154 #endif
5155 }
5156
5157 /* If we are doing generic stack checking, the probe should go here. */
5158 if (flag_stack_check == GENERIC_STACK_CHECK)
5159 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5160 }
5161 \f
5162 void
5163 pop_dummy_function (void)
5164 {
5165 pop_cfun ();
5166 in_dummy_function = false;
5167 }
5168
5169 /* Undo the effects of init_dummy_function_start. */
5170 void
5171 expand_dummy_function_end (void)
5172 {
5173 gcc_assert (in_dummy_function);
5174
5175 /* End any sequences that failed to be closed due to syntax errors. */
5176 while (in_sequence_p ())
5177 end_sequence ();
5178
5179 /* Outside function body, can't compute type's actual size
5180 until next function's body starts. */
5181
5182 free_after_parsing (cfun);
5183 free_after_compilation (cfun);
5184 pop_dummy_function ();
5185 }
5186
5187 /* Helper for diddle_return_value. */
5188
5189 void
5190 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5191 {
5192 if (! outgoing)
5193 return;
5194
5195 if (REG_P (outgoing))
5196 (*doit) (outgoing, arg);
5197 else if (GET_CODE (outgoing) == PARALLEL)
5198 {
5199 int i;
5200
5201 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5202 {
5203 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5204
5205 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5206 (*doit) (x, arg);
5207 }
5208 }
5209 }
5210
5211 /* Call DOIT for each hard register used as a return value from
5212 the current function. */
5213
5214 void
5215 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5216 {
5217 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5218 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5219 }
5220
5221 static void
5222 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5223 {
5224 emit_clobber (reg);
5225 }
5226
5227 void
5228 clobber_return_register (void)
5229 {
5230 diddle_return_value (do_clobber_return_reg, NULL);
5231
5232 /* In case we do use pseudo to return value, clobber it too. */
5233 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5234 {
5235 tree decl_result = DECL_RESULT (current_function_decl);
5236 rtx decl_rtl = DECL_RTL (decl_result);
5237 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5238 {
5239 do_clobber_return_reg (decl_rtl, NULL);
5240 }
5241 }
5242 }
5243
5244 static void
5245 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5246 {
5247 emit_use (reg);
5248 }
5249
5250 static void
5251 use_return_register (void)
5252 {
5253 diddle_return_value (do_use_return_reg, NULL);
5254 }
5255
5256 /* Possibly warn about unused parameters. */
5257 void
5258 do_warn_unused_parameter (tree fn)
5259 {
5260 tree decl;
5261
5262 for (decl = DECL_ARGUMENTS (fn);
5263 decl; decl = DECL_CHAIN (decl))
5264 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5265 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
5266 && !TREE_NO_WARNING (decl))
5267 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
5268 }
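/* Example of code this warns about (illustrative user code):

     int add (int a, int b, int flags)   // -Wunused-parameter for 'flags'
     {
       return a + b;
     }

   Unnamed, artificial, or TREE_NO_WARNING parameters are skipped.  */
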
5269
5270 /* Set the location of the insn chain starting at INSN to LOC. */
5271
5272 static void
5273 set_insn_locations (rtx_insn *insn, int loc)
5274 {
5275 while (insn != NULL)
5276 {
5277 if (INSN_P (insn))
5278 INSN_LOCATION (insn) = loc;
5279 insn = NEXT_INSN (insn);
5280 }
5281 }
5282
5283 /* Generate RTL for the end of the current function. */
5284
5285 void
5286 expand_function_end (void)
5287 {
5288 /* If arg_pointer_save_area was referenced only from a nested
5289 function, we will not have initialized it yet. Do that now. */
5290 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5291 get_arg_pointer_save_area ();
5292
5293 /* If we are doing generic stack checking and this function makes calls,
5294 do a stack probe at the start of the function to ensure we have enough
5295 space for another stack frame. */
5296 if (flag_stack_check == GENERIC_STACK_CHECK)
5297 {
5298 rtx_insn *insn, *seq;
5299
5300 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5301 if (CALL_P (insn))
5302 {
5303 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5304 start_sequence ();
5305 if (STACK_CHECK_MOVING_SP)
5306 anti_adjust_stack_and_probe (max_frame_size, true);
5307 else
5308 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5309 seq = get_insns ();
5310 end_sequence ();
5311 set_insn_locations (seq, prologue_location);
5312 emit_insn_before (seq, stack_check_probe_note);
5313 break;
5314 }
5315 }
5316
5317 /* End any sequences that failed to be closed due to syntax errors. */
5318 while (in_sequence_p ())
5319 end_sequence ();
5320
5321 clear_pending_stack_adjust ();
5322 do_pending_stack_adjust ();
5323
5324 /* Output a line number for the end of the function.
5325 SDB depends on this. */
5326 set_curr_insn_location (input_location);
5327
5328 /* Before the return label (if any), clobber the return
5329 registers so that they are not propagated live to the rest of
5330 the function. This can only happen with functions that drop
5331 through; if there had been a return statement, there would
5332 have either been a return rtx, or a jump to the return label.
5333
5334 We delay actual code generation until after the current_function_value_rtx
5335 is computed. */
5336 rtx_insn *clobber_after = get_last_insn ();
5337
5338 /* Output the label for the actual return from the function. */
5339 emit_label (return_label);
5340
5341 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5342 {
5343 /* Let except.c know where it should emit the call to unregister
5344 the function context for sjlj exceptions. */
5345 if (flag_exceptions)
5346 sjlj_emit_function_exit_after (get_last_insn ());
5347 }
5348 else
5349 {
5350 /* We want to ensure that instructions that may trap are not
5351 moved into the epilogue by scheduling, because we don't
5352 always emit unwind information for the epilogue. */
5353 if (cfun->can_throw_non_call_exceptions)
5354 emit_insn (gen_blockage ());
5355 }
5356
5357 /* If this is an implementation of throw, do what's necessary to
5358 communicate between __builtin_eh_return and the epilogue. */
5359 expand_eh_return ();
5360
5361 /* If scalar return value was computed in a pseudo-reg, or was a named
5362 return value that got dumped to the stack, copy that to the hard
5363 return register. */
5364 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5365 {
5366 tree decl_result = DECL_RESULT (current_function_decl);
5367 rtx decl_rtl = DECL_RTL (decl_result);
5368
5369 if (REG_P (decl_rtl)
5370 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5371 : DECL_REGISTER (decl_result))
5372 {
5373 rtx real_decl_rtl = crtl->return_rtx;
5374
5375 /* This should be set in assign_parms. */
5376 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5377
5378 /* If this is a BLKmode structure being returned in registers,
5379 then use the mode computed in expand_return. Note that if
5380 decl_rtl is memory, then its mode may have been changed,
5381 but that of crtl->return_rtx has not. */
5382 if (GET_MODE (real_decl_rtl) == BLKmode)
5383 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5384
5385 /* If a non-BLKmode return value should be padded at the least
5386 significant end of the register, shift it left by the appropriate
5387 amount. BLKmode results are handled using the group load/store
5388 machinery. */
5389 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5390 && REG_P (real_decl_rtl)
5391 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5392 {
5393 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5394 REGNO (real_decl_rtl)),
5395 decl_rtl);
5396 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5397 }
5398 /* If a named return value was dumped to memory (so decl_rtl is a MEM),
5399 we may need to re-do the PROMOTE_MODE signed/unsigned
5400 extension. */
5401 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5402 {
5403 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5404 promote_function_mode (TREE_TYPE (decl_result),
5405 GET_MODE (decl_rtl), &unsignedp,
5406 TREE_TYPE (current_function_decl), 1);
5407
5408 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5409 }
5410 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5411 {
5412 /* If expand_function_start has created a PARALLEL for decl_rtl,
5413 move the result to the real return registers. Otherwise, do
5414 a group load from decl_rtl for a named return. */
5415 if (GET_CODE (decl_rtl) == PARALLEL)
5416 emit_group_move (real_decl_rtl, decl_rtl);
5417 else
5418 emit_group_load (real_decl_rtl, decl_rtl,
5419 TREE_TYPE (decl_result),
5420 int_size_in_bytes (TREE_TYPE (decl_result)));
5421 }
5422 /* In the case of complex integer modes smaller than a word, we'll
5423 need to generate some non-trivial bitfield insertions. Do that
5424 on a pseudo and not the hard register. */
5425 else if (GET_CODE (decl_rtl) == CONCAT
5426 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5427 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5428 {
5429 int old_generating_concat_p;
5430 rtx tmp;
5431
5432 old_generating_concat_p = generating_concat_p;
5433 generating_concat_p = 0;
5434 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5435 generating_concat_p = old_generating_concat_p;
5436
5437 emit_move_insn (tmp, decl_rtl);
5438 emit_move_insn (real_decl_rtl, tmp);
5439 }
5440 else
5441 emit_move_insn (real_decl_rtl, decl_rtl);
5442 }
5443 }
5444
5445 /* If returning a structure, arrange to return the address of the value
5446 in a place where debuggers expect to find it.
5447
5448 If returning a structure PCC style,
5449 the caller also depends on this value.
5450 And cfun->returns_pcc_struct is not necessarily set. */
5451 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5452 && !targetm.calls.omit_struct_return_reg)
5453 {
5454 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5455 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5456 rtx outgoing;
5457
5458 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5459 type = TREE_TYPE (type);
5460 else
5461 value_address = XEXP (value_address, 0);
5462
5463 outgoing = targetm.calls.function_value (build_pointer_type (type),
5464 current_function_decl, true);
5465
5466 /* Mark this as a function return value so integrate will delete the
5467 assignment and USE below when inlining this function. */
5468 REG_FUNCTION_VALUE_P (outgoing) = 1;
5469
5470 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5471 value_address = convert_memory_address (GET_MODE (outgoing),
5472 value_address);
5473
5474 emit_move_insn (outgoing, value_address);
5475
5476 /* Show return register used to hold result (in this case the address
5477 of the result). */
5478 crtl->return_rtx = outgoing;
5479 }
5480
5481 /* Emit the actual code to clobber the return register. Don't emit
5482 it if clobber_after is a barrier, in which case the previous basic
5483 block certainly doesn't fall thru into the exit block. */
5484 if (!BARRIER_P (clobber_after))
5485 {
5486 start_sequence ();
5487 clobber_return_register ();
5488 rtx_insn *seq = get_insns ();
5489 end_sequence ();
5490
5491 emit_insn_after (seq, clobber_after);
5492 }
5493
5494 /* Output the label for the naked return from the function. */
5495 if (naked_return_label)
5496 emit_label (naked_return_label);
5497
5498 /* @@@ This is a kludge. We want to ensure that instructions that
5499 may trap are not moved into the epilogue by scheduling, because
5500 we don't always emit unwind information for the epilogue. */
5501 if (cfun->can_throw_non_call_exceptions
5502 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5503 emit_insn (gen_blockage ());
5504
5505 /* If stack protection is enabled for this function, check the guard. */
5506 if (crtl->stack_protect_guard)
5507 stack_protect_epilogue ();
5508
5509 /* If we had calls to alloca, and this machine needs
5510 an accurate stack pointer to exit the function,
5511 insert some code to save and restore the stack pointer. */
5512 if (! EXIT_IGNORE_STACK
5513 && cfun->calls_alloca)
5514 {
5515 rtx tem = 0;
5516
5517 start_sequence ();
5518 emit_stack_save (SAVE_FUNCTION, &tem);
5519 rtx_insn *seq = get_insns ();
5520 end_sequence ();
5521 emit_insn_before (seq, parm_birth_insn);
5522
5523 emit_stack_restore (SAVE_FUNCTION, tem);
5524 }
5525
5526 /* ??? This should no longer be necessary since the old stupid register
5527 allocator is no longer with us, but there are some parts of the compiler
5528 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
5529 their own lifetime info instead of using the general framework. */
5530 use_return_register ();
5531 }
5532
5533 rtx
5534 get_arg_pointer_save_area (void)
5535 {
5536 rtx ret = arg_pointer_save_area;
5537
5538 if (! ret)
5539 {
5540 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5541 arg_pointer_save_area = ret;
5542 }
5543
5544 if (! crtl->arg_pointer_save_area_init)
5545 {
5546 /* Save the arg pointer at the beginning of the function. The
5547 generated stack slot may not be a valid memory address, so we
5548 have to check it and fix it if necessary. */
5549 start_sequence ();
5550 emit_move_insn (validize_mem (copy_rtx (ret)),
5551 crtl->args.internal_arg_pointer);
5552 rtx_insn *seq = get_insns ();
5553 end_sequence ();
5554
5555 push_topmost_sequence ();
5556 emit_insn_after (seq, entry_of_function ());
5557 pop_topmost_sequence ();
5558
5559 crtl->arg_pointer_save_area_init = true;
5560 }
5561
5562 return ret;
5563 }
5564 \f
5565 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5566 for the first time. */
5567
5568 static void
5569 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5570 {
5571 rtx_insn *tmp;
5572 hash_table<insn_cache_hasher> *hash = *hashp;
5573
5574 if (hash == NULL)
5575 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5576
5577 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5578 {
5579 rtx *slot = hash->find_slot (tmp, INSERT);
5580 gcc_assert (*slot == NULL);
5581 *slot = tmp;
5582 }
5583 }
5584
5585 /* INSN has been duplicated, or replaced by COPY, perhaps by duplicating a
5586 basic block, by splitting, or by peepholes. If INSN is a prologue or
5587 epilogue insn, then record COPY as well. */
5588
5589 void
5590 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5591 {
5592 hash_table<insn_cache_hasher> *hash;
5593 rtx *slot;
5594
5595 hash = epilogue_insn_hash;
5596 if (!hash || !hash->find (insn))
5597 {
5598 hash = prologue_insn_hash;
5599 if (!hash || !hash->find (insn))
5600 return;
5601 }
5602
5603 slot = hash->find_slot (copy, INSERT);
5604 gcc_assert (*slot == NULL);
5605 *slot = copy;
5606 }
5607
5608 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5609 we can be running after reorg, SEQUENCE rtl is possible. */
5610
5611 static bool
5612 contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
5613 {
5614 if (hash == NULL)
5615 return false;
5616
5617 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5618 {
5619 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5620 int i;
5621 for (i = seq->len () - 1; i >= 0; i--)
5622 if (hash->find (seq->element (i)))
5623 return true;
5624 return false;
5625 }
5626
5627 return hash->find (const_cast<rtx> (insn)) != NULL;
5628 }
5629
5630 int
5631 prologue_epilogue_contains (const_rtx insn)
5632 {
5633 if (contains (insn, prologue_insn_hash))
5634 return 1;
5635 if (contains (insn, epilogue_insn_hash))
5636 return 1;
5637 return 0;
5638 }
5639
5640 /* Insert use of return register before the end of BB. */
5641
5642 static void
5643 emit_use_return_register_into_block (basic_block bb)
5644 {
5645 start_sequence ();
5646 use_return_register ();
5647 rtx_insn *seq = get_insns ();
5648 end_sequence ();
5649 rtx_insn *insn = BB_END (bb);
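  /* Don't separate a cc0 setter from its user; if the final insn of BB
     reads cc0, emit the USEs before the insn that sets cc0 instead.  */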
5650 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5651 insn = prev_cc0_setter (insn);
5652
5653 emit_insn_before (seq, insn);
5654 }
5655
5656
5657 /* Create a return pattern, either simple_return or return, depending on
5658 simple_p. */
5659
5660 static rtx
5661 gen_return_pattern (bool simple_p)
5662 {
5663 if (!HAVE_simple_return)
5664 gcc_assert (!simple_p);
5665
5666 return simple_p ? gen_simple_return () : gen_return ();
5667 }
5668
5669 /* Insert an appropriate return pattern at the end of block BB. This
5670 also means updating block_for_insn appropriately. SIMPLE_P is
5671 the same as in gen_return_pattern and passed to it. */
5672
5673 void
5674 emit_return_into_block (bool simple_p, basic_block bb)
5675 {
5676 rtx_jump_insn *jump = emit_jump_insn_after (gen_return_pattern (simple_p),
5677 BB_END (bb));
5678 rtx pat = PATTERN (jump);
5679 if (GET_CODE (pat) == PARALLEL)
5680 pat = XVECEXP (pat, 0, 0);
5681 gcc_assert (ANY_RETURN_P (pat));
5682 JUMP_LABEL (jump) = pat;
5683 }
5684
5685 /* Set JUMP_LABEL for a return insn. */
5686
5687 void
5688 set_return_jump_label (rtx_insn *returnjump)
5689 {
5690 rtx pat = PATTERN (returnjump);
5691 if (GET_CODE (pat) == PARALLEL)
5692 pat = XVECEXP (pat, 0, 0);
5693 if (ANY_RETURN_P (pat))
5694 JUMP_LABEL (returnjump) = pat;
5695 else
5696 JUMP_LABEL (returnjump) = ret_rtx;
5697 }
5698
5699 /* Return true if there are any active insns between HEAD and TAIL. */
5700 bool
5701 active_insn_between (rtx_insn *head, rtx_insn *tail)
5702 {
5703 while (tail)
5704 {
5705 if (active_insn_p (tail))
5706 return true;
5707 if (tail == head)
5708 return false;
5709 tail = PREV_INSN (tail);
5710 }
5711 return false;
5712 }
5713
5714 /* LAST_BB is a block that exits, and is empty of active instructions.
5715 Examine its predecessors for jumps that can be converted to
5716 (conditional) returns. */
5717 vec<edge>
5718 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5719 vec<edge> unconverted ATTRIBUTE_UNUSED)
5720 {
5721 int i;
5722 basic_block bb;
5723 edge_iterator ei;
5724 edge e;
5725 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5726
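  /* Gather the predecessor blocks up front: the conversion loop below
     redirects edges away from LAST_BB, which would disturb direct
     iteration over its predecessor edges.  */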
5727 FOR_EACH_EDGE (e, ei, last_bb->preds)
5728 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5729 src_bbs.quick_push (e->src);
5730
5731 rtx_insn *label = BB_HEAD (last_bb);
5732
5733 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5734 {
5735 rtx_insn *jump = BB_END (bb);
5736
5737 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5738 continue;
5739
5740 e = find_edge (bb, last_bb);
5741
5742 /* If we have an unconditional jump, we can replace that
5743 with a simple return instruction. */
5744 if (simplejump_p (jump))
5745 {
5746 /* The use of the return register might be present in the exit
5747 fallthru block. Either:
5748 - removing the use is safe, and we should remove the use in
5749 the exit fallthru block, or
5750 - removing the use is not safe, and we should add it here.
5751 For now, we conservatively choose the latter. Either of the
5752 two helps in crossjumping. */
5753 emit_use_return_register_into_block (bb);
5754
5755 emit_return_into_block (simple_p, bb);
5756 delete_insn (jump);
5757 }
5758
5759 /* If we have a conditional jump branching to the last
5760 block, we can try to replace that with a conditional
5761 return instruction. */
5762 else if (condjump_p (jump))
5763 {
5764 rtx dest;
5765
5766 if (simple_p)
5767 dest = simple_return_rtx;
5768 else
5769 dest = ret_rtx;
5770 if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
5771 {
5772 if (HAVE_simple_return && simple_p)
5773 {
5774 if (dump_file)
5775 fprintf (dump_file,
5776 "Failed to redirect bb %d branch.\n", bb->index);
5777 unconverted.safe_push (e);
5778 }
5779 continue;
5780 }
5781
5782 /* See comment in simplejump_p case above. */
5783 emit_use_return_register_into_block (bb);
5784
5785 /* If this block has only one successor, it both jumps
5786 and falls through to the fallthru block, so we can't
5787 delete the edge. */
5788 if (single_succ_p (bb))
5789 continue;
5790 }
5791 else
5792 {
5793 if (HAVE_simple_return && simple_p)
5794 {
5795 if (dump_file)
5796 fprintf (dump_file,
5797 "Failed to redirect bb %d branch.\n", bb->index);
5798 unconverted.safe_push (e);
5799 }
5800 continue;
5801 }
5802
5803 /* Fix up the CFG for the successful change we just made. */
5804 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5805 e->flags &= ~EDGE_CROSSING;
5806 }
5807 src_bbs.release ();
5808 return unconverted;
5809 }
5810
5811 /* Emit a return insn for the exit fallthru block. */
5812 basic_block
5813 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5814 {
5815 basic_block last_bb = exit_fallthru_edge->src;
5816
5817 if (JUMP_P (BB_END (last_bb)))
5818 {
5819 last_bb = split_edge (exit_fallthru_edge);
5820 exit_fallthru_edge = single_succ_edge (last_bb);
5821 }
5822 emit_barrier_after (BB_END (last_bb));
5823 emit_return_into_block (simple_p, last_bb);
5824 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5825 return last_bb;
5826 }
5827
5828
5829 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5830 this into place with notes indicating where the prologue ends and where
5831 the epilogue begins. Update the basic block information when possible.
5832
5833 Notes on epilogue placement:
5834 There are several kinds of edges to the exit block:
5835 * a single fallthru edge from LAST_BB
5836 * possibly, edges from blocks containing sibcalls
5837 * possibly, fake edges from infinite loops
5838
5839 The epilogue is always emitted on the fallthru edge from the last basic
5840 block in the function, LAST_BB, into the exit block.
5841
5842 If LAST_BB is empty except for a label, it is the target of every
5843 other basic block in the function that ends in a return. If a
5844 target has a return or simple_return pattern (possibly with
5845 conditional variants), these basic blocks can be changed so that a
5846 return insn is emitted into them, and their target is adjusted to
5847 the real exit block.
5848
5849 Notes on shrink wrapping: We implement a fairly conservative
5850 version of shrink-wrapping rather than the textbook one. We only
5851 generate a single prologue and a single epilogue. This is
5852 sufficient to catch a number of interesting cases involving early
5853 exits.
5854
5855 First, we identify the blocks that require the prologue to occur before
5856 them. These are the ones that modify a call-saved register, or reference
5857 any of the stack or frame pointer registers. To simplify things, we then
5858 mark everything reachable from these blocks as also requiring a prologue.
5859 This takes care of loops automatically, and avoids the need to examine
5860 whether MEMs reference the frame, since it is sufficient to check for
5861 occurrences of the stack or frame pointer.
5862
5863 We then compute the set of blocks for which the need for a prologue
5864 is anticipatable (borrowing terminology from the shrink-wrapping
5865 description in Muchnick's book). These are the blocks which either
5866 require a prologue themselves, or those that have only successors
5867 where the prologue is anticipatable. The prologue needs to be
5868 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5869 is not. For the moment, we ensure that only one such edge exists.
5870
5871 The epilogue is placed as described above, but we make a
5872 distinction between inserting return and simple_return patterns
5873 when modifying other blocks that end in a return. Blocks that end
5874 in a sibcall omit the sibcall_epilogue if the block is not in
5875 ANTIC. */
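/* As a rough illustration of the shrink-wrapping case described above
   (hypothetical user code, not part of this file):

     int f (int x)
     {
       if (x == 0)
         return 0;              /* fast path: no frame needed */
       return g (x) + h (x);    /* slow path: requires the prologue */
     }

   When the fast path touches neither a call-saved register nor the stack
   or frame pointer, the prologue can be emitted only on the edge leading
   to the slow path, and the fast path exits via a (simple_)return.  */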
5876
5877 void
5878 thread_prologue_and_epilogue_insns (void)
5879 {
5880 bool inserted;
5881 vec<edge> unconverted_simple_returns = vNULL;
5882 bitmap_head bb_flags;
5883 rtx_insn *returnjump;
5884 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
5885 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
5886 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5887 edge_iterator ei;
5888
5889 df_analyze ();
5890
5891 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5892
5893 inserted = false;
5894 epilogue_end = NULL;
5895 returnjump = NULL;
5896
5897 /* Can't deal with multiple successors of the entry block at the
5898 moment. Function should always have at least one entry
5899 point. */
5900 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5901 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5902 orig_entry_edge = entry_edge;
5903
5904 split_prologue_seq = NULL;
5905 if (flag_split_stack
5906 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5907 == NULL))
5908 {
5909 #ifndef HAVE_split_stack_prologue
5910 gcc_unreachable ();
5911 #else
5912 gcc_assert (HAVE_split_stack_prologue);
5913
5914 start_sequence ();
5915 emit_insn (gen_split_stack_prologue ());
5916 split_prologue_seq = get_insns ();
5917 end_sequence ();
5918
5919 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5920 set_insn_locations (split_prologue_seq, prologue_location);
5921 #endif
5922 }
5923
5924 prologue_seq = NULL;
5925 #ifdef HAVE_prologue
5926 if (HAVE_prologue)
5927 {
5928 start_sequence ();
5929 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
5930 emit_insn (seq);
5931
5932 /* Insert an explicit USE for the frame pointer
5933 if the profiling is on and the frame pointer is required. */
5934 if (crtl->profile && frame_pointer_needed)
5935 emit_use (hard_frame_pointer_rtx);
5936
5937 /* Retain a map of the prologue insns. */
5938 record_insns (seq, NULL, &prologue_insn_hash);
5939 emit_note (NOTE_INSN_PROLOGUE_END);
5940
5941 /* Ensure that instructions are not moved into the prologue when
5942 profiling is on. The call to the profiling routine can be
5943 emitted within the live range of a call-clobbered register. */
5944 if (!targetm.profile_before_prologue () && crtl->profile)
5945 emit_insn (gen_blockage ());
5946
5947 prologue_seq = get_insns ();
5948 end_sequence ();
5949 set_insn_locations (prologue_seq, prologue_location);
5950 }
5951 #endif
5952
5953 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5954
5955 /* Try to perform a kind of shrink-wrapping, making sure the
5956 prologue/epilogue is emitted only around those parts of the
5957 function that require it. */
5958
5959 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
5960
5961 if (split_prologue_seq != NULL_RTX)
5962 {
5963 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5964 inserted = true;
5965 }
5966 if (prologue_seq != NULL_RTX)
5967 {
5968 insert_insn_on_edge (prologue_seq, entry_edge);
5969 inserted = true;
5970 }
5971
5972 /* If the exit block has no non-fake predecessors, we don't need
5973 an epilogue. */
5974 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5975 if ((e->flags & EDGE_FAKE) == 0)
5976 break;
5977 if (e == NULL)
5978 goto epilogue_done;
5979
5980 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5981
5982 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5983
5984 if (HAVE_simple_return && entry_edge != orig_entry_edge)
5985 exit_fallthru_edge
5986 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5987 &unconverted_simple_returns,
5988 &returnjump);
5989 if (HAVE_return)
5990 {
5991 if (exit_fallthru_edge == NULL)
5992 goto epilogue_done;
5993
5994 if (optimize)
5995 {
5996 basic_block last_bb = exit_fallthru_edge->src;
5997
5998 if (LABEL_P (BB_HEAD (last_bb))
5999 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6000 convert_jumps_to_returns (last_bb, false, vNULL);
6001
6002 if (EDGE_COUNT (last_bb->preds) != 0
6003 && single_succ_p (last_bb))
6004 {
6005 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6006 epilogue_end = returnjump = BB_END (last_bb);
6007
6008 /* Emitting the return may add a basic block.
6009 Fix bb_flags for the added block. */
6010 if (HAVE_simple_return && last_bb != exit_fallthru_edge->src)
6011 bitmap_set_bit (&bb_flags, last_bb->index);
6012
6013 goto epilogue_done;
6014 }
6015 }
6016 }
6017
6018 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6019 this marker for the splits of EH_RETURN patterns, and nothing else
6020 uses the flag in the meantime. */
6021 epilogue_completed = 1;
6022
6023 #ifdef HAVE_eh_return
6024 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6025 some targets, these get split to a special version of the epilogue
6026 code. In order to be able to properly annotate these with unwind
6027 info, try to split them now. If we get a valid split, drop an
6028 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6029 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6030 {
6031 rtx_insn *prev, *last, *trial;
6032
6033 if (e->flags & EDGE_FALLTHRU)
6034 continue;
6035 last = BB_END (e->src);
6036 if (!eh_returnjump_p (last))
6037 continue;
6038
6039 prev = PREV_INSN (last);
6040 trial = try_split (PATTERN (last), last, 1);
6041 if (trial == last)
6042 continue;
6043
6044 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6045 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6046 }
6047 #endif
6048
6049 /* If nothing falls through into the exit block, we don't need an
6050 epilogue. */
6051
6052 if (exit_fallthru_edge == NULL)
6053 goto epilogue_done;
6054
6055 if (HAVE_epilogue)
6056 {
6057 start_sequence ();
6058 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6059 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
6060 if (seq)
6061 emit_jump_insn (seq);
6062
6063 /* Retain a map of the epilogue insns. */
6064 record_insns (seq, NULL, &epilogue_insn_hash);
6065 set_insn_locations (seq, epilogue_location);
6066
6067 seq = get_insns ();
6068 returnjump = get_last_insn ();
6069 end_sequence ();
6070
6071 insert_insn_on_edge (seq, exit_fallthru_edge);
6072 inserted = true;
6073
6074 if (JUMP_P (returnjump))
6075 set_return_jump_label (returnjump);
6076 }
6077 else
6078 {
6079 basic_block cur_bb;
6080
6081 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6082 goto epilogue_done;
6083 /* We have a fall-through edge to the exit block, the source is not
6084 at the end of the function, and there will be an assembler epilogue
6085 at the end of the function.
6086 We can't use force_nonfallthru here, because that would try to
6087 use return. Inserting a jump 'by hand' is extremely messy, so
6088 we take advantage of cfg_layout_finalize using
6089 fixup_fallthru_exit_predecessor. */
6090 cfg_layout_initialize (0);
6091 FOR_EACH_BB_FN (cur_bb, cfun)
6092 if (cur_bb->index >= NUM_FIXED_BLOCKS
6093 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6094 cur_bb->aux = cur_bb->next_bb;
6095 cfg_layout_finalize ();
6096 }
6097
6098 epilogue_done:
6099
6100 default_rtl_profile ();
6101
6102 if (inserted)
6103 {
6104 sbitmap blocks;
6105
6106 commit_edge_insertions ();
6107
6108 /* Look for basic blocks within the prologue insns. */
6109 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6110 bitmap_clear (blocks);
6111 bitmap_set_bit (blocks, entry_edge->dest->index);
6112 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6113 find_many_sub_basic_blocks (blocks);
6114 sbitmap_free (blocks);
6115
6116 /* The epilogue insns we inserted may cause the exit edge to no longer
6117 be fallthru. */
6118 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6119 {
6120 if (((e->flags & EDGE_FALLTHRU) != 0)
6121 && returnjump_p (BB_END (e->src)))
6122 e->flags &= ~EDGE_FALLTHRU;
6123 }
6124 }
6125
6126 if (HAVE_simple_return)
6127 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
6128 returnjump, unconverted_simple_returns);
6129
6130 #ifdef HAVE_sibcall_epilogue
6131 /* Emit sibling epilogues before any sibling call sites. */
6132 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
6133 ei_safe_edge (ei));
6134 )
6135 {
6136 basic_block bb = e->src;
6137 rtx_insn *insn = BB_END (bb);
6138 rtx ep_seq;
6139
6140 if (!CALL_P (insn)
6141 || ! SIBLING_CALL_P (insn)
6142 || (HAVE_simple_return && (entry_edge != orig_entry_edge
6143 && !bitmap_bit_p (&bb_flags, bb->index))))
6144 {
6145 ei_next (&ei);
6146 continue;
6147 }
6148
6149 ep_seq = gen_sibcall_epilogue ();
6150 if (ep_seq)
6151 {
6152 start_sequence ();
6153 emit_note (NOTE_INSN_EPILOGUE_BEG);
6154 emit_insn (ep_seq);
6155 rtx_insn *seq = get_insns ();
6156 end_sequence ();
6157
6158 /* Retain a map of the epilogue insns. Used in life analysis to
6159 avoid getting rid of sibcall epilogue insns. Do this before we
6160 actually emit the sequence. */
6161 record_insns (seq, NULL, &epilogue_insn_hash);
6162 set_insn_locations (seq, epilogue_location);
6163
6164 emit_insn_before (seq, insn);
6165 }
6166 ei_next (&ei);
6167 }
6168 #endif
6169
6170 if (epilogue_end)
6171 {
6172 rtx_insn *insn, *next;
6173
6174 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
6175 epilogue back before it, as those can be relevant for debug
6176 info generation. There is no need, however, to be quite so
6177 anal about the existence of such a note; nothing is done if
6178 none is found. */
6179 for (insn = epilogue_end; insn; insn = next)
6180 {
6181 next = NEXT_INSN (insn);
6182 if (NOTE_P (insn)
6183 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6184 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6185 }
6186 }
6187
6188 bitmap_clear (&bb_flags);
6189
6190 /* Threading the prologue and epilogue changes the artificial refs
6191 in the entry and exit blocks. */
6192 epilogue_completed = 1;
6193 df_update_entry_exit_and_calls ();
6194 }
6195
6196 /* Reposition the prologue-end and epilogue-begin notes after
6197 instruction scheduling. */
6198
6199 void
6200 reposition_prologue_and_epilogue_notes (void)
6201 {
6202 #if ! defined (HAVE_prologue) && ! defined (HAVE_sibcall_epilogue)
6203 if (!HAVE_epilogue)
6204 return;
6205 #endif
6206
6207 /* Since the hash table is created on demand, the fact that it is
6208 non-null is a signal that it is non-empty. */
6209 if (prologue_insn_hash != NULL)
6210 {
6211 size_t len = prologue_insn_hash->elements ();
6212 rtx_insn *insn, *last = NULL, *note = NULL;
6213
6214 /* Scan from the beginning until we reach the last prologue insn. */
6215 /* ??? While we do have the CFG intact, there are two problems:
6216 (1) The prologue can contain loops (typically probing the stack),
6217 which means that the end of the prologue isn't in the first bb.
6218 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6219 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6220 {
6221 if (NOTE_P (insn))
6222 {
6223 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6224 note = insn;
6225 }
6226 else if (contains (insn, prologue_insn_hash))
6227 {
6228 last = insn;
6229 if (--len == 0)
6230 break;
6231 }
6232 }
6233
6234 if (last)
6235 {
6236 if (note == NULL)
6237 {
6238 /* Scan forward looking for the PROLOGUE_END note. It should
6239 be right at the beginning of the block, possibly with other
6240 insn notes that got moved there. */
6241 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6242 {
6243 if (NOTE_P (note)
6244 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6245 break;
6246 }
6247 }
6248
6249 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6250 if (LABEL_P (last))
6251 last = NEXT_INSN (last);
6252 reorder_insns (note, note, last);
6253 }
6254 }
6255
6256 if (epilogue_insn_hash != NULL)
6257 {
6258 edge_iterator ei;
6259 edge e;
6260
6261 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6262 {
6263 rtx_insn *insn, *first = NULL, *note = NULL;
6264 basic_block bb = e->src;
6265
6266 /* Scan from the beginning until we reach the first epilogue insn. */
6267 FOR_BB_INSNS (bb, insn)
6268 {
6269 if (NOTE_P (insn))
6270 {
6271 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6272 {
6273 note = insn;
6274 if (first != NULL)
6275 break;
6276 }
6277 }
6278 else if (first == NULL && contains (insn, epilogue_insn_hash))
6279 {
6280 first = insn;
6281 if (note != NULL)
6282 break;
6283 }
6284 }
6285
6286 if (note)
6287 {
6288 /* If the function has a single basic block, and no real
6289 epilogue insns (e.g. sibcall with no cleanup), the
6290 epilogue note can get scheduled before the prologue
6291 note. If we have frame related prologue insns, having
6292 them scanned during the epilogue will result in a crash.
6293 In this case re-order the epilogue note to just before
6294 the last insn in the block. */
6295 if (first == NULL)
6296 first = BB_END (bb);
6297
6298 if (PREV_INSN (first) != note)
6299 reorder_insns (note, note, PREV_INSN (first));
6300 }
6301 }
6302 }
6303 }
6304
6305 /* Returns the name of function declared by FNDECL. */
6306 const char *
6307 fndecl_name (tree fndecl)
6308 {
6309 if (fndecl == NULL)
6310 return "(nofn)";
6311 return lang_hooks.decl_printable_name (fndecl, 2);
6312 }
6313
6314 /* Returns the name of function FN. */
6315 const char *
6316 function_name (struct function *fn)
6317 {
6318 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6319 return fndecl_name (fndecl);
6320 }
6321
6322 /* Returns the name of the current function. */
6323 const char *
6324 current_function_name (void)
6325 {
6326 return function_name (cfun);
6327 }
6328 \f
6329
6330 static unsigned int
6331 rest_of_handle_check_leaf_regs (void)
6332 {
6333 #ifdef LEAF_REGISTERS
6334 crtl->uses_only_leaf_regs
6335 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6336 #endif
6337 return 0;
6338 }
6339
6340 /* Insert a TYPE into the used types hash table of CFUN. */
6341
6342 static void
6343 used_types_insert_helper (tree type, struct function *func)
6344 {
6345 if (type != NULL && func != NULL)
6346 {
6347 if (func->used_types_hash == NULL)
6348 func->used_types_hash = hash_set<tree>::create_ggc (37);
6349
6350 func->used_types_hash->add (type);
6351 }
6352 }
6353
6354 /* Given a type, insert it into the used types hash table in cfun. */
6355 void
6356 used_types_insert (tree t)
6357 {
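  /* Strip pointer and array layers until reaching a type that carries its
     own name, so that e.g. for "int **" the type "int" is recorded, unless
     a typedef gives one of the intermediate pointer types a name.  */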
6358 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6359 if (TYPE_NAME (t))
6360 break;
6361 else
6362 t = TREE_TYPE (t);
6363 if (TREE_CODE (t) == ERROR_MARK)
6364 return;
6365 if (TYPE_NAME (t) == NULL_TREE
6366 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6367 t = TYPE_MAIN_VARIANT (t);
6368 if (debug_info_level > DINFO_LEVEL_NONE)
6369 {
6370 if (cfun)
6371 used_types_insert_helper (t, cfun);
6372 else
6373 {
6374 /* So this might be a type referenced by a global variable.
6375 Record that type so that we can later decide to emit its
6376 debug information. */
6377 vec_safe_push (types_used_by_cur_var_decl, t);
6378 }
6379 }
6380 }
6381
6382 /* Helper to hash a struct types_used_by_vars_entry. */
6383
6384 static hashval_t
6385 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6386 {
6387 gcc_assert (entry && entry->var_decl && entry->type);
6388
6389 return iterative_hash_object (entry->type,
6390 iterative_hash_object (entry->var_decl, 0));
6391 }
6392
6393 /* Hash function of the types_used_by_vars_entry hash table. */
6394
6395 hashval_t
6396 used_type_hasher::hash (types_used_by_vars_entry *entry)
6397 {
6398 return hash_types_used_by_vars_entry (entry);
6399 }
6400
6401 /* Equality function of the types_used_by_vars_entry hash table. */
6402
6403 bool
6404 used_type_hasher::equal (types_used_by_vars_entry *e1,
6405 types_used_by_vars_entry *e2)
6406 {
6407 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6408 }
6409
6410 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6411
6412 void
6413 types_used_by_var_decl_insert (tree type, tree var_decl)
6414 {
6415 if (type != NULL && var_decl != NULL)
6416 {
6417 types_used_by_vars_entry **slot;
6418 struct types_used_by_vars_entry e;
6419 e.var_decl = var_decl;
6420 e.type = type;
6421 if (types_used_by_vars_hash == NULL)
6422 types_used_by_vars_hash
6423 = hash_table<used_type_hasher>::create_ggc (37);
6424
6425 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6426 if (*slot == NULL)
6427 {
6428 struct types_used_by_vars_entry *entry;
6429 entry = ggc_alloc<types_used_by_vars_entry> ();
6430 entry->type = type;
6431 entry->var_decl = var_decl;
6432 *slot = entry;
6433 }
6434 }
6435 }
6436
6437 namespace {
6438
6439 const pass_data pass_data_leaf_regs =
6440 {
6441 RTL_PASS, /* type */
6442 "*leaf_regs", /* name */
6443 OPTGROUP_NONE, /* optinfo_flags */
6444 TV_NONE, /* tv_id */
6445 0, /* properties_required */
6446 0, /* properties_provided */
6447 0, /* properties_destroyed */
6448 0, /* todo_flags_start */
6449 0, /* todo_flags_finish */
6450 };
6451
6452 class pass_leaf_regs : public rtl_opt_pass
6453 {
6454 public:
6455 pass_leaf_regs (gcc::context *ctxt)
6456 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6457 {}
6458
6459 /* opt_pass methods: */
6460 virtual unsigned int execute (function *)
6461 {
6462 return rest_of_handle_check_leaf_regs ();
6463 }
6464
6465 }; // class pass_leaf_regs
6466
6467 } // anon namespace
6468
6469 rtl_opt_pass *
6470 make_pass_leaf_regs (gcc::context *ctxt)
6471 {
6472 return new pass_leaf_regs (ctxt);
6473 }
6474
6475 static unsigned int
6476 rest_of_handle_thread_prologue_and_epilogue (void)
6477 {
6478 if (optimize)
6479 cleanup_cfg (CLEANUP_EXPENSIVE);
6480
6481 /* On some machines, the prologue and epilogue code, or parts thereof,
6482 can be represented as RTL. Doing so lets us schedule insns between
6483 it and the rest of the code and also allows delayed branch
6484 scheduling to operate in the epilogue. */
6485 thread_prologue_and_epilogue_insns ();
6486
6487 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6488 see PR57320. */
6489 cleanup_cfg (0);
6490
6491 /* The stack usage info is finalized during prologue expansion. */
6492 if (flag_stack_usage_info)
6493 output_stack_usage ();
6494
6495 return 0;
6496 }
6497
6498 namespace {
6499
6500 const pass_data pass_data_thread_prologue_and_epilogue =
6501 {
6502 RTL_PASS, /* type */
6503 "pro_and_epilogue", /* name */
6504 OPTGROUP_NONE, /* optinfo_flags */
6505 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6506 0, /* properties_required */
6507 0, /* properties_provided */
6508 0, /* properties_destroyed */
6509 0, /* todo_flags_start */
6510 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6511 };
6512
6513 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6514 {
6515 public:
6516 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6517 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6518 {}
6519
6520 /* opt_pass methods: */
6521 virtual unsigned int execute (function *)
6522 {
6523 return rest_of_handle_thread_prologue_and_epilogue ();
6524 }
6525
6526 }; // class pass_thread_prologue_and_epilogue
6527
6528 } // anon namespace
6529
6530 rtl_opt_pass *
6531 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6532 {
6533 return new pass_thread_prologue_and_epilogue (ctxt);
6534 }
6535 \f
6536
6537 /* This mini-pass fixes fall-out from SSA in asm statements that have
6538 in-out constraints. Say you start with
6539
6540 orig = inout;
6541 asm ("": "+mr" (inout));
6542 use (orig);
6543
6544 which is transformed very early to use explicit output and match operands:
6545
6546 orig = inout;
6547 asm ("": "=mr" (inout) : "0" (inout));
6548 use (orig);
6549
6550 Or, after SSA and copyprop,
6551
6552 asm ("": "=mr" (inout_2) : "0" (inout_1));
6553 use (inout_1);
6554
6555 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6556 they represent two separate values, so they will get different pseudo
6557 registers during expansion. Then, since the two operands need to match
6558 per the constraints, but use different pseudo registers, reload can
6559 only register a reload for these operands. But reloads can only be
6560 satisfied by hardregs, not by memory, so we need a register for this
6561 reload, just because we are presented with non-matching operands.
6562 So, even though we allow memory for this operand, no memory can be
6563 used for it, just because the two operands don't match. This can
6564 cause reload failures on register-starved targets.
6565
6566 So it's a symptom of reload not being able to use memory for reloads
6567 or, alternatively, it's also a symptom of both operands not coming into
6568 reload as matching (in which case the pseudo could go to memory just
6569 fine, as the alternative allows it, and no reload would be necessary).
6570 We fix the latter problem here, by transforming
6571
6572 asm ("": "=mr" (inout_2) : "0" (inout_1));
6573
6574 back to
6575
6576 inout_2 = inout_1;
6577 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6578
6579 static void
6580 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6581 {
6582 int i;
6583 bool changed = false;
6584 rtx op = SET_SRC (p_sets[0]);
6585 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6586 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6587 bool *output_matched = XALLOCAVEC (bool, noutputs);
6588
6589 memset (output_matched, 0, noutputs * sizeof (bool));
6590 for (i = 0; i < ninputs; i++)
6591 {
6592 rtx input, output;
6593 rtx_insn *insns;
6594 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6595 char *end;
6596 int match, j;
6597
6598 if (*constraint == '%')
6599 constraint++;
6600
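      /* A matching constraint is just a decimal operand number such as "0";
         skip inputs whose constraint does not start with one.  */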
6601 match = strtoul (constraint, &end, 10);
6602 if (end == constraint)
6603 continue;
6604
6605 gcc_assert (match < noutputs);
6606 output = SET_DEST (p_sets[match]);
6607 input = RTVEC_ELT (inputs, i);
6608 /* Only do the transformation for pseudos. */
6609 if (! REG_P (output)
6610 || rtx_equal_p (output, input)
6611 || (GET_MODE (input) != VOIDmode
6612 && GET_MODE (input) != GET_MODE (output)))
6613 continue;
6614
6615 /* We can't do anything if the output is also used as input,
6616 as we're going to overwrite it. */
6617 for (j = 0; j < ninputs; j++)
6618 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6619 break;
6620 if (j != ninputs)
6621 continue;
6622
6623 /* Avoid changing the same input several times. For
6624 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6625 only change in once (to out1), rather than changing it
6626 first to out1 and afterwards to out2. */
6627 if (i > 0)
6628 {
6629 for (j = 0; j < noutputs; j++)
6630 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6631 break;
6632 if (j != noutputs)
6633 continue;
6634 }
6635 output_matched[match] = true;
6636
6637 start_sequence ();
6638 emit_move_insn (output, input);
6639 insns = get_insns ();
6640 end_sequence ();
6641 emit_insn_before (insns, insn);
6642
6643 /* Now replace all mentions of the input with output. We can't
6644 just replace the occurrence in inputs[i], as the register might
6645 also be used in some other input (or even in an address of an
6646 output), which would mean possibly increasing the number of
6647 inputs by one (namely 'output' in addition), which might pose
6648 a too complicated problem for reload to solve. E.g. this situation:
6649
6650 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6651
6652 Here 'input' is used in two occurrences as input (once for the
6653 input operand, once for the address in the second output operand).
6654 If we would replace only the occurrence of the input operand (to
6655 make the matching) we would be left with this:
6656
6657 output = input
6658 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6659
6660 Now we suddenly have two different input values (containing the same
6661 value, but different pseudos) where we formerly had only one.
6662 With more complicated asms this might lead to reload failures
6663 which wouldn't have happened without this pass. So, iterate over
6664 all operands and replace all occurrences of the register used. */
6665 for (j = 0; j < noutputs; j++)
6666 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6667 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6668 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6669 input, output);
6670 for (j = 0; j < ninputs; j++)
6671 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6672 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6673 input, output);
6674
6675 changed = true;
6676 }
6677
6678 if (changed)
6679 df_insn_rescan (insn);
6680 }
6681
6682 /* Add the decl D to the local_decls list of FUN. */
6683
6684 void
6685 add_local_decl (struct function *fun, tree d)
6686 {
6687 gcc_assert (TREE_CODE (d) == VAR_DECL);
6688 vec_safe_push (fun->local_decls, d);
6689 }
6690
6691 namespace {
6692
6693 const pass_data pass_data_match_asm_constraints =
6694 {
6695 RTL_PASS, /* type */
6696 "asmcons", /* name */
6697 OPTGROUP_NONE, /* optinfo_flags */
6698 TV_NONE, /* tv_id */
6699 0, /* properties_required */
6700 0, /* properties_provided */
6701 0, /* properties_destroyed */
6702 0, /* todo_flags_start */
6703 0, /* todo_flags_finish */
6704 };
6705
6706 class pass_match_asm_constraints : public rtl_opt_pass
6707 {
6708 public:
6709 pass_match_asm_constraints (gcc::context *ctxt)
6710 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6711 {}
6712
6713 /* opt_pass methods: */
6714 virtual unsigned int execute (function *);
6715
6716 }; // class pass_match_asm_constraints
6717
6718 unsigned
6719 pass_match_asm_constraints::execute (function *fun)
6720 {
6721 basic_block bb;
6722 rtx_insn *insn;
6723 rtx pat, *p_sets;
6724 int noutputs;
6725
6726 if (!crtl->has_asm_statement)
6727 return 0;
6728
6729 df_set_flags (DF_DEFER_INSN_RESCAN);
6730 FOR_EACH_BB_FN (bb, fun)
6731 {
6732 FOR_BB_INSNS (bb, insn)
6733 {
6734 if (!INSN_P (insn))
6735 continue;
6736
6737 pat = PATTERN (insn);
6738 if (GET_CODE (pat) == PARALLEL)
6739 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6740 else if (GET_CODE (pat) == SET)
6741 p_sets = &PATTERN (insn), noutputs = 1;
6742 else
6743 continue;
6744
6745 if (GET_CODE (*p_sets) == SET
6746 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6747 match_asm_constraints_1 (insn, p_sets, noutputs);
6748 }
6749 }
6750
6751 return TODO_df_finish;
6752 }
6753
6754 } // anon namespace
6755
6756 rtl_opt_pass *
6757 make_pass_match_asm_constraints (gcc::context *ctxt)
6758 {
6759 return new pass_match_asm_constraints (ctxt);
6760 }
6761
6762
6763 #include "gt-function.h"