1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "rtl-error.h"
39 #include "tree.h"
40 #include "stor-layout.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "hashtab.h"
46 #include "hash-set.h"
47 #include "vec.h"
48 #include "machmode.h"
49 #include "hard-reg-set.h"
50 #include "input.h"
51 #include "function.h"
52 #include "expr.h"
53 #include "insn-codes.h"
54 #include "optabs.h"
55 #include "libfuncs.h"
56 #include "regs.h"
57 #include "insn-config.h"
58 #include "recog.h"
59 #include "output.h"
60 #include "tm_p.h"
61 #include "langhooks.h"
62 #include "target.h"
63 #include "common/common-target.h"
64 #include "gimple-expr.h"
65 #include "gimplify.h"
66 #include "tree-pass.h"
67 #include "predict.h"
68 #include "dominance.h"
69 #include "cfg.h"
70 #include "cfgrtl.h"
71 #include "cfganal.h"
72 #include "cfgbuild.h"
73 #include "cfgcleanup.h"
74 #include "basic-block.h"
75 #include "df.h"
76 #include "params.h"
77 #include "bb-reorder.h"
78 #include "shrink-wrap.h"
79 #include "toplev.h"
80 #include "rtl-iter.h"
81 #include "tree-chkp.h"
82 #include "rtl-chkp.h"
83
84 /* So we can assign to cfun in this file. */
85 #undef cfun
86
87 #ifndef STACK_ALIGNMENT_NEEDED
88 #define STACK_ALIGNMENT_NEEDED 1
89 #endif
90
91 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
92
93 /* Round a value down to the largest multiple of the required alignment
94 that is not greater than it.  Avoid using division in case the value
95 is negative.  Assume the alignment is a power of two.  */
96 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
97
98 /* Similar, but round up to the smallest multiple of the alignment
99 that is not less than the value.  */
100 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
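/* A worked example, assuming ALIGN == 8:
     FLOOR_ROUND (37, 8)  == 32     CEIL_ROUND (37, 8)  == 40
     FLOOR_ROUND (-37, 8) == -40    CEIL_ROUND (-37, 8) == -32
   Both macros round in the intended direction even for negative
   frame offsets, which division-based rounding would not guarantee.  */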
101
102 /* Nonzero once virtual register instantiation has been done.
103 assign_stack_local uses frame_pointer_rtx when this is nonzero.
104 calls.c:emit_library_call_value_1 uses it to set up
105 post-instantiation libcalls. */
106 int virtuals_instantiated;
107
108 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
109 static GTY(()) int funcdef_no;
110
111 /* This variable holds a pointer to the function used to create
112 target specific, per-function data structures.  */
113 struct machine_function * (*init_machine_status) (void);
114
115 /* The currently compiled function. */
116 struct function *cfun = 0;
117
118 /* These hashes record the prologue and epilogue insns. */
119 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
120 htab_t prologue_insn_hash;
121 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
122 htab_t epilogue_insn_hash;
123 \f
124
125 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
126 vec<tree, va_gc> *types_used_by_cur_var_decl;
127
128 /* Forward declarations. */
129
130 static struct temp_slot *find_temp_slot_from_address (rtx);
131 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
132 static void pad_below (struct args_size *, machine_mode, tree);
133 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
134 static int all_blocks (tree, tree *);
135 static tree *get_block_vector (tree, int *);
136 extern tree debug_find_var_in_block_tree (tree, tree);
137 /* We always define `record_insns' even if it's not used so that we
138 can always export `prologue_epilogue_contains'. */
139 static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
140 static bool contains (const_rtx, htab_t);
141 static void prepare_function_start (void);
142 static void do_clobber_return_reg (rtx, void *);
143 static void do_use_return_reg (rtx, void *);
144 \f
145 /* Stack of nested functions: used to save and restore cfun while
146 nested functions are being compiled.  */
147
148 typedef struct function *function_p;
149
150 static vec<function_p> function_context_stack;
151
152 /* Save the current context for compilation of a nested function.
153 This is called from language-specific code. */
154
155 void
156 push_function_context (void)
157 {
158 if (cfun == 0)
159 allocate_struct_function (NULL, false);
160
161 function_context_stack.safe_push (cfun);
162 set_cfun (NULL);
163 }
164
165 /* Restore the last saved context, at the end of a nested function.
166 This function is called from language-specific code. */
167
168 void
169 pop_function_context (void)
170 {
171 struct function *p = function_context_stack.pop ();
172 set_cfun (p);
173 current_function_decl = p->decl;
174
175 /* Reset variables that have known state during rtx generation. */
176 virtuals_instantiated = 0;
177 generating_concat_p = 1;
178 }
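/* A minimal sketch of how a front end might bracket compilation of a
   nested function with the two routines above; compile_nested_body is
   a hypothetical language-specific helper, not a real GCC entry point.  */
#if 0
  push_function_context ();       /* Save the enclosing function's cfun.  */
  compile_nested_body (fndecl);   /* Expand the nested function's body.  */
  pop_function_context ();        /* Restore the enclosing context.  */
#endif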
179
180 /* Clear out all parts of the state in F that can safely be discarded
181 after the function has been parsed, but not compiled, to let
182 garbage collection reclaim the memory. */
183
184 void
185 free_after_parsing (struct function *f)
186 {
187 f->language = 0;
188 }
189
190 /* Clear out all parts of the state in F that can safely be discarded
191 after the function has been compiled, to let garbage collection
192 reclaim the memory. */
193
194 void
195 free_after_compilation (struct function *f)
196 {
197 prologue_insn_hash = NULL;
198 epilogue_insn_hash = NULL;
199
200 free (crtl->emit.regno_pointer_align);
201
202 memset (crtl, 0, sizeof (struct rtl_data));
203 f->eh = NULL;
204 f->machine = NULL;
205 f->cfg = NULL;
206
207 regno_reg_rtx = NULL;
208 }
209 \f
210 /* Return size needed for stack frame based on slots so far allocated.
211 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
212 the caller may have to do that. */
213
214 HOST_WIDE_INT
215 get_frame_size (void)
216 {
217 if (FRAME_GROWS_DOWNWARD)
218 return -frame_offset;
219 else
220 return frame_offset;
221 }
222
223 /* Issue an error message and return TRUE if frame OFFSET overflows in
224 the signed target pointer arithmetic for function FUNC.  Otherwise
225 return FALSE.  */
226
227 bool
228 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
229 {
230 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
231
232 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
233 /* Leave room for the fixed part of the frame. */
234 - 64 * UNITS_PER_WORD)
235 {
236 error_at (DECL_SOURCE_LOCATION (func),
237 "total size of local objects too large");
238 return TRUE;
239 }
240
241 return FALSE;
242 }
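/* For example, on a 32-bit target with 4-byte words, the check above
   rejects frames larger than 2^31 - 64*4 = 2147483392 bytes, keeping
   64 words in reserve for the fixed part of the frame.  */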
243
244 /* Return stack slot alignment in bits for TYPE and MODE. */
245
246 static unsigned int
247 get_stack_local_alignment (tree type, machine_mode mode)
248 {
249 unsigned int alignment;
250
251 if (mode == BLKmode)
252 alignment = BIGGEST_ALIGNMENT;
253 else
254 alignment = GET_MODE_ALIGNMENT (mode);
255
256 /* Allow the front-end to (possibly) increase the alignment of this
257 stack slot.  */
258 if (! type)
259 type = lang_hooks.types.type_for_mode (mode, 0);
260
261 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
262 }
263
264 /* Determine whether it is possible to fit a stack slot of size SIZE and
265 alignment ALIGNMENT into an area in the stack frame that starts at
266 frame offset START and has a length of LENGTH. If so, store the frame
267 offset to be used for the stack slot in *POFFSET and return true;
268 return false otherwise. This function will extend the frame size when
269 given a start/length pair that lies at the end of the frame. */
270
271 static bool
272 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
273 HOST_WIDE_INT size, unsigned int alignment,
274 HOST_WIDE_INT *poffset)
275 {
276 HOST_WIDE_INT this_frame_offset;
277 int frame_off, frame_alignment, frame_phase;
278
279 /* Calculate how many bytes the start of local variables is offset
280 from stack alignment.  */
281 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
282 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
283 frame_phase = frame_off ? frame_alignment - frame_off : 0;
284
285 /* Round the frame offset to the specified alignment. */
286
287 /* We must be careful here, since FRAME_OFFSET might be negative and
288 division with a negative dividend isn't as well defined as we might
289 like. So we instead assume that ALIGNMENT is a power of two and
290 use logical operations which are unambiguous. */
291 if (FRAME_GROWS_DOWNWARD)
292 this_frame_offset
293 = (FLOOR_ROUND (start + length - size - frame_phase,
294 (unsigned HOST_WIDE_INT) alignment)
295 + frame_phase);
296 else
297 this_frame_offset
298 = (CEIL_ROUND (start - frame_phase,
299 (unsigned HOST_WIDE_INT) alignment)
300 + frame_phase);
301
302 /* See if it fits. If this space is at the edge of the frame,
303 consider extending the frame to make it fit. Our caller relies on
304 this when allocating a new slot. */
305 if (frame_offset == start && this_frame_offset < frame_offset)
306 frame_offset = this_frame_offset;
307 else if (this_frame_offset < start)
308 return false;
309 else if (start + length == frame_offset
310 && this_frame_offset + size > start + length)
311 frame_offset = this_frame_offset + size;
312 else if (this_frame_offset + size > start + length)
313 return false;
314
315 *poffset = this_frame_offset;
316 return true;
317 }
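/* A worked example, assuming FRAME_GROWS_DOWNWARD, a zero frame_phase,
   and a free area with START == -24 and LENGTH == 24: a request for
   SIZE == 8 and ALIGNMENT == 8 computes
     this_frame_offset = FLOOR_ROUND (-24 + 24 - 8, 8) = -8,
   which lies within the area, so *POFFSET is set to -8 and true is
   returned.  */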
318
319 /* Create a new frame_space structure describing free space in the stack
320 frame beginning at START and ending at END, and chain it into the
321 function's frame_space_list. */
322
323 static void
324 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
325 {
326 struct frame_space *space = ggc_alloc<frame_space> ();
327 space->next = crtl->frame_space_list;
328 crtl->frame_space_list = space;
329 space->start = start;
330 space->length = end - start;
331 }
332
333 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
334 with machine mode MODE.
335
336 ALIGN controls the amount of alignment for the address of the slot:
337 0 means according to MODE,
338 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
339 -2 means use BITS_PER_UNIT,
340 positive specifies alignment boundary in bits.
341
342 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
343 alignment and ASLK_RECORD_PAD bit set if we should remember
344 extra space we allocated for alignment purposes. When we are
345 called from assign_stack_temp_for_type, it is not set so we don't
346 track the same stack slot in two independent lists.
347
348 We do not round to stack_boundary here. */
349
350 rtx
351 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
352 int align, int kind)
353 {
354 rtx x, addr;
355 int bigend_correction = 0;
356 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
357 unsigned int alignment, alignment_in_bits;
358
359 if (align == 0)
360 {
361 alignment = get_stack_local_alignment (NULL, mode);
362 alignment /= BITS_PER_UNIT;
363 }
364 else if (align == -1)
365 {
366 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
367 size = CEIL_ROUND (size, alignment);
368 }
369 else if (align == -2)
370 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
371 else
372 alignment = align / BITS_PER_UNIT;
373
374 alignment_in_bits = alignment * BITS_PER_UNIT;
375
376 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
377 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
378 {
379 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
380 alignment = alignment_in_bits / BITS_PER_UNIT;
381 }
382
383 if (SUPPORTS_STACK_ALIGNMENT)
384 {
385 if (crtl->stack_alignment_estimated < alignment_in_bits)
386 {
387 if (!crtl->stack_realign_processed)
388 crtl->stack_alignment_estimated = alignment_in_bits;
389 else
390 {
391 /* If stack is realigned and stack alignment value
392 hasn't been finalized, it is OK not to increase
393 stack_alignment_estimated. The bigger alignment
394 requirement is recorded in stack_alignment_needed
395 below. */
396 gcc_assert (!crtl->stack_realign_finalized);
397 if (!crtl->stack_realign_needed)
398 {
399 /* It is OK to reduce the alignment as long as the
400 requested size is 0 or the estimated stack
401 alignment >= mode alignment. */
402 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
403 || size == 0
404 || (crtl->stack_alignment_estimated
405 >= GET_MODE_ALIGNMENT (mode)));
406 alignment_in_bits = crtl->stack_alignment_estimated;
407 alignment = alignment_in_bits / BITS_PER_UNIT;
408 }
409 }
410 }
411 }
412
413 if (crtl->stack_alignment_needed < alignment_in_bits)
414 crtl->stack_alignment_needed = alignment_in_bits;
415 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
416 crtl->max_used_stack_slot_alignment = alignment_in_bits;
417
418 if (mode != BLKmode || size != 0)
419 {
420 if (kind & ASLK_RECORD_PAD)
421 {
422 struct frame_space **psp;
423
424 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
425 {
426 struct frame_space *space = *psp;
427 if (!try_fit_stack_local (space->start, space->length, size,
428 alignment, &slot_offset))
429 continue;
430 *psp = space->next;
431 if (slot_offset > space->start)
432 add_frame_space (space->start, slot_offset);
433 if (slot_offset + size < space->start + space->length)
434 add_frame_space (slot_offset + size,
435 space->start + space->length);
436 goto found_space;
437 }
438 }
439 }
440 else if (!STACK_ALIGNMENT_NEEDED)
441 {
442 slot_offset = frame_offset;
443 goto found_space;
444 }
445
446 old_frame_offset = frame_offset;
447
448 if (FRAME_GROWS_DOWNWARD)
449 {
450 frame_offset -= size;
451 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
452
453 if (kind & ASLK_RECORD_PAD)
454 {
455 if (slot_offset > frame_offset)
456 add_frame_space (frame_offset, slot_offset);
457 if (slot_offset + size < old_frame_offset)
458 add_frame_space (slot_offset + size, old_frame_offset);
459 }
460 }
461 else
462 {
463 frame_offset += size;
464 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
465
466 if (kind & ASLK_RECORD_PAD)
467 {
468 if (slot_offset > old_frame_offset)
469 add_frame_space (old_frame_offset, slot_offset);
470 if (slot_offset + size < frame_offset)
471 add_frame_space (slot_offset + size, frame_offset);
472 }
473 }
474
475 found_space:
476 /* On a big-endian machine, if we are allocating more space than we will use,
477 use the least significant bytes of those that are allocated. */
478 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
479 bigend_correction = size - GET_MODE_SIZE (mode);
480
481 /* If we have already instantiated virtual registers, return the actual
482 address relative to the frame pointer. */
483 if (virtuals_instantiated)
484 addr = plus_constant (Pmode, frame_pointer_rtx,
485 trunc_int_for_mode
486 (slot_offset + bigend_correction
487 + STARTING_FRAME_OFFSET, Pmode));
488 else
489 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
490 trunc_int_for_mode
491 (slot_offset + bigend_correction,
492 Pmode));
493
494 x = gen_rtx_MEM (mode, addr);
495 set_mem_align (x, alignment_in_bits);
496 MEM_NOTRAP_P (x) = 1;
497
498 stack_slot_list
499 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
500
501 if (frame_offset_overflow (frame_offset, current_function_decl))
502 frame_offset = 0;
503
504 return x;
505 }
506
507 /* Wrapper around assign_stack_local_1 that passes ASLK_RECORD_PAD as KIND.  */
508
509 rtx
510 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
511 {
512 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
513 }
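/* A minimal sketch of the ALIGN conventions documented above; the
   modes and sizes are illustrative only.  */
#if 0
  rtx a = assign_stack_local (SImode, 4, 0);     /* Align as per SImode.  */
  rtx b = assign_stack_local (BLKmode, 32, -1);  /* BIGGEST_ALIGNMENT.  */
  rtx c = assign_stack_local (BLKmode, 7, -2);   /* Byte alignment only.  */
  rtx d = assign_stack_local (DImode, 8, 128);   /* 128-bit boundary.  */
#endif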
514 \f
515 /* In order to evaluate some expressions, such as function calls returning
516 structures in memory, we need to temporarily allocate stack locations.
517 We record each allocated temporary in the following structure.
518
519 Associated with each temporary slot is a nesting level. When we pop up
520 one level, all temporaries associated with the previous level are freed.
521 Normally, all temporaries are freed after the execution of the statement
522 in which they were created. However, if we are inside a ({...}) grouping,
523 the result may be in a temporary and hence must be preserved. If the
524 result could be in a temporary, we preserve it if we can determine which
525 one it is in. If we cannot determine which temporary may contain the
526 result, all temporaries are preserved. A temporary is preserved by
527 pretending it was allocated at the previous nesting level. */
528
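/* A minimal sketch of the nesting discipline described above; SImode
   and the size are illustrative only.  */
#if 0
  push_temp_slots ();                     /* Enter a new nesting level.  */
  rtx t = assign_stack_temp (SImode, 4);  /* Temporary at this level.  */
  /* ... emit code that stores into and reads from T ...  */
  preserve_temp_slots (t);                /* Keep T alive one level up.  */
  pop_temp_slots ();                      /* Free the remaining slots.  */
#endif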
529 struct GTY(()) temp_slot {
530 /* Points to next temporary slot. */
531 struct temp_slot *next;
532 /* Points to previous temporary slot. */
533 struct temp_slot *prev;
534 /* The rtx used to reference the slot.  */
535 rtx slot;
536 /* The size, in units, of the slot. */
537 HOST_WIDE_INT size;
538 /* The type of the object in the slot, or zero if it doesn't correspond
539 to a type. We use this to determine whether a slot can be reused.
540 It can be reused if objects of the type of the new slot will always
541 conflict with objects of the type of the old slot. */
542 tree type;
543 /* The alignment (in bits) of the slot. */
544 unsigned int align;
545 /* Nonzero if this temporary is currently in use. */
546 char in_use;
547 /* Nesting level at which this slot is being used. */
548 int level;
549 /* The offset of the slot from the frame_pointer, including extra space
550 for alignment. This info is for combine_temp_slots. */
551 HOST_WIDE_INT base_offset;
552 /* The size of the slot, including extra space for alignment. This
553 info is for combine_temp_slots. */
554 HOST_WIDE_INT full_size;
555 };
556
557 /* Entry for the below hash table. */
558 struct GTY((for_user)) temp_slot_address_entry {
559 hashval_t hash;
560 rtx address;
561 struct temp_slot *temp_slot;
562 };
563
564 struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
565 {
566 static hashval_t hash (temp_slot_address_entry *);
567 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
568 };
569
570 /* A table of addresses that represent a stack slot. The table is a mapping
571 from address RTXen to a temp slot. */
572 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
573 static size_t n_temp_slots_in_use;
574
575 /* Removes temporary slot TEMP from LIST. */
576
577 static void
578 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
579 {
580 if (temp->next)
581 temp->next->prev = temp->prev;
582 if (temp->prev)
583 temp->prev->next = temp->next;
584 else
585 *list = temp->next;
586
587 temp->prev = temp->next = NULL;
588 }
589
590 /* Inserts temporary slot TEMP to LIST. */
591
592 static void
593 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
594 {
595 temp->next = *list;
596 if (*list)
597 (*list)->prev = temp;
598 temp->prev = NULL;
599 *list = temp;
600 }
601
602 /* Returns the list of used temp slots at LEVEL. */
603
604 static struct temp_slot **
605 temp_slots_at_level (int level)
606 {
607 if (level >= (int) vec_safe_length (used_temp_slots))
608 vec_safe_grow_cleared (used_temp_slots, level + 1);
609
610 return &(*used_temp_slots)[level];
611 }
612
613 /* Returns the maximal temporary slot level. */
614
615 static int
616 max_slot_level (void)
617 {
618 if (!used_temp_slots)
619 return -1;
620
621 return used_temp_slots->length () - 1;
622 }
623
624 /* Moves temporary slot TEMP to LEVEL. */
625
626 static void
627 move_slot_to_level (struct temp_slot *temp, int level)
628 {
629 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
630 insert_slot_to_list (temp, temp_slots_at_level (level));
631 temp->level = level;
632 }
633
634 /* Make temporary slot TEMP available. */
635
636 static void
637 make_slot_available (struct temp_slot *temp)
638 {
639 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
640 insert_slot_to_list (temp, &avail_temp_slots);
641 temp->in_use = 0;
642 temp->level = -1;
643 n_temp_slots_in_use--;
644 }
645
646 /* Compute the hash value for an address -> temp slot mapping.
647 The value is cached on the mapping entry. */
648 static hashval_t
649 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
650 {
651 int do_not_record = 0;
652 return hash_rtx (t->address, GET_MODE (t->address),
653 &do_not_record, NULL, false);
654 }
655
656 /* Return the hash value for an address -> temp slot mapping. */
657 hashval_t
658 temp_address_hasher::hash (temp_slot_address_entry *t)
659 {
660 return t->hash;
661 }
662
663 /* Compare two address -> temp slot mapping entries. */
664 bool
665 temp_address_hasher::equal (temp_slot_address_entry *t1,
666 temp_slot_address_entry *t2)
667 {
668 return exp_equiv_p (t1->address, t2->address, 0, true);
669 }
670
671 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
672 static void
673 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
674 {
675 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
676 t->address = address;
677 t->temp_slot = temp_slot;
678 t->hash = temp_slot_address_compute_hash (t);
679 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
680 }
681
682 /* Remove an address -> temp slot mapping entry if the temp slot is
683 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
684 int
685 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
686 {
687 const struct temp_slot_address_entry *t = *slot;
688 if (! t->temp_slot->in_use)
689 temp_slot_address_table->clear_slot (slot);
690 return 1;
691 }
692
693 /* Remove all mappings of addresses to unused temp slots. */
694 static void
695 remove_unused_temp_slot_addresses (void)
696 {
697 /* Use quicker clearing if there aren't any active temp slots. */
698 if (n_temp_slots_in_use)
699 temp_slot_address_table->traverse
700 <void *, remove_unused_temp_slot_addresses_1> (NULL);
701 else
702 temp_slot_address_table->empty ();
703 }
704
705 /* Find the temp slot corresponding to the object at address X. */
706
707 static struct temp_slot *
708 find_temp_slot_from_address (rtx x)
709 {
710 struct temp_slot *p;
711 struct temp_slot_address_entry tmp, *t;
712
713 /* First try the easy way:
714 See if X exists in the address -> temp slot mapping. */
715 tmp.address = x;
716 tmp.temp_slot = NULL;
717 tmp.hash = temp_slot_address_compute_hash (&tmp);
718 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
719 if (t)
720 return t->temp_slot;
721
722 /* If we have a sum involving a register, see if it points to a temp
723 slot. */
724 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
725 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
726 return p;
727 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
728 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
729 return p;
730
731 /* Last resort: Address is a virtual stack var address. */
732 if (GET_CODE (x) == PLUS
733 && XEXP (x, 0) == virtual_stack_vars_rtx
734 && CONST_INT_P (XEXP (x, 1)))
735 {
736 int i;
737 for (i = max_slot_level (); i >= 0; i--)
738 for (p = *temp_slots_at_level (i); p; p = p->next)
739 {
740 if (INTVAL (XEXP (x, 1)) >= p->base_offset
741 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
742 return p;
743 }
744 }
745
746 return NULL;
747 }
748 \f
749 /* Allocate a temporary stack slot and record it for possible later
750 reuse.
751
752 MODE is the machine mode to be given to the returned rtx.
753
754 SIZE is the size in units of the space required. We do no rounding here
755 since assign_stack_local will do any required rounding.
756
757 TYPE is the type that will be used for the stack slot. */
758
759 rtx
760 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
761 tree type)
762 {
763 unsigned int align;
764 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
765 rtx slot;
766
767 /* If SIZE is -1 it means that somebody tried to allocate a temporary
768 of a variable size. */
769 gcc_assert (size != -1);
770
771 align = get_stack_local_alignment (type, mode);
772
773 /* Try to find an available, already-allocated temporary of the proper
774 mode which meets the size and alignment requirements. Choose the
775 smallest one with the closest alignment.
776
777 If assign_stack_temp is called outside of the tree->rtl expansion,
778 we cannot reuse the stack slots (that may still refer to
779 VIRTUAL_STACK_VARS_REGNUM). */
780 if (!virtuals_instantiated)
781 {
782 for (p = avail_temp_slots; p; p = p->next)
783 {
784 if (p->align >= align && p->size >= size
785 && GET_MODE (p->slot) == mode
786 && objects_must_conflict_p (p->type, type)
787 && (best_p == 0 || best_p->size > p->size
788 || (best_p->size == p->size && best_p->align > p->align)))
789 {
790 if (p->align == align && p->size == size)
791 {
792 selected = p;
793 cut_slot_from_list (selected, &avail_temp_slots);
794 best_p = 0;
795 break;
796 }
797 best_p = p;
798 }
799 }
800 }
801
802 /* Make our best, if any, the one to use. */
803 if (best_p)
804 {
805 selected = best_p;
806 cut_slot_from_list (selected, &avail_temp_slots);
807
808 /* If there are enough aligned bytes left over, make them into a new
809 temp_slot so that the extra bytes don't get wasted. Do this only
810 for BLKmode slots, so that we can be sure of the alignment. */
811 if (GET_MODE (best_p->slot) == BLKmode)
812 {
813 int alignment = best_p->align / BITS_PER_UNIT;
814 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
815
816 if (best_p->size - rounded_size >= alignment)
817 {
818 p = ggc_alloc<temp_slot> ();
819 p->in_use = 0;
820 p->size = best_p->size - rounded_size;
821 p->base_offset = best_p->base_offset + rounded_size;
822 p->full_size = best_p->full_size - rounded_size;
823 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
824 p->align = best_p->align;
825 p->type = best_p->type;
826 insert_slot_to_list (p, &avail_temp_slots);
827
828 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
829 stack_slot_list);
830
831 best_p->size = rounded_size;
832 best_p->full_size = rounded_size;
833 }
834 }
835 }
836
837 /* If we still didn't find one, make a new temporary. */
838 if (selected == 0)
839 {
840 HOST_WIDE_INT frame_offset_old = frame_offset;
841
842 p = ggc_alloc<temp_slot> ();
843
844 /* We are passing an explicit alignment request to assign_stack_local.
845 One side effect of that is assign_stack_local will not round SIZE
846 to ensure the frame offset remains suitably aligned.
847
848 So for requests which depended on the rounding of SIZE, we go ahead
849 and round it now. We also make sure ALIGNMENT is at least
850 BIGGEST_ALIGNMENT. */
851 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
852 p->slot = assign_stack_local_1 (mode,
853 (mode == BLKmode
854 ? CEIL_ROUND (size,
855 (int) align
856 / BITS_PER_UNIT)
857 : size),
858 align, 0);
859
860 p->align = align;
861
862 /* The following slot size computation is necessary because we don't
863 know the actual size of the temporary slot until assign_stack_local
864 has performed all the frame alignment and size rounding for the
865 requested temporary. Note that extra space added for alignment
866 can be either above or below this stack slot depending on which
867 way the frame grows. We include the extra space if and only if it
868 is above this slot. */
869 if (FRAME_GROWS_DOWNWARD)
870 p->size = frame_offset_old - frame_offset;
871 else
872 p->size = size;
873
874 /* Now define the fields used by combine_temp_slots. */
875 if (FRAME_GROWS_DOWNWARD)
876 {
877 p->base_offset = frame_offset;
878 p->full_size = frame_offset_old - frame_offset;
879 }
880 else
881 {
882 p->base_offset = frame_offset_old;
883 p->full_size = frame_offset - frame_offset_old;
884 }
885
886 selected = p;
887 }
888
889 p = selected;
890 p->in_use = 1;
891 p->type = type;
892 p->level = temp_slot_level;
893 n_temp_slots_in_use++;
894
895 pp = temp_slots_at_level (p->level);
896 insert_slot_to_list (p, pp);
897 insert_temp_slot_address (XEXP (p->slot, 0), p);
898
899 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
900 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
901 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
902
903 /* If we know the alias set for the memory that will be used, use
904 it. If there's no TYPE, then we don't know anything about the
905 alias set for the memory. */
906 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
907 set_mem_align (slot, align);
908
909 /* If a type is specified, set the relevant flags. */
910 if (type != 0)
911 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
912 MEM_NOTRAP_P (slot) = 1;
913
914 return slot;
915 }
916
917 /* Allocate a temporary stack slot and record it for possible later
918 reuse.  The first two arguments are the same as in the preceding function.  */
919
920 rtx
921 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
922 {
923 return assign_stack_temp_for_type (mode, size, NULL_TREE);
924 }
925 \f
926 /* Assign a temporary.
927 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
928 and that decl should be used in error messages.  In either case,
929 we allocate a temporary of the given type.
930 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
931 it is 0 if a register is OK.
932 DONT_PROMOTE is 1 if we should not promote values in registers
933 to wider modes.  */
934
935 rtx
936 assign_temp (tree type_or_decl, int memory_required,
937 int dont_promote ATTRIBUTE_UNUSED)
938 {
939 tree type, decl;
940 machine_mode mode;
941 #ifdef PROMOTE_MODE
942 int unsignedp;
943 #endif
944
945 if (DECL_P (type_or_decl))
946 decl = type_or_decl, type = TREE_TYPE (decl);
947 else
948 decl = NULL, type = type_or_decl;
949
950 mode = TYPE_MODE (type);
951 #ifdef PROMOTE_MODE
952 unsignedp = TYPE_UNSIGNED (type);
953 #endif
954
955 if (mode == BLKmode || memory_required)
956 {
957 HOST_WIDE_INT size = int_size_in_bytes (type);
958 rtx tmp;
959
960 /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
961 problems with allocating the stack space.  */
962 if (size == 0)
963 size = 1;
964
965 /* Unfortunately, we don't yet know how to allocate variable-sized
966 temporaries. However, sometimes we can find a fixed upper limit on
967 the size, so try that instead. */
968 else if (size == -1)
969 size = max_int_size_in_bytes (type);
970
971 /* The size of the temporary may be too large to fit into an integer. */
972 /* ??? Not sure this should happen except for user silliness, so limit
973 this to things that aren't compiler-generated temporaries. The
974 rest of the time we'll die in assign_stack_temp_for_type. */
975 if (decl && size == -1
976 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
977 {
978 error ("size of variable %q+D is too large", decl);
979 size = 1;
980 }
981
982 tmp = assign_stack_temp_for_type (mode, size, type);
983 return tmp;
984 }
985
986 #ifdef PROMOTE_MODE
987 if (! dont_promote)
988 mode = promote_mode (type, mode, &unsignedp);
989 #endif
990
991 return gen_reg_rtx (mode);
992 }
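/* A minimal sketch of the two behaviors described above; TYPE is an
   illustrative tree node for some complete type.  */
#if 0
  rtx r = assign_temp (type, /*memory_required=*/0, 0);  /* May be a REG.  */
  rtx m = assign_temp (type, /*memory_required=*/1, 0);  /* Always a MEM.  */
#endif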
993 \f
994 /* Combine temporary stack slots which are adjacent on the stack.
995
996 This allows for better use of already allocated stack space. This is only
997 done for BLKmode slots because we can be sure that we won't have alignment
998 problems in this case. */
999
1000 static void
1001 combine_temp_slots (void)
1002 {
1003 struct temp_slot *p, *q, *next, *next_q;
1004 int num_slots;
1005
1006 /* When strict aliasing is in effect, we can't combine slots, because
1007 the information about which slot is in which alias set would be lost.  */
1008 if (flag_strict_aliasing)
1009 return;
1010
1011 /* If there are a lot of temp slots, don't do anything unless
1012 high levels of optimization are enabled.  */
1013 if (! flag_expensive_optimizations)
1014 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1015 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1016 return;
1017
1018 for (p = avail_temp_slots; p; p = next)
1019 {
1020 int delete_p = 0;
1021
1022 next = p->next;
1023
1024 if (GET_MODE (p->slot) != BLKmode)
1025 continue;
1026
1027 for (q = p->next; q; q = next_q)
1028 {
1029 int delete_q = 0;
1030
1031 next_q = q->next;
1032
1033 if (GET_MODE (q->slot) != BLKmode)
1034 continue;
1035
1036 if (p->base_offset + p->full_size == q->base_offset)
1037 {
1038 /* Q comes after P; combine Q into P. */
1039 p->size += q->size;
1040 p->full_size += q->full_size;
1041 delete_q = 1;
1042 }
1043 else if (q->base_offset + q->full_size == p->base_offset)
1044 {
1045 /* P comes after Q; combine P into Q. */
1046 q->size += p->size;
1047 q->full_size += p->full_size;
1048 delete_p = 1;
1049 break;
1050 }
1051 if (delete_q)
1052 cut_slot_from_list (q, &avail_temp_slots);
1053 }
1054
1055 /* Either delete P or advance past it. */
1056 if (delete_p)
1057 cut_slot_from_list (p, &avail_temp_slots);
1058 }
1059 }
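/* For instance, if an available BLKmode slot P has base_offset 0 and
   full_size 16, and another available BLKmode slot Q has base_offset 16,
   the loop above merges Q into P, leaving a single larger slot that
   later requests can reuse.  */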
1060 \f
1061 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1062 slot that previously was known by OLD_RTX. */
1063
1064 void
1065 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1066 {
1067 struct temp_slot *p;
1068
1069 if (rtx_equal_p (old_rtx, new_rtx))
1070 return;
1071
1072 p = find_temp_slot_from_address (old_rtx);
1073
1074 /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1075 NEW_RTX is a register, see if one operand of the PLUS is a
1076 temporary location.  If so, NEW_RTX points into it.  Otherwise,
1077 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1078 in common between them; if so, try a recursive call on the other
1079 operands.  */
1080 if (p == 0)
1081 {
1082 if (GET_CODE (old_rtx) != PLUS)
1083 return;
1084
1085 if (REG_P (new_rtx))
1086 {
1087 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1088 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1089 return;
1090 }
1091 else if (GET_CODE (new_rtx) != PLUS)
1092 return;
1093
1094 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1095 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1096 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1097 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1098 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1099 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1100 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1101 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1102
1103 return;
1104 }
1105
1106 /* Otherwise add an alias for the temp's address. */
1107 insert_temp_slot_address (new_rtx, p);
1108 }
1109
1110 /* If X could be a reference to a temporary slot, mark that slot as
1111 belonging to the level one higher than the current level.  If X
1112 matched one of our slots, just mark that one.  Otherwise, we can't
1113 easily predict which it is, so upgrade all of them.
1114
1115 This is called when an ({...}) construct occurs and a statement
1116 returns a value in memory. */
1117
1118 void
1119 preserve_temp_slots (rtx x)
1120 {
1121 struct temp_slot *p = 0, *next;
1122
1123 if (x == 0)
1124 return;
1125
1126 /* If X is a register that is being used as a pointer, see if we have
1127 a temporary slot we know it points to. */
1128 if (REG_P (x) && REG_POINTER (x))
1129 p = find_temp_slot_from_address (x);
1130
1131 /* If X is not in memory or is at a constant address, it cannot be in
1132 a temporary slot. */
1133 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1134 return;
1135
1136 /* First see if we can find a match. */
1137 if (p == 0)
1138 p = find_temp_slot_from_address (XEXP (x, 0));
1139
1140 if (p != 0)
1141 {
1142 if (p->level == temp_slot_level)
1143 move_slot_to_level (p, temp_slot_level - 1);
1144 return;
1145 }
1146
1147 /* Otherwise, preserve all slots at this level.  */
1148 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1149 {
1150 next = p->next;
1151 move_slot_to_level (p, temp_slot_level - 1);
1152 }
1153 }
1154
1155 /* Free all temporaries used so far. This is normally called at the
1156 end of generating code for a statement. */
1157
1158 void
1159 free_temp_slots (void)
1160 {
1161 struct temp_slot *p, *next;
1162 bool some_available = false;
1163
1164 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1165 {
1166 next = p->next;
1167 make_slot_available (p);
1168 some_available = true;
1169 }
1170
1171 if (some_available)
1172 {
1173 remove_unused_temp_slot_addresses ();
1174 combine_temp_slots ();
1175 }
1176 }
1177
1178 /* Push deeper into the nesting level for stack temporaries. */
1179
1180 void
1181 push_temp_slots (void)
1182 {
1183 temp_slot_level++;
1184 }
1185
1186 /* Pop a temporary nesting level. All slots in use in the current level
1187 are freed. */
1188
1189 void
1190 pop_temp_slots (void)
1191 {
1192 free_temp_slots ();
1193 temp_slot_level--;
1194 }
1195
1196 /* Initialize temporary slots. */
1197
1198 void
1199 init_temp_slots (void)
1200 {
1201 /* We have not allocated any temporaries yet. */
1202 avail_temp_slots = 0;
1203 vec_alloc (used_temp_slots, 0);
1204 temp_slot_level = 0;
1205 n_temp_slots_in_use = 0;
1206
1207 /* Set up the table to map addresses to temp slots. */
1208 if (! temp_slot_address_table)
1209 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1210 else
1211 temp_slot_address_table->empty ();
1212 }
1213 \f
1214 /* Functions and data structures to keep track of the values hard regs
1215 had at the start of the function. */
1216
1217 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1218 and has_hard_reg_initial_val.  */
1219 typedef struct GTY(()) initial_value_pair {
1220 rtx hard_reg;
1221 rtx pseudo;
1222 } initial_value_pair;
1223 /* ??? This could be a VEC but there is currently no way to define an
1224 opaque VEC type. This could be worked around by defining struct
1225 initial_value_pair in function.h. */
1226 typedef struct GTY(()) initial_value_struct {
1227 int num_entries;
1228 int max_entries;
1229 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1230 } initial_value_struct;
1231
1232 /* If a pseudo represents an initial hard reg (or expression), return
1233 it, else return NULL_RTX. */
1234
1235 rtx
1236 get_hard_reg_initial_reg (rtx reg)
1237 {
1238 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1239 int i;
1240
1241 if (ivs == 0)
1242 return NULL_RTX;
1243
1244 for (i = 0; i < ivs->num_entries; i++)
1245 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1246 return ivs->entries[i].hard_reg;
1247
1248 return NULL_RTX;
1249 }
1250
1251 /* Make sure that there's a pseudo register of mode MODE that stores the
1252 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1253
1254 rtx
1255 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1256 {
1257 struct initial_value_struct *ivs;
1258 rtx rv;
1259
1260 rv = has_hard_reg_initial_val (mode, regno);
1261 if (rv)
1262 return rv;
1263
1264 ivs = crtl->hard_reg_initial_vals;
1265 if (ivs == 0)
1266 {
1267 ivs = ggc_alloc<initial_value_struct> ();
1268 ivs->num_entries = 0;
1269 ivs->max_entries = 5;
1270 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1271 crtl->hard_reg_initial_vals = ivs;
1272 }
1273
1274 if (ivs->num_entries >= ivs->max_entries)
1275 {
1276 ivs->max_entries += 5;
1277 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1278 ivs->max_entries);
1279 }
1280
1281 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1282 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1283
1284 return ivs->entries[ivs->num_entries++].pseudo;
1285 }
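/* A minimal sketch of a typical backend use: obtaining a pseudo that
   holds the entry-time value of a link register.  LINK_REGNUM is a
   hypothetical hard register number, not a real target macro.  */
#if 0
  rtx entry_lr = get_hard_reg_initial_val (Pmode, LINK_REGNUM);
  /* ENTRY_LR may now stand in for the incoming return address;
     emit_initial_value_sets later emits the actual copy at entry.  */
#endif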
1286
1287 /* See if get_hard_reg_initial_val has been used to create a pseudo
1288 for the initial value of hard register REGNO in mode MODE. Return
1289 the associated pseudo if so, otherwise return NULL. */
1290
1291 rtx
1292 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1293 {
1294 struct initial_value_struct *ivs;
1295 int i;
1296
1297 ivs = crtl->hard_reg_initial_vals;
1298 if (ivs != 0)
1299 for (i = 0; i < ivs->num_entries; i++)
1300 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1301 && REGNO (ivs->entries[i].hard_reg) == regno)
1302 return ivs->entries[i].pseudo;
1303
1304 return NULL_RTX;
1305 }
1306
1307 unsigned int
1308 emit_initial_value_sets (void)
1309 {
1310 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1311 int i;
1312 rtx_insn *seq;
1313
1314 if (ivs == 0)
1315 return 0;
1316
1317 start_sequence ();
1318 for (i = 0; i < ivs->num_entries; i++)
1319 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1320 seq = get_insns ();
1321 end_sequence ();
1322
1323 emit_insn_at_entry (seq);
1324 return 0;
1325 }
1326
1327 /* Store the hard reg of initial-values entry I in *HREG and its pseudo
1328 in *PREG; return TRUE if I is a valid entry, FALSE otherwise.  */
1329 bool
1330 initial_value_entry (int i, rtx *hreg, rtx *preg)
1331 {
1332 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1333 if (!ivs || i >= ivs->num_entries)
1334 return false;
1335
1336 *hreg = ivs->entries[i].hard_reg;
1337 *preg = ivs->entries[i].pseudo;
1338 return true;
1339 }
1340 \f
1341 /* These routines are responsible for converting virtual register references
1342 to the actual hard register references once RTL generation is complete.
1343
1344 The following five variables are used for communication between the
1345 routines.  They contain the offsets of the virtual registers from their
1346 respective hard registers.  */
1347
1348 static int in_arg_offset;
1349 static int var_offset;
1350 static int dynamic_offset;
1351 static int out_arg_offset;
1352 static int cfa_offset;
1353
1354 /* On most machines, the stack pointer register is equivalent to the bottom
1355 of the stack.  */
1356
1357 #ifndef STACK_POINTER_OFFSET
1358 #define STACK_POINTER_OFFSET 0
1359 #endif
1360
1361 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1362 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1363 #endif
1364
1365 /* If not defined, pick an appropriate default for the offset of dynamically
1366 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1367 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1368
1369 #ifndef STACK_DYNAMIC_OFFSET
1370
1371 /* The bottom of the stack points to the actual arguments.  If
1372 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1373 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1374 stack space for register parameters is not pushed by the caller, but
1375 rather is part of the fixed stack areas and hence not included in
1376 `crtl->outgoing_args_size'.  Nevertheless, we must allow
1377 for it when allocating stack dynamic objects.  */
1378
1379 #ifdef INCOMING_REG_PARM_STACK_SPACE
1380 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1381 ((ACCUMULATE_OUTGOING_ARGS \
1382 ? (crtl->outgoing_args_size \
1383 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1384 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1385 : 0) + (STACK_POINTER_OFFSET))
1386 #else
1387 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1388 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1389 + (STACK_POINTER_OFFSET))
1390 #endif
1391 #endif
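/* For example, on a target that accumulates outgoing arguments and
   reserves no register-parameter area, the default above evaluates to
   crtl->outgoing_args_size + STACK_POINTER_OFFSET, so dynamically
   allocated objects start just beyond the outgoing-argument block.  */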
1392
1393 \f
1394 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1395 is a virtual register, return the equivalent hard register and set the
1396 offset indirectly through the pointer. Otherwise, return 0. */
1397
1398 static rtx
1399 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1400 {
1401 rtx new_rtx;
1402 HOST_WIDE_INT offset;
1403
1404 if (x == virtual_incoming_args_rtx)
1405 {
1406 if (stack_realign_drap)
1407 {
1408 /* Replace virtual_incoming_args_rtx with internal arg
1409 pointer if DRAP is used to realign stack. */
1410 new_rtx = crtl->args.internal_arg_pointer;
1411 offset = 0;
1412 }
1413 else
1414 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1415 }
1416 else if (x == virtual_stack_vars_rtx)
1417 new_rtx = frame_pointer_rtx, offset = var_offset;
1418 else if (x == virtual_stack_dynamic_rtx)
1419 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1420 else if (x == virtual_outgoing_args_rtx)
1421 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1422 else if (x == virtual_cfa_rtx)
1423 {
1424 #ifdef FRAME_POINTER_CFA_OFFSET
1425 new_rtx = frame_pointer_rtx;
1426 #else
1427 new_rtx = arg_pointer_rtx;
1428 #endif
1429 offset = cfa_offset;
1430 }
1431 else if (x == virtual_preferred_stack_boundary_rtx)
1432 {
1433 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1434 offset = 0;
1435 }
1436 else
1437 return NULL_RTX;
1438
1439 *poffset = offset;
1440 return new_rtx;
1441 }
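/* For reference, apart from the DRAP and CFA special cases handled
   above, the replacements performed are:
     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset  */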
1442
1443 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1444 registers present inside of *LOC. The expression is simplified,
1445 as much as possible, but is not to be considered "valid" in any sense
1446 implied by the target. Return true if any change is made. */
1447
1448 static bool
1449 instantiate_virtual_regs_in_rtx (rtx *loc)
1450 {
1451 if (!*loc)
1452 return false;
1453 bool changed = false;
1454 subrtx_ptr_iterator::array_type array;
1455 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1456 {
1457 rtx *loc = *iter;
1458 if (rtx x = *loc)
1459 {
1460 rtx new_rtx;
1461 HOST_WIDE_INT offset;
1462 switch (GET_CODE (x))
1463 {
1464 case REG:
1465 new_rtx = instantiate_new_reg (x, &offset);
1466 if (new_rtx)
1467 {
1468 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1469 changed = true;
1470 }
1471 iter.skip_subrtxes ();
1472 break;
1473
1474 case PLUS:
1475 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1476 if (new_rtx)
1477 {
1478 XEXP (x, 0) = new_rtx;
1479 *loc = plus_constant (GET_MODE (x), x, offset, true);
1480 changed = true;
1481 iter.skip_subrtxes ();
1482 break;
1483 }
1484
1485 /* FIXME -- from old code */
1486 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1487 we can commute the PLUS and SUBREG because pointers into the
1488 frame are well-behaved. */
1489 break;
1490
1491 default:
1492 break;
1493 }
1494 }
1495 }
1496 return changed;
1497 }
1498
1499 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1500 matches the predicate for insn CODE operand OPERAND. */
1501
1502 static int
1503 safe_insn_predicate (int code, int operand, rtx x)
1504 {
1505 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1506 }
1507
1508 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1509 registers present inside of insn. The result will be a valid insn. */
1510
1511 static void
1512 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1513 {
1514 HOST_WIDE_INT offset;
1515 int insn_code, i;
1516 bool any_change = false;
1517 rtx set, new_rtx, x;
1518 rtx_insn *seq;
1519
1520 /* There are some special cases to be handled first. */
1521 set = single_set (insn);
1522 if (set)
1523 {
1524 /* We're allowed to assign to a virtual register. This is interpreted
1525 to mean that the underlying register gets assigned the inverse
1526 transformation. This is used, for example, in the handling of
1527 non-local gotos. */
1528 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1529 if (new_rtx)
1530 {
1531 start_sequence ();
1532
1533 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1534 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1535 gen_int_mode (-offset, GET_MODE (new_rtx)));
1536 x = force_operand (x, new_rtx);
1537 if (x != new_rtx)
1538 emit_move_insn (new_rtx, x);
1539
1540 seq = get_insns ();
1541 end_sequence ();
1542
1543 emit_insn_before (seq, insn);
1544 delete_insn (insn);
1545 return;
1546 }
1547
1548 /* Handle a straight copy from a virtual register by generating a
1549 new add insn. The difference between this and falling through
1550 to the generic case is avoiding a new pseudo and eliminating a
1551 move insn in the initial rtl stream. */
1552 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1553 if (new_rtx && offset != 0
1554 && REG_P (SET_DEST (set))
1555 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1556 {
1557 start_sequence ();
1558
1559 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1560 gen_int_mode (offset,
1561 GET_MODE (SET_DEST (set))),
1562 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1563 if (x != SET_DEST (set))
1564 emit_move_insn (SET_DEST (set), x);
1565
1566 seq = get_insns ();
1567 end_sequence ();
1568
1569 emit_insn_before (seq, insn);
1570 delete_insn (insn);
1571 return;
1572 }
1573
1574 extract_insn (insn);
1575 insn_code = INSN_CODE (insn);
1576
1577 /* Handle a plus involving a virtual register by determining if the
1578 operands remain valid if they're modified in place. */
1579 if (GET_CODE (SET_SRC (set)) == PLUS
1580 && recog_data.n_operands >= 3
1581 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1582 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1583 && CONST_INT_P (recog_data.operand[2])
1584 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1585 {
1586 offset += INTVAL (recog_data.operand[2]);
1587
1588 /* If the sum is zero, then replace with a plain move. */
1589 if (offset == 0
1590 && REG_P (SET_DEST (set))
1591 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1592 {
1593 start_sequence ();
1594 emit_move_insn (SET_DEST (set), new_rtx);
1595 seq = get_insns ();
1596 end_sequence ();
1597
1598 emit_insn_before (seq, insn);
1599 delete_insn (insn);
1600 return;
1601 }
1602
1603 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1604
1605 /* Using validate_change and apply_change_group here leaves
1606 recog_data in an invalid state. Since we know exactly what
1607 we want to check, do those two by hand. */
1608 if (safe_insn_predicate (insn_code, 1, new_rtx)
1609 && safe_insn_predicate (insn_code, 2, x))
1610 {
1611 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1612 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1613 any_change = true;
1614
1615 /* Fall through into the regular operand fixup loop in
1616 order to take care of operands other than 1 and 2. */
1617 }
1618 }
1619 }
1620 else
1621 {
1622 extract_insn (insn);
1623 insn_code = INSN_CODE (insn);
1624 }
1625
1626 /* In the general case, we expect virtual registers to appear only in
1627 operands, and then only as either bare registers or inside memories. */
1628 for (i = 0; i < recog_data.n_operands; ++i)
1629 {
1630 x = recog_data.operand[i];
1631 switch (GET_CODE (x))
1632 {
1633 case MEM:
1634 {
1635 rtx addr = XEXP (x, 0);
1636
1637 if (!instantiate_virtual_regs_in_rtx (&addr))
1638 continue;
1639
1640 start_sequence ();
1641 x = replace_equiv_address (x, addr, true);
1642 /* It may happen that the address with the virtual reg
1643 was valid (e.g. based on the virtual stack reg, which might
1644 be acceptable to the predicates with all offsets), whereas
1645 the address now isn't valid anymore, for instance when the
1646 address is still offset, but the base reg isn't virtual-stack-reg
1647 anymore.  Below we would do a force_reg on the whole operand,
1648 but this insn might actually only accept memory.  Hence,
1649 before doing that last resort, try to reload the address into
1650 a register, so this operand stays a MEM.  */
1651 if (!safe_insn_predicate (insn_code, i, x))
1652 {
1653 addr = force_reg (GET_MODE (addr), addr);
1654 x = replace_equiv_address (x, addr, true);
1655 }
1656 seq = get_insns ();
1657 end_sequence ();
1658 if (seq)
1659 emit_insn_before (seq, insn);
1660 }
1661 break;
1662
1663 case REG:
1664 new_rtx = instantiate_new_reg (x, &offset);
1665 if (new_rtx == NULL)
1666 continue;
1667 if (offset == 0)
1668 x = new_rtx;
1669 else
1670 {
1671 start_sequence ();
1672
1673 /* Careful, special mode predicates may have stuff in
1674 insn_data[insn_code].operand[i].mode that isn't useful
1675 to us for computing a new value. */
1676 /* ??? Recognize address_operand and/or "p" constraints
1677 to see if (plus new offset) is a valid address before we
1678 put this through expand_simple_binop.  */
1679 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1680 gen_int_mode (offset, GET_MODE (x)),
1681 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1682 seq = get_insns ();
1683 end_sequence ();
1684 emit_insn_before (seq, insn);
1685 }
1686 break;
1687
1688 case SUBREG:
1689 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1690 if (new_rtx == NULL)
1691 continue;
1692 if (offset != 0)
1693 {
1694 start_sequence ();
1695 new_rtx = expand_simple_binop
1696 (GET_MODE (new_rtx), PLUS, new_rtx,
1697 gen_int_mode (offset, GET_MODE (new_rtx)),
1698 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1699 seq = get_insns ();
1700 end_sequence ();
1701 emit_insn_before (seq, insn);
1702 }
1703 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1704 GET_MODE (new_rtx), SUBREG_BYTE (x));
1705 gcc_assert (x);
1706 break;
1707
1708 default:
1709 continue;
1710 }
1711
1712 /* At this point, X contains the new value for the operand.
1713 Validate the new value vs the insn predicate. Note that
1714 asm insns will have insn_code -1 here. */
1715 if (!safe_insn_predicate (insn_code, i, x))
1716 {
1717 start_sequence ();
1718 if (REG_P (x))
1719 {
1720 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1721 x = copy_to_reg (x);
1722 }
1723 else
1724 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1725 seq = get_insns ();
1726 end_sequence ();
1727 if (seq)
1728 emit_insn_before (seq, insn);
1729 }
1730
1731 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1732 any_change = true;
1733 }
1734
1735 if (any_change)
1736 {
1737 /* Propagate operand changes into the duplicates. */
1738 for (i = 0; i < recog_data.n_dups; ++i)
1739 *recog_data.dup_loc[i]
1740 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1741
1742 /* Force re-recognition of the instruction for validation. */
1743 INSN_CODE (insn) = -1;
1744 }
1745
1746 if (asm_noperands (PATTERN (insn)) >= 0)
1747 {
1748 if (!check_asm_operands (PATTERN (insn)))
1749 {
1750 error_for_asm (insn, "impossible constraint in %<asm%>");
1751 /* For asm goto, instead of fixing up all the edges
1752 just clear the template and clear input operands
1753 (asm goto doesn't have any output operands). */
1754 if (JUMP_P (insn))
1755 {
1756 rtx asm_op = extract_asm_operands (PATTERN (insn));
1757 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1758 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1759 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1760 }
1761 else
1762 delete_insn (insn);
1763 }
1764 }
1765 else
1766 {
1767 if (recog_memoized (insn) < 0)
1768 fatal_insn_not_found (insn);
1769 }
1770 }
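/* A small before/after illustration of the rewrite performed above,
   assuming a hard frame pointer and a var_offset of 0; the pseudo
   register number is illustrative:
     before:  (set (reg 100) (plus (reg virtual-stack-vars) (const_int 8)))
     after:   (set (reg 100) (plus (reg frame-pointer) (const_int 8)))
   with the constant adjusted by var_offset in the general case.  */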
1771
1772 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1773 do any instantiation required. */
1774
1775 void
1776 instantiate_decl_rtl (rtx x)
1777 {
1778 rtx addr;
1779
1780 if (x == 0)
1781 return;
1782
1783 /* If this is a CONCAT, recurse for the pieces. */
1784 if (GET_CODE (x) == CONCAT)
1785 {
1786 instantiate_decl_rtl (XEXP (x, 0));
1787 instantiate_decl_rtl (XEXP (x, 1));
1788 return;
1789 }
1790
1791 /* If this is not a MEM, no need to do anything. Similarly if the
1792 address is a constant or a register that is not a virtual register. */
1793 if (!MEM_P (x))
1794 return;
1795
1796 addr = XEXP (x, 0);
1797 if (CONSTANT_P (addr)
1798 || (REG_P (addr)
1799 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1800 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1801 return;
1802
1803 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1804 }
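/* Illustrative sketch (not part of the sources): an automatic whose
   DECL_RTL is
     (mem:SI (plus (reg virtual-stack-vars) (const_int 8)))
   has its address handed to instantiate_virtual_regs_in_rtx above and
   rewritten in terms of a hard register plus an adjusted offset, while
   a DECL_RTL whose address is constant, or already a hard or pseudo
   register, is skipped by the early returns. */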
1805
1806 /* Helper for instantiate_decls called via walk_tree: Process all decls
1807 in the given DECL_VALUE_EXPR. */
1808
1809 static tree
1810 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1811 {
1812 tree t = *tp;
1813 if (! EXPR_P (t))
1814 {
1815 *walk_subtrees = 0;
1816 if (DECL_P (t))
1817 {
1818 if (DECL_RTL_SET_P (t))
1819 instantiate_decl_rtl (DECL_RTL (t));
1820 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1821 && DECL_INCOMING_RTL (t))
1822 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1823 if ((TREE_CODE (t) == VAR_DECL
1824 || TREE_CODE (t) == RESULT_DECL)
1825 && DECL_HAS_VALUE_EXPR_P (t))
1826 {
1827 tree v = DECL_VALUE_EXPR (t);
1828 walk_tree (&v, instantiate_expr, NULL, NULL);
1829 }
1830 }
1831 }
1832 return NULL;
1833 }
1834
1835 /* Subroutine of instantiate_decls: Process all decls in the given
1836 BLOCK node and all its subblocks. */
1837
1838 static void
1839 instantiate_decls_1 (tree let)
1840 {
1841 tree t;
1842
1843 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1844 {
1845 if (DECL_RTL_SET_P (t))
1846 instantiate_decl_rtl (DECL_RTL (t));
1847 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1848 {
1849 tree v = DECL_VALUE_EXPR (t);
1850 walk_tree (&v, instantiate_expr, NULL, NULL);
1851 }
1852 }
1853
1854 /* Process all subblocks. */
1855 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1856 instantiate_decls_1 (t);
1857 }
1858
1859 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1860 all virtual registers in their DECL_RTL's. */
1861
1862 static void
1863 instantiate_decls (tree fndecl)
1864 {
1865 tree decl;
1866 unsigned ix;
1867
1868 /* Process all parameters of the function. */
1869 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1870 {
1871 instantiate_decl_rtl (DECL_RTL (decl));
1872 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1873 if (DECL_HAS_VALUE_EXPR_P (decl))
1874 {
1875 tree v = DECL_VALUE_EXPR (decl);
1876 walk_tree (&v, instantiate_expr, NULL, NULL);
1877 }
1878 }
1879
1880 if ((decl = DECL_RESULT (fndecl))
1881 && TREE_CODE (decl) == RESULT_DECL)
1882 {
1883 if (DECL_RTL_SET_P (decl))
1884 instantiate_decl_rtl (DECL_RTL (decl));
1885 if (DECL_HAS_VALUE_EXPR_P (decl))
1886 {
1887 tree v = DECL_VALUE_EXPR (decl);
1888 walk_tree (&v, instantiate_expr, NULL, NULL);
1889 }
1890 }
1891
1892 /* Process the saved static chain if it exists. */
1893 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1894 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1895 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1896
1897 /* Now process all variables defined in the function or its subblocks. */
1898 instantiate_decls_1 (DECL_INITIAL (fndecl));
1899
1900 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1901 if (DECL_RTL_SET_P (decl))
1902 instantiate_decl_rtl (DECL_RTL (decl));
1903 vec_free (cfun->local_decls);
1904 }
1905
1906 /* Pass through the INSNS of function FNDECL and convert virtual register
1907 references to hard register references. */
1908
1909 static unsigned int
1910 instantiate_virtual_regs (void)
1911 {
1912 rtx_insn *insn;
1913
1914 /* Compute the offsets to use for this function. */
1915 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1916 var_offset = STARTING_FRAME_OFFSET;
1917 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1918 out_arg_offset = STACK_POINTER_OFFSET;
1919 #ifdef FRAME_POINTER_CFA_OFFSET
1920 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1921 #else
1922 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1923 #endif
1924
1925 /* Initialize recognition, indicating that volatile is OK. */
1926 init_recog ();
1927
1928 /* Scan through all the insns, instantiating every virtual register still
1929 present. */
1930 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1931 if (INSN_P (insn))
1932 {
1933 /* These patterns in the instruction stream can never be recognized.
1934 Fortunately, they shouldn't contain virtual registers either. */
1935 if (GET_CODE (PATTERN (insn)) == USE
1936 || GET_CODE (PATTERN (insn)) == CLOBBER
1937 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1938 continue;
1939 else if (DEBUG_INSN_P (insn))
1940 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1941 else
1942 instantiate_virtual_regs_in_insn (insn);
1943
1944 if (insn->deleted ())
1945 continue;
1946
1947 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1948
1949 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1950 if (CALL_P (insn))
1951 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1952 }
1953
1954 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1955 instantiate_decls (current_function_decl);
1956
1957 targetm.instantiate_decls ();
1958
1959 /* Indicate that, from now on, assign_stack_local should use
1960 frame_pointer_rtx. */
1961 virtuals_instantiated = 1;
1962
1963 return 0;
1964 }
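/* A hedged example of the pass's net effect, assuming a target where
   STARTING_FRAME_OFFSET is 0 so var_offset above is 0: an operand

     (plus (reg virtual-stack-vars) (const_int 16))

   becomes

     (plus (reg frame-pointer) (const_int 16))

   after which the insn is revalidated against its predicates (or
   re-recognized) by the machinery above. */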
1965
1966 namespace {
1967
1968 const pass_data pass_data_instantiate_virtual_regs =
1969 {
1970 RTL_PASS, /* type */
1971 "vregs", /* name */
1972 OPTGROUP_NONE, /* optinfo_flags */
1973 TV_NONE, /* tv_id */
1974 0, /* properties_required */
1975 0, /* properties_provided */
1976 0, /* properties_destroyed */
1977 0, /* todo_flags_start */
1978 0, /* todo_flags_finish */
1979 };
1980
1981 class pass_instantiate_virtual_regs : public rtl_opt_pass
1982 {
1983 public:
1984 pass_instantiate_virtual_regs (gcc::context *ctxt)
1985 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
1986 {}
1987
1988 /* opt_pass methods: */
1989 virtual unsigned int execute (function *)
1990 {
1991 return instantiate_virtual_regs ();
1992 }
1993
1994 }; // class pass_instantiate_virtual_regs
1995
1996 } // anon namespace
1997
1998 rtl_opt_pass *
1999 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2000 {
2001 return new pass_instantiate_virtual_regs (ctxt);
2002 }
2003
2004 \f
2005 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2006 This means a type for which function calls must pass an address to the
2007 function or get an address back from the function.
2008 EXP may be a type node or an expression (whose type is tested). */
2009
2010 int
2011 aggregate_value_p (const_tree exp, const_tree fntype)
2012 {
2013 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2014 int i, regno, nregs;
2015 rtx reg;
2016
2017 if (fntype)
2018 switch (TREE_CODE (fntype))
2019 {
2020 case CALL_EXPR:
2021 {
2022 tree fndecl = get_callee_fndecl (fntype);
2023 fntype = (fndecl
2024 ? TREE_TYPE (fndecl)
2025 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2026 }
2027 break;
2028 case FUNCTION_DECL:
2029 fntype = TREE_TYPE (fntype);
2030 break;
2031 case FUNCTION_TYPE:
2032 case METHOD_TYPE:
2033 break;
2034 case IDENTIFIER_NODE:
2035 fntype = NULL_TREE;
2036 break;
2037 default:
2038 /* We don't expect other tree types here. */
2039 gcc_unreachable ();
2040 }
2041
2042 if (VOID_TYPE_P (type))
2043 return 0;
2044
2045 /* If a record should be passed the same as its first (and only) member,
2046 don't pass it as an aggregate. */
2047 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2048 return aggregate_value_p (first_field (type), fntype);
2049
2050 /* If the front end has decided that this needs to be passed by
2051 reference, do so. */
2052 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2053 && DECL_BY_REFERENCE (exp))
2054 return 1;
2055
2056 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2057 if (fntype && TREE_ADDRESSABLE (fntype))
2058 return 1;
2059
2060 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2061 and thus can't be returned in registers. */
2062 if (TREE_ADDRESSABLE (type))
2063 return 1;
2064
2065 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2066 return 1;
2067
2068 if (targetm.calls.return_in_memory (type, fntype))
2069 return 1;
2070
2071 /* Make sure we have suitable call-clobbered regs to return
2072 the value in; if not, we must return it in memory. */
2073 reg = hard_function_value (type, 0, fntype, 0);
2074
2075 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2076 it is OK. */
2077 if (!REG_P (reg))
2078 return 0;
2079
2080 regno = REGNO (reg);
2081 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2082 for (i = 0; i < nregs; i++)
2083 if (! call_used_regs[regno + i])
2084 return 1;
2085
2086 return 0;
2087 }
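/* For example (a sketch; the cutoff is the target's choice via
   return_in_memory, not hard-coded here):

     struct big { char buf[64]; };
     struct big f (void);   // aggregate_value_p typically nonzero:
                            // returned via a hidden address
     int g (void);          // aggregate_value_p == 0: returned in a
                            // call-clobbered register
*/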
2088 \f
2089 /* Return true if we should assign DECL a pseudo register; false if it
2090 should live on the local stack. */
2091
2092 bool
2093 use_register_for_decl (const_tree decl)
2094 {
2095 if (!targetm.calls.allocate_stack_slots_for_args ())
2096 return true;
2097
2098 /* Honor volatile. */
2099 if (TREE_SIDE_EFFECTS (decl))
2100 return false;
2101
2102 /* Honor addressability. */
2103 if (TREE_ADDRESSABLE (decl))
2104 return false;
2105
2106 /* Decl is implicitly addressable by bound stores and loads
2107 if it is an aggregate holding bounds. */
2108 if (chkp_function_instrumented_p (current_function_decl)
2109 && TREE_TYPE (decl)
2110 && !BOUNDED_P (decl)
2111 && chkp_type_has_pointer (TREE_TYPE (decl)))
2112 return false;
2113
2114 /* Only register-like things go in registers. */
2115 if (DECL_MODE (decl) == BLKmode)
2116 return false;
2117
2118 /* If -ffloat-store specified, don't put explicit float variables
2119 into registers. */
2120 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2121 propagates values across these stores, and it probably shouldn't. */
2122 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2123 return false;
2124
2125 /* If we're not interested in tracking debugging information for
2126 this decl, then we can certainly put it in a register. */
2127 if (DECL_IGNORED_P (decl))
2128 return true;
2129
2130 if (optimize)
2131 return true;
2132
2133 if (!DECL_REGISTER (decl))
2134 return false;
2135
2136 switch (TREE_CODE (TREE_TYPE (decl)))
2137 {
2138 case RECORD_TYPE:
2139 case UNION_TYPE:
2140 case QUAL_UNION_TYPE:
2141 /* When not optimizing, disregard register keyword for variables with
2142 types containing methods, otherwise the methods won't be callable
2143 from the debugger. */
2144 if (TYPE_METHODS (TREE_TYPE (decl)))
2145 return false;
2146 break;
2147 default:
2148 break;
2149 }
2150
2151 return true;
2152 }
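/* A sketch of the tests above applied to source-level locals:

     volatile int v;    // TREE_SIDE_EFFECTS: stays on the stack
     int i, *p = &i;    // i is TREE_ADDRESSABLE: stays on the stack
     int k;             // eligible for a pseudo register

   At -O0 the `register' keyword (DECL_REGISTER) still decides the
   last case; with optimization enabled that check is bypassed. */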
2153
2154 /* Return true if TYPE should be passed by invisible reference. */
2155
2156 bool
2157 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
2158 tree type, bool named_arg)
2159 {
2160 if (type)
2161 {
2162 /* If this type contains non-trivial constructors, then it is
2163 forbidden for the middle-end to create any new copies. */
2164 if (TREE_ADDRESSABLE (type))
2165 return true;
2166
2167 /* GCC post 3.4 passes *all* variable sized types by reference. */
2168 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2169 return true;
2170
2171 /* If a record type should be passed the same as its first (and only)
2172 member, use the type and mode of that member. */
2173 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2174 {
2175 type = TREE_TYPE (first_field (type));
2176 mode = TYPE_MODE (type);
2177 }
2178 }
2179
2180 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2181 type, named_arg);
2182 }
2183
2184 /* Return true if TYPE, which is passed by reference, should be callee
2185 copied instead of caller copied. */
2186
2187 bool
2188 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
2189 tree type, bool named_arg)
2190 {
2191 if (type && TREE_ADDRESSABLE (type))
2192 return false;
2193 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2194 named_arg);
2195 }
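/* Sketch (GNU C): a type whose TYPE_SIZE is not an INTEGER_CST takes
   the early-out in pass_by_reference above, e.g.

     void g (int n)
     {
       struct s { int a[n]; };   // variably sized (GNU C extension)
       // any by-value passing of `struct s' actually passes its
       // address
     }

   TREE_ADDRESSABLE types (e.g. C++ classes with nontrivial copy
   constructors, as marked by the front end) are likewise passed by
   reference and, per reference_callee_copied, never callee-copied. */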
2196
2197 /* Structures to communicate between the subroutines of assign_parms.
2198 The first holds data persistent across all parameters, the second
2199 is cleared out for each parameter. */
2200
2201 struct assign_parm_data_all
2202 {
2203 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2204 should become a job of the target or otherwise encapsulated. */
2205 CUMULATIVE_ARGS args_so_far_v;
2206 cumulative_args_t args_so_far;
2207 struct args_size stack_args_size;
2208 tree function_result_decl;
2209 tree orig_fnargs;
2210 rtx_insn *first_conversion_insn;
2211 rtx_insn *last_conversion_insn;
2212 HOST_WIDE_INT pretend_args_size;
2213 HOST_WIDE_INT extra_pretend_bytes;
2214 int reg_parm_stack_space;
2215 };
2216
2217 struct assign_parm_data_one
2218 {
2219 tree nominal_type;
2220 tree passed_type;
2221 rtx entry_parm;
2222 rtx stack_parm;
2223 machine_mode nominal_mode;
2224 machine_mode passed_mode;
2225 machine_mode promoted_mode;
2226 struct locate_and_pad_arg_data locate;
2227 int partial;
2228 BOOL_BITFIELD named_arg : 1;
2229 BOOL_BITFIELD passed_pointer : 1;
2230 BOOL_BITFIELD on_stack : 1;
2231 BOOL_BITFIELD loaded_in_reg : 1;
2232 };
2233
2234 struct bounds_parm_data
2235 {
2236 assign_parm_data_one parm_data;
2237 tree bounds_parm;
2238 tree ptr_parm;
2239 rtx ptr_entry;
2240 int bound_no;
2241 };
2242
2243 /* A subroutine of assign_parms. Initialize ALL. */
2244
2245 static void
2246 assign_parms_initialize_all (struct assign_parm_data_all *all)
2247 {
2248 tree fntype ATTRIBUTE_UNUSED;
2249
2250 memset (all, 0, sizeof (*all));
2251
2252 fntype = TREE_TYPE (current_function_decl);
2253
2254 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2255 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2256 #else
2257 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2258 current_function_decl, -1);
2259 #endif
2260 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2261
2262 #ifdef INCOMING_REG_PARM_STACK_SPACE
2263 all->reg_parm_stack_space
2264 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2265 #endif
2266 }
2267
2268 /* If ARGS contains entries with complex types, split each such entry
2269 into two entries of the component type, rewriting ARGS in place
2270 when substitutions are needed. */
2271
2272 static void
2273 split_complex_args (vec<tree> *args)
2274 {
2275 unsigned i;
2276 tree p;
2277
2278 FOR_EACH_VEC_ELT (*args, i, p)
2279 {
2280 tree type = TREE_TYPE (p);
2281 if (TREE_CODE (type) == COMPLEX_TYPE
2282 && targetm.calls.split_complex_arg (type))
2283 {
2284 tree decl;
2285 tree subtype = TREE_TYPE (type);
2286 bool addressable = TREE_ADDRESSABLE (p);
2287
2288 /* Rewrite the PARM_DECL's type with its component. */
2289 p = copy_node (p);
2290 TREE_TYPE (p) = subtype;
2291 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2292 DECL_MODE (p) = VOIDmode;
2293 DECL_SIZE (p) = NULL;
2294 DECL_SIZE_UNIT (p) = NULL;
2295 /* If this arg must go in memory, put it in a pseudo here.
2296 We can't allow it to go in memory as per normal parms,
2297 because the usual place might not have the imag part
2298 adjacent to the real part. */
2299 DECL_ARTIFICIAL (p) = addressable;
2300 DECL_IGNORED_P (p) = addressable;
2301 TREE_ADDRESSABLE (p) = 0;
2302 layout_decl (p, 0);
2303 (*args)[i] = p;
2304
2305 /* Build a second synthetic decl. */
2306 decl = build_decl (EXPR_LOCATION (p),
2307 PARM_DECL, NULL_TREE, subtype);
2308 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2309 DECL_ARTIFICIAL (decl) = addressable;
2310 DECL_IGNORED_P (decl) = addressable;
2311 layout_decl (decl, 0);
2312 args->safe_insert (++i, decl);
2313 }
2314 }
2315 }
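/* Sketch of the transformation, assuming the target's
   split_complex_arg hook approves the type:

     void f (_Complex double z);
       // is treated, for parameter assignment, as:
     void f (double z_real, double z_imag);

   where the second decl is the synthetic nameless PARM_DECL built
   above; the `z_real'/`z_imag' names are only for illustration. */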
2316
2317 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2318 the hidden struct return argument, and (ABI willing) complex args.
2319 Return the new parameter list. */
2320
2321 static vec<tree>
2322 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2323 {
2324 tree fndecl = current_function_decl;
2325 tree fntype = TREE_TYPE (fndecl);
2326 vec<tree> fnargs = vNULL;
2327 tree arg;
2328
2329 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2330 fnargs.safe_push (arg);
2331
2332 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2333
2334 /* If struct value address is treated as the first argument, make it so. */
2335 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2336 && ! cfun->returns_pcc_struct
2337 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2338 {
2339 tree type = build_pointer_type (TREE_TYPE (fntype));
2340 tree decl;
2341
2342 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2343 PARM_DECL, get_identifier (".result_ptr"), type);
2344 DECL_ARG_TYPE (decl) = type;
2345 DECL_ARTIFICIAL (decl) = 1;
2346 DECL_NAMELESS (decl) = 1;
2347 TREE_CONSTANT (decl) = 1;
2348
2349 DECL_CHAIN (decl) = all->orig_fnargs;
2350 all->orig_fnargs = decl;
2351 fnargs.safe_insert (0, decl);
2352
2353 all->function_result_decl = decl;
2354
2355 /* If the function is instrumented, then the bounds of the
2356 passed structure address are passed as the second argument. */
2357 if (chkp_function_instrumented_p (fndecl))
2358 {
2359 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2360 PARM_DECL, get_identifier (".result_bnd"),
2361 pointer_bounds_type_node);
2362 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2363 DECL_ARTIFICIAL (decl) = 1;
2364 DECL_NAMELESS (decl) = 1;
2365 TREE_CONSTANT (decl) = 1;
2366
2367 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2368 DECL_CHAIN (all->orig_fnargs) = decl;
2369 fnargs.safe_insert (1, decl);
2370 }
2371 }
2372
2373 /* If the target wants to split complex arguments into scalars, do so. */
2374 if (targetm.calls.split_complex_arg)
2375 split_complex_args (&fnargs);
2376
2377 return fnargs;
2378 }
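/* Sketch of the augmented list for a memory-returned aggregate,
   assuming struct_value_rtx is null so the address travels as an
   implicit first argument:

     struct big f (int x);
       // fnargs: .result_ptr, x
       // and for chkp-instrumented functions:
       //         .result_ptr, .result_bnd, x
*/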
2379
2380 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2381 data for the parameter. Incorporate ABI specifics such as pass-by-
2382 reference and type promotion. */
2383
2384 static void
2385 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2386 struct assign_parm_data_one *data)
2387 {
2388 tree nominal_type, passed_type;
2389 machine_mode nominal_mode, passed_mode, promoted_mode;
2390 int unsignedp;
2391
2392 memset (data, 0, sizeof (*data));
2393
2394 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2395 if (!cfun->stdarg)
2396 data->named_arg = 1; /* No variadic parms. */
2397 else if (DECL_CHAIN (parm))
2398 data->named_arg = 1; /* Not the last non-variadic parm. */
2399 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2400 data->named_arg = 1; /* Only variadic ones are unnamed. */
2401 else
2402 data->named_arg = 0; /* Treat as variadic. */
2403
2404 nominal_type = TREE_TYPE (parm);
2405 passed_type = DECL_ARG_TYPE (parm);
2406
2407 /* Look out for errors propagating this far. Also, if the parameter's
2408 type is void then its value doesn't matter. */
2409 if (TREE_TYPE (parm) == error_mark_node
2410 /* This can happen after weird syntax errors
2411 or if an enum type is defined among the parms. */
2412 || TREE_CODE (parm) != PARM_DECL
2413 || passed_type == NULL
2414 || VOID_TYPE_P (nominal_type))
2415 {
2416 nominal_type = passed_type = void_type_node;
2417 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2418 goto egress;
2419 }
2420
2421 /* Find mode of arg as it is passed, and mode of arg as it should be
2422 during execution of this function. */
2423 passed_mode = TYPE_MODE (passed_type);
2424 nominal_mode = TYPE_MODE (nominal_type);
2425
2426 /* If the parm is to be passed as a transparent union or record, use the
2427 type of the first field for the tests below. We have already verified
2428 that the modes are the same. */
2429 if ((TREE_CODE (passed_type) == UNION_TYPE
2430 || TREE_CODE (passed_type) == RECORD_TYPE)
2431 && TYPE_TRANSPARENT_AGGR (passed_type))
2432 passed_type = TREE_TYPE (first_field (passed_type));
2433
2434 /* See if this arg was passed by invisible reference. */
2435 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2436 passed_type, data->named_arg))
2437 {
2438 passed_type = nominal_type = build_pointer_type (passed_type);
2439 data->passed_pointer = true;
2440 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2441 }
2442
2443 /* Find mode as it is passed by the ABI. */
2444 unsignedp = TYPE_UNSIGNED (passed_type);
2445 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2446 TREE_TYPE (current_function_decl), 0);
2447
2448 egress:
2449 data->nominal_type = nominal_type;
2450 data->passed_type = passed_type;
2451 data->nominal_mode = nominal_mode;
2452 data->passed_mode = passed_mode;
2453 data->promoted_mode = promoted_mode;
2454 }
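/* Worked example (a sketch; exact modes depend on front end and
   target): for `void f (short s)', nominal_mode is HImode (how s is
   used in the body), passed_mode follows DECL_ARG_TYPE (often the
   promoted `int', hence SImode, in C), and promoted_mode is whatever
   promote_function_mode makes of that for the ABI. A pass-by-
   reference aggregate instead has all three rewritten in terms of a
   pointer type by the code above. */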
2455
2456 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2457
2458 static void
2459 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2460 struct assign_parm_data_one *data, bool no_rtl)
2461 {
2462 int varargs_pretend_bytes = 0;
2463
2464 targetm.calls.setup_incoming_varargs (all->args_so_far,
2465 data->promoted_mode,
2466 data->passed_type,
2467 &varargs_pretend_bytes, no_rtl);
2468
2469 /* If the back-end has requested extra stack space, record how much is
2470 needed. Do not change pretend_args_size otherwise since it may be
2471 nonzero from an earlier partial argument. */
2472 if (varargs_pretend_bytes > 0)
2473 all->pretend_args_size = varargs_pretend_bytes;
2474 }
2475
2476 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2477 the incoming location of the current parameter. */
2478
2479 static void
2480 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2481 struct assign_parm_data_one *data)
2482 {
2483 HOST_WIDE_INT pretend_bytes = 0;
2484 rtx entry_parm;
2485 bool in_regs;
2486
2487 if (data->promoted_mode == VOIDmode)
2488 {
2489 data->entry_parm = data->stack_parm = const0_rtx;
2490 return;
2491 }
2492
2493 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2494 data->promoted_mode,
2495 data->passed_type,
2496 data->named_arg);
2497
2498 if (entry_parm == 0)
2499 data->promoted_mode = data->passed_mode;
2500
2501 /* Determine parm's home in the stack, in case it arrives in the stack
2502 or we should pretend it did. Compute the stack position and rtx where
2503 the argument arrives and its size.
2504
2505 There is one complexity here: If this was a parameter that would
2506 have been passed in registers, but wasn't only because it is
2507 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2508 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2509 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2510 as it was the previous time. */
2511 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2512 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2513 in_regs = true;
2514 #endif
2515 if (!in_regs && !data->named_arg)
2516 {
2517 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2518 {
2519 rtx tem;
2520 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2521 data->promoted_mode,
2522 data->passed_type, true);
2523 in_regs = tem != NULL;
2524 }
2525 }
2526
2527 /* If this parameter was passed both in registers and in the stack, use
2528 the copy on the stack. */
2529 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2530 data->passed_type))
2531 entry_parm = 0;
2532
2533 if (entry_parm)
2534 {
2535 int partial;
2536
2537 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2538 data->promoted_mode,
2539 data->passed_type,
2540 data->named_arg);
2541 data->partial = partial;
2542
2543 /* The caller might already have allocated stack space for the
2544 register parameters. */
2545 if (partial != 0 && all->reg_parm_stack_space == 0)
2546 {
2547 /* Part of this argument is passed in registers and part
2548 is passed on the stack. Ask the prologue code to extend
2549 the stack part so that we can recreate the full value.
2550
2551 PRETEND_BYTES is the size of the registers we need to store.
2552 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2553 stack space that the prologue should allocate.
2554
2555 Internally, GCC assumes that the argument pointer is aligned
2556 to STACK_BOUNDARY bits. This is used both for alignment
2557 optimizations (see init_emit) and to locate arguments that are
2558 aligned to more than PARM_BOUNDARY bits. We must preserve this
2559 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2560 a stack boundary. */
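/* Worked example of the rounding below: if the target reports
   partial == 12 bytes passed in registers and STACK_BYTES == 8,
   then pretend_args_size = CEIL_ROUND (12, 8) = 16, i.e. the
   prologue allocates two full stack words although only 12 bytes
   of register content are stored. */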
2561
2562 /* We assume at most one partial arg, and it must be the first
2563 argument on the stack. */
2564 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2565
2566 pretend_bytes = partial;
2567 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2568
2569 /* We want to align relative to the actual stack pointer, so
2570 don't include this in the stack size until later. */
2571 all->extra_pretend_bytes = all->pretend_args_size;
2572 }
2573 }
2574
2575 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2576 all->reg_parm_stack_space,
2577 entry_parm ? data->partial : 0, current_function_decl,
2578 &all->stack_args_size, &data->locate);
2579
2580 /* Update parm_stack_boundary if this parameter is passed in the
2581 stack. */
2582 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2583 crtl->parm_stack_boundary = data->locate.boundary;
2584
2585 /* Adjust offsets to include the pretend args. */
2586 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2587 data->locate.slot_offset.constant += pretend_bytes;
2588 data->locate.offset.constant += pretend_bytes;
2589
2590 data->entry_parm = entry_parm;
2591 }
2592
2593 /* A subroutine of assign_parms. If there is actually space on the stack
2594 for this parm, count it in stack_args_size and return true. */
2595
2596 static bool
2597 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2598 struct assign_parm_data_one *data)
2599 {
2600 /* Bounds are never passed on the stack to keep compatibility
2601 with non-instrumented code. */
2602 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2603 return false;
2604 /* Trivially true if we've no incoming register. */
2605 else if (data->entry_parm == NULL)
2606 ;
2607 /* Also true if we're partially in registers and partially not,
2608 since we've arranged to drop the entire argument on the stack. */
2609 else if (data->partial != 0)
2610 ;
2611 /* Also true if the target says that it's passed in both registers
2612 and on the stack. */
2613 else if (GET_CODE (data->entry_parm) == PARALLEL
2614 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2615 ;
2616 /* Also true if the target says that there's stack allocated for
2617 all register parameters. */
2618 else if (all->reg_parm_stack_space > 0)
2619 ;
2620 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2621 else
2622 return false;
2623
2624 all->stack_args_size.constant += data->locate.size.constant;
2625 if (data->locate.size.var)
2626 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2627
2628 return true;
2629 }
2630
2631 /* A subroutine of assign_parms. Given that this parameter is allocated
2632 stack space by the ABI, find it. */
2633
2634 static void
2635 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2636 {
2637 rtx offset_rtx, stack_parm;
2638 unsigned int align, boundary;
2639
2640 /* If we're passing this arg using a reg, make its stack home the
2641 aligned stack slot. */
2642 if (data->entry_parm)
2643 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2644 else
2645 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2646
2647 stack_parm = crtl->args.internal_arg_pointer;
2648 if (offset_rtx != const0_rtx)
2649 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2650 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2651
2652 if (!data->passed_pointer)
2653 {
2654 set_mem_attributes (stack_parm, parm, 1);
2655 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2656 while promoted mode's size is needed. */
2657 if (data->promoted_mode != BLKmode
2658 && data->promoted_mode != DECL_MODE (parm))
2659 {
2660 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2661 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2662 {
2663 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2664 data->promoted_mode);
2665 if (offset)
2666 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2667 }
2668 }
2669 }
2670
2671 boundary = data->locate.boundary;
2672 align = BITS_PER_UNIT;
2673
2674 /* If we're padding upward, we know that the alignment of the slot
2675 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2676 intentionally forcing upward padding. Otherwise we have to come
2677 up with a guess at the alignment based on OFFSET_RTX. */
2678 if (data->locate.where_pad != downward || data->entry_parm)
2679 align = boundary;
2680 else if (CONST_INT_P (offset_rtx))
2681 {
2682 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2683 align = align & -align;
2684 }
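/* E.g. (a sketch): offset_rtx == (const_int 4) with a 64-bit
   boundary gives align = (4 * 8) | 64 = 96 and 96 & -96 = 32,
   i.e. a 32-bit guess: the largest power of two dividing both
   the offset in bits and the boundary. */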
2685 set_mem_align (stack_parm, align);
2686
2687 if (data->entry_parm)
2688 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2689
2690 data->stack_parm = stack_parm;
2691 }
2692
2693 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2694 always valid and contiguous. */
2695
2696 static void
2697 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2698 {
2699 rtx entry_parm = data->entry_parm;
2700 rtx stack_parm = data->stack_parm;
2701
2702 /* If this parm was passed part in regs and part in memory, pretend it
2703 arrived entirely in memory by pushing the register-part onto the stack.
2704 In the special case of a DImode or DFmode that is split, we could put
2705 it together in a pseudoreg directly, but for now that's not worth
2706 bothering with. */
2707 if (data->partial != 0)
2708 {
2709 /* Handle calls that pass values in multiple non-contiguous
2710 locations. The Irix 6 ABI has examples of this. */
2711 if (GET_CODE (entry_parm) == PARALLEL)
2712 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2713 data->passed_type,
2714 int_size_in_bytes (data->passed_type));
2715 else
2716 {
2717 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2718 move_block_from_reg (REGNO (entry_parm),
2719 validize_mem (copy_rtx (stack_parm)),
2720 data->partial / UNITS_PER_WORD);
2721 }
2722
2723 entry_parm = stack_parm;
2724 }
2725
2726 /* If we didn't decide this parm came in a register, by default it came
2727 on the stack. */
2728 else if (entry_parm == NULL)
2729 entry_parm = stack_parm;
2730
2731 /* When an argument is passed in multiple locations, we can't make use
2732 of this information, but we can save some copying if the whole argument
2733 is passed in a single register. */
2734 else if (GET_CODE (entry_parm) == PARALLEL
2735 && data->nominal_mode != BLKmode
2736 && data->passed_mode != BLKmode)
2737 {
2738 size_t i, len = XVECLEN (entry_parm, 0);
2739
2740 for (i = 0; i < len; i++)
2741 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2742 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2743 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2744 == data->passed_mode)
2745 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2746 {
2747 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2748 break;
2749 }
2750 }
2751
2752 data->entry_parm = entry_parm;
2753 }
2754
2755 /* A subroutine of assign_parms. Reconstitute any values which were
2756 passed in multiple registers and would fit in a single register. */
2757
2758 static void
2759 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2760 {
2761 rtx entry_parm = data->entry_parm;
2762
2763 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2764 This can be done with register operations rather than on the
2765 stack, even if we will store the reconstituted parameter on the
2766 stack later. */
2767 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2768 {
2769 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2770 emit_group_store (parmreg, entry_parm, data->passed_type,
2771 GET_MODE_SIZE (GET_MODE (entry_parm)));
2772 entry_parm = parmreg;
2773 }
2774
2775 data->entry_parm = entry_parm;
2776 }
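/* Sketch: an entry_parm such as
     (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
                   (expr_list (reg:SI 4) (const_int 4))])
   is reassembled by emit_group_store into a fresh DImode pseudo,
   letting later code treat the argument as one register value. */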
2777
2778 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2779 always valid and properly aligned. */
2780
2781 static void
2782 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2783 {
2784 rtx stack_parm = data->stack_parm;
2785
2786 /* If we can't trust the parm stack slot to be aligned enough for its
2787 ultimate type, don't use that slot after entry. We'll make another
2788 stack slot, if we need one. */
2789 if (stack_parm
2790 && ((STRICT_ALIGNMENT
2791 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2792 || (data->nominal_type
2793 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2794 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2795 stack_parm = NULL;
2796
2797 /* If parm was passed in memory, and we need to convert it on entry,
2798 don't store it back in that same slot. */
2799 else if (data->entry_parm == stack_parm
2800 && data->nominal_mode != BLKmode
2801 && data->nominal_mode != data->passed_mode)
2802 stack_parm = NULL;
2803
2804 /* If stack protection is in effect for this function, don't leave any
2805 pointers in their passed stack slots. */
2806 else if (crtl->stack_protect_guard
2807 && (flag_stack_protect == 2
2808 || data->passed_pointer
2809 || POINTER_TYPE_P (data->nominal_type)))
2810 stack_parm = NULL;
2811
2812 data->stack_parm = stack_parm;
2813 }
2814
2815 /* A subroutine of assign_parms. Return true if the current parameter
2816 should be stored as a BLKmode in the current frame. */
2817
2818 static bool
2819 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2820 {
2821 if (data->nominal_mode == BLKmode)
2822 return true;
2823 if (GET_MODE (data->entry_parm) == BLKmode)
2824 return true;
2825
2826 #ifdef BLOCK_REG_PADDING
2827 /* Only assign_parm_setup_block knows how to deal with register arguments
2828 that are padded at the least significant end. */
2829 if (REG_P (data->entry_parm)
2830 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2831 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2832 == (BYTES_BIG_ENDIAN ? upward : downward)))
2833 return true;
2834 #endif
2835
2836 return false;
2837 }
2838
2839 /* A subroutine of assign_parms. Arrange for the parameter to be
2840 present and valid in DATA->STACK_RTL. */
2841
2842 static void
2843 assign_parm_setup_block (struct assign_parm_data_all *all,
2844 tree parm, struct assign_parm_data_one *data)
2845 {
2846 rtx entry_parm = data->entry_parm;
2847 rtx stack_parm = data->stack_parm;
2848 HOST_WIDE_INT size;
2849 HOST_WIDE_INT size_stored;
2850
2851 if (GET_CODE (entry_parm) == PARALLEL)
2852 entry_parm = emit_group_move_into_temps (entry_parm);
2853
2854 size = int_size_in_bytes (data->passed_type);
2855 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2856 if (stack_parm == 0)
2857 {
2858 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2859 stack_parm = assign_stack_local (BLKmode, size_stored,
2860 DECL_ALIGN (parm));
2861 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2862 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2863 set_mem_attributes (stack_parm, parm, 1);
2864 }
2865
2866 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2867 calls that pass values in multiple non-contiguous locations. */
2868 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2869 {
2870 rtx mem;
2871
2872 /* Note that we will be storing an integral number of words.
2873 So we have to be careful to ensure that we allocate an
2874 integral number of words. We do this above when we call
2875 assign_stack_local if space was not allocated in the argument
2876 list. If it was, this will not work if PARM_BOUNDARY is not
2877 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2878 if it becomes a problem. The exception is when BLKmode arrives
2879 with arguments not conforming to word_mode. */
2880
2881 if (data->stack_parm == 0)
2882 ;
2883 else if (GET_CODE (entry_parm) == PARALLEL)
2884 ;
2885 else
2886 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2887
2888 mem = validize_mem (copy_rtx (stack_parm));
2889
2890 /* Handle values in multiple non-contiguous locations. */
2891 if (GET_CODE (entry_parm) == PARALLEL)
2892 {
2893 push_to_sequence2 (all->first_conversion_insn,
2894 all->last_conversion_insn);
2895 emit_group_store (mem, entry_parm, data->passed_type, size);
2896 all->first_conversion_insn = get_insns ();
2897 all->last_conversion_insn = get_last_insn ();
2898 end_sequence ();
2899 }
2900
2901 else if (size == 0)
2902 ;
2903
2904 /* If SIZE is that of a mode no bigger than a word, just use
2905 that mode's store operation. */
2906 else if (size <= UNITS_PER_WORD)
2907 {
2908 machine_mode mode
2909 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2910
2911 if (mode != BLKmode
2912 #ifdef BLOCK_REG_PADDING
2913 && (size == UNITS_PER_WORD
2914 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2915 != (BYTES_BIG_ENDIAN ? upward : downward)))
2916 #endif
2917 )
2918 {
2919 rtx reg;
2920
2921 /* We are really truncating a word_mode value containing
2922 SIZE bytes into a value of mode MODE. If such an
2923 operation requires no actual instructions, we can refer
2924 to the value directly in mode MODE, otherwise we must
2925 start with the register in word_mode and explicitly
2926 convert it. */
2927 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2928 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2929 else
2930 {
2931 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2932 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2933 }
2934 emit_move_insn (change_address (mem, mode, 0), reg);
2935 }
2936
2937 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2938 machine must be aligned to the left before storing
2939 to memory. Note that the previous test doesn't
2940 handle all cases (e.g. SIZE == 3). */
2941 else if (size != UNITS_PER_WORD
2942 #ifdef BLOCK_REG_PADDING
2943 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2944 == downward)
2945 #else
2946 && BYTES_BIG_ENDIAN
2947 #endif
2948 )
2949 {
2950 rtx tem, x;
2951 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2952 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2953
2954 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2955 tem = change_address (mem, word_mode, 0);
2956 emit_move_insn (tem, x);
2957 }
2958 else
2959 move_block_from_reg (REGNO (entry_parm), mem,
2960 size_stored / UNITS_PER_WORD);
2961 }
2962 else
2963 move_block_from_reg (REGNO (entry_parm), mem,
2964 size_stored / UNITS_PER_WORD);
2965 }
2966 else if (data->stack_parm == 0)
2967 {
2968 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2969 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2970 BLOCK_OP_NORMAL);
2971 all->first_conversion_insn = get_insns ();
2972 all->last_conversion_insn = get_last_insn ();
2973 end_sequence ();
2974 }
2975
2976 data->stack_parm = stack_parm;
2977 SET_DECL_RTL (parm, stack_parm);
2978 }
2979
2980 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2981 parameter. Get it there. Perform all ABI specified conversions. */
2982
2983 static void
2984 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2985 struct assign_parm_data_one *data)
2986 {
2987 rtx parmreg, validated_mem;
2988 rtx equiv_stack_parm;
2989 machine_mode promoted_nominal_mode;
2990 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2991 bool did_conversion = false;
2992 bool need_conversion, moved;
2993
2994 /* Store the parm in a pseudoregister during the function, but we may
2995 need to do it in a wider mode. Using 2 here makes the result
2996 consistent with promote_decl_mode and thus expand_expr_real_1. */
2997 promoted_nominal_mode
2998 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2999 TREE_TYPE (current_function_decl), 2);
3000
3001 parmreg = gen_reg_rtx (promoted_nominal_mode);
3002
3003 if (!DECL_ARTIFICIAL (parm))
3004 mark_user_reg (parmreg);
3005
3006 /* If this was an item that we received a pointer to,
3007 set DECL_RTL appropriately. */
3008 if (data->passed_pointer)
3009 {
3010 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3011 set_mem_attributes (x, parm, 1);
3012 SET_DECL_RTL (parm, x);
3013 }
3014 else
3015 SET_DECL_RTL (parm, parmreg);
3016
3017 assign_parm_remove_parallels (data);
3018
3019 /* Copy the value into the register, thus bridging between
3020 assign_parm_find_data_types and expand_expr_real_1. */
3021
3022 equiv_stack_parm = data->stack_parm;
3023 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3024
3025 need_conversion = (data->nominal_mode != data->passed_mode
3026 || promoted_nominal_mode != data->promoted_mode);
3027 moved = false;
3028
3029 if (need_conversion
3030 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3031 && data->nominal_mode == data->passed_mode
3032 && data->nominal_mode == GET_MODE (data->entry_parm))
3033 {
3034 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3035 mode, by the caller. We now have to convert it to
3036 NOMINAL_MODE, if different. However, PARMREG may be in
3037 a different mode than NOMINAL_MODE if it is being stored
3038 promoted.
3039
3040 If ENTRY_PARM is a hard register, it might be in a register
3041 not valid for operating in its mode (e.g., an odd-numbered
3042 register for a DFmode). In that case, moves are the only
3043 thing valid, so we can't do a convert from there. This
3044 occurs when the calling sequence allows such misaligned
3045 usages.
3046
3047 In addition, the conversion may involve a call, which could
3048 clobber parameters which haven't been copied to pseudo
3049 registers yet.
3050
3051 First, we try to emit an insn which performs the necessary
3052 conversion. We verify that this insn does not clobber any
3053 hard registers. */
3054
3055 enum insn_code icode;
3056 rtx op0, op1;
3057
3058 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3059 unsignedp);
3060
3061 op0 = parmreg;
3062 op1 = validated_mem;
3063 if (icode != CODE_FOR_nothing
3064 && insn_operand_matches (icode, 0, op0)
3065 && insn_operand_matches (icode, 1, op1))
3066 {
3067 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3068 rtx_insn *insn, *insns;
3069 rtx t = op1;
3070 HARD_REG_SET hardregs;
3071
3072 start_sequence ();
3073 /* If op1 is a hard register that is likely spilled, first
3074 force it into a pseudo, otherwise the combiner might extend
3075 its lifetime too much. */
3076 if (GET_CODE (t) == SUBREG)
3077 t = SUBREG_REG (t);
3078 if (REG_P (t)
3079 && HARD_REGISTER_P (t)
3080 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3081 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3082 {
3083 t = gen_reg_rtx (GET_MODE (op1));
3084 emit_move_insn (t, op1);
3085 }
3086 else
3087 t = op1;
3088 rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3089 data->passed_mode, unsignedp);
3090 emit_insn (pat);
3091 insns = get_insns ();
3092
3093 moved = true;
3094 CLEAR_HARD_REG_SET (hardregs);
3095 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3096 {
3097 if (INSN_P (insn))
3098 note_stores (PATTERN (insn), record_hard_reg_sets,
3099 &hardregs);
3100 if (!hard_reg_set_empty_p (hardregs))
3101 moved = false;
3102 }
3103
3104 end_sequence ();
3105
3106 if (moved)
3107 {
3108 emit_insn (insns);
3109 if (equiv_stack_parm != NULL_RTX)
3110 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3111 equiv_stack_parm);
3112 }
3113 }
3114 }
3115
3116 if (moved)
3117 /* Nothing to do. */
3118 ;
3119 else if (need_conversion)
3120 {
3121 /* We did not have an insn to convert directly, or the sequence
3122 generated appeared unsafe. We must first copy the parm to a
3123 pseudo reg, and save the conversion until after all
3124 parameters have been moved. */
3125
3126 int save_tree_used;
3127 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3128
3129 emit_move_insn (tempreg, validated_mem);
3130
3131 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3132 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3133
3134 if (GET_CODE (tempreg) == SUBREG
3135 && GET_MODE (tempreg) == data->nominal_mode
3136 && REG_P (SUBREG_REG (tempreg))
3137 && data->nominal_mode == data->passed_mode
3138 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3139 && GET_MODE_SIZE (GET_MODE (tempreg))
3140 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3141 {
3142 /* The argument is already sign/zero extended, so note it
3143 into the subreg. */
3144 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3145 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3146 }
3147
3148 /* TREE_USED gets set erroneously during expand_assignment. */
3149 save_tree_used = TREE_USED (parm);
3150 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3151 TREE_USED (parm) = save_tree_used;
3152 all->first_conversion_insn = get_insns ();
3153 all->last_conversion_insn = get_last_insn ();
3154 end_sequence ();
3155
3156 did_conversion = true;
3157 }
3158 else
3159 emit_move_insn (parmreg, validated_mem);
3160
3161 /* If we were passed a pointer but the actual value can safely live
3162 in a register, retrieve it and use it directly. */
3163 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3164 {
3165 /* We can't use nominal_mode, because it will have been set to
3166 Pmode above. We must use the actual mode of the parm. */
3167 if (use_register_for_decl (parm))
3168 {
3169 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3170 mark_user_reg (parmreg);
3171 }
3172 else
3173 {
3174 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3175 TYPE_MODE (TREE_TYPE (parm)),
3176 TYPE_ALIGN (TREE_TYPE (parm)));
3177 parmreg
3178 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3179 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3180 align);
3181 set_mem_attributes (parmreg, parm, 1);
3182 }
3183
3184 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3185 {
3186 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3187 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3188
3189 push_to_sequence2 (all->first_conversion_insn,
3190 all->last_conversion_insn);
3191 emit_move_insn (tempreg, DECL_RTL (parm));
3192 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3193 emit_move_insn (parmreg, tempreg);
3194 all->first_conversion_insn = get_insns ();
3195 all->last_conversion_insn = get_last_insn ();
3196 end_sequence ();
3197
3198 did_conversion = true;
3199 }
3200 else
3201 emit_move_insn (parmreg, DECL_RTL (parm));
3202
3203 SET_DECL_RTL (parm, parmreg);
3204
3205 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3206 now the parm. */
3207 data->stack_parm = NULL;
3208 }
3209
3210 /* Mark the register as eliminable if we did no conversion and it was
3211 copied from memory at a fixed offset, and the arg pointer was not
3212 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3213 offset formed an invalid address, such memory-equivalences as we
3214 make here would screw up life analysis for it. */
3215 if (data->nominal_mode == data->passed_mode
3216 && !did_conversion
3217 && data->stack_parm != 0
3218 && MEM_P (data->stack_parm)
3219 && data->locate.offset.var == 0
3220 && reg_mentioned_p (virtual_incoming_args_rtx,
3221 XEXP (data->stack_parm, 0)))
3222 {
3223 rtx_insn *linsn = get_last_insn ();
3224 rtx_insn *sinsn;
3225 rtx set;
3226
3227 /* Mark complex types separately. */
3228 if (GET_CODE (parmreg) == CONCAT)
3229 {
3230 machine_mode submode
3231 = GET_MODE_INNER (GET_MODE (parmreg));
3232 int regnor = REGNO (XEXP (parmreg, 0));
3233 int regnoi = REGNO (XEXP (parmreg, 1));
3234 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3235 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3236 GET_MODE_SIZE (submode));
3237
3238 /* Scan backwards for the set of the real and
3239 imaginary parts. */
3240 for (sinsn = linsn; sinsn != 0;
3241 sinsn = prev_nonnote_insn (sinsn))
3242 {
3243 set = single_set (sinsn);
3244 if (set == 0)
3245 continue;
3246
3247 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3248 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3249 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3250 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3251 }
3252 }
3253 else
3254 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3255 }
3256
3257 /* For pointer data type, suggest pointer register. */
3258 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3259 mark_reg_pointer (parmreg,
3260 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3261 }
3262
3263 /* A subroutine of assign_parms. Allocate stack space to hold the current
3264 parameter. Get it there. Perform all ABI specified conversions. */
3265
3266 static void
3267 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3268 struct assign_parm_data_one *data)
3269 {
3270 /* Value must be stored in the stack slot STACK_PARM during function
3271 execution. */
3272 bool to_conversion = false;
3273
3274 assign_parm_remove_parallels (data);
3275
3276 if (data->promoted_mode != data->nominal_mode)
3277 {
3278 /* Conversion is required. */
3279 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3280
3281 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3282
3283 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3284 to_conversion = true;
3285
3286 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3287 TYPE_UNSIGNED (TREE_TYPE (parm)));
3288
3289 if (data->stack_parm)
3290 {
3291 int offset = subreg_lowpart_offset (data->nominal_mode,
3292 GET_MODE (data->stack_parm));
3293 /* ??? This may need a big-endian conversion on sparc64. */
3294 data->stack_parm
3295 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3296 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3297 set_mem_offset (data->stack_parm,
3298 MEM_OFFSET (data->stack_parm) + offset);
3299 }
3300 }
3301
3302 if (data->entry_parm != data->stack_parm)
3303 {
3304 rtx src, dest;
3305
3306 if (data->stack_parm == 0)
3307 {
3308 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3309 GET_MODE (data->entry_parm),
3310 TYPE_ALIGN (data->passed_type));
3311 data->stack_parm
3312 = assign_stack_local (GET_MODE (data->entry_parm),
3313 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3314 align);
3315 set_mem_attributes (data->stack_parm, parm, 1);
3316 }
3317
3318 dest = validize_mem (copy_rtx (data->stack_parm));
3319 src = validize_mem (copy_rtx (data->entry_parm));
3320
3321 if (MEM_P (src))
3322 {
3323 /* Use a block move to handle potentially misaligned entry_parm. */
3324 if (!to_conversion)
3325 push_to_sequence2 (all->first_conversion_insn,
3326 all->last_conversion_insn);
3327 to_conversion = true;
3328
3329 emit_block_move (dest, src,
3330 GEN_INT (int_size_in_bytes (data->passed_type)),
3331 BLOCK_OP_NORMAL);
3332 }
3333 else
3334 emit_move_insn (dest, src);
3335 }
3336
3337 if (to_conversion)
3338 {
3339 all->first_conversion_insn = get_insns ();
3340 all->last_conversion_insn = get_last_insn ();
3341 end_sequence ();
3342 }
3343
3344 SET_DECL_RTL (parm, data->stack_parm);
3345 }
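/* Sketch of the conversion path above: a HImode parm arriving
   extended in an SImode register is first copied into an SImode
   pseudo, converted back to HImode within the shared conversion
   sequence, and then stored to a (possibly freshly allocated)
   HImode stack slot. */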
3346
3347 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3348 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3349
3350 static void
3351 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3352 vec<tree> fnargs)
3353 {
3354 tree parm;
3355 tree orig_fnargs = all->orig_fnargs;
3356 unsigned i = 0;
3357
3358 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3359 {
3360 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3361 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3362 {
3363 rtx tmp, real, imag;
3364 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3365
3366 real = DECL_RTL (fnargs[i]);
3367 imag = DECL_RTL (fnargs[i + 1]);
3368 if (inner != GET_MODE (real))
3369 {
3370 real = gen_lowpart_SUBREG (inner, real);
3371 imag = gen_lowpart_SUBREG (inner, imag);
3372 }
3373
3374 if (TREE_ADDRESSABLE (parm))
3375 {
3376 rtx rmem, imem;
3377 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3378 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3379 DECL_MODE (parm),
3380 TYPE_ALIGN (TREE_TYPE (parm)));
3381
3382 /* split_complex_arg put the real and imag parts in
3383 pseudos. Move them to memory. */
3384 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3385 set_mem_attributes (tmp, parm, 1);
3386 rmem = adjust_address_nv (tmp, inner, 0);
3387 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3388 push_to_sequence2 (all->first_conversion_insn,
3389 all->last_conversion_insn);
3390 emit_move_insn (rmem, real);
3391 emit_move_insn (imem, imag);
3392 all->first_conversion_insn = get_insns ();
3393 all->last_conversion_insn = get_last_insn ();
3394 end_sequence ();
3395 }
3396 else
3397 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3398 SET_DECL_RTL (parm, tmp);
3399
3400 real = DECL_INCOMING_RTL (fnargs[i]);
3401 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3402 if (inner != GET_MODE (real))
3403 {
3404 real = gen_lowpart_SUBREG (inner, real);
3405 imag = gen_lowpart_SUBREG (inner, imag);
3406 }
3407 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3408 set_decl_incoming_rtl (parm, tmp, false);
3409 i++;
3410 }
3411 }
3412 }
3413
3414 /* Load bounds of PARM from bounds table. */
3415 static void
3416 assign_parm_load_bounds (struct assign_parm_data_one *data,
3417 tree parm,
3418 rtx entry,
3419 unsigned bound_no)
3420 {
3421 bitmap_iterator bi;
3422 unsigned i, offs = 0;
3423 int bnd_no = -1;
3424 rtx slot = NULL, ptr = NULL;
3425
3426 if (parm)
3427 {
3428 bitmap slots;
3429 bitmap_obstack_initialize (NULL);
3430 slots = BITMAP_ALLOC (NULL);
3431 chkp_find_bound_slots (TREE_TYPE (parm), slots);
3432 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3433 {
3434 if (bound_no)
3435 bound_no--;
3436 else
3437 {
3438 bnd_no = i;
3439 break;
3440 }
3441 }
3442 BITMAP_FREE (slots);
3443 bitmap_obstack_release (NULL);
3444 }
3445
3446 /* We may have bounds not associated with any pointer. */
3447 if (bnd_no != -1)
3448 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3449
3450 /* Find associated pointer. */
3451 if (bnd_no == -1)
3452 {
3453 /* If the bounds are not associated with any pointer,
3454 then they are passed in a register or special slot. */
3455 gcc_assert (data->entry_parm);
3456 ptr = const0_rtx;
3457 }
3458 else if (MEM_P (entry))
3459 slot = adjust_address (entry, Pmode, offs);
3460 else if (REG_P (entry))
3461 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3462 else if (GET_CODE (entry) == PARALLEL)
3463 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3464 else
3465 gcc_unreachable ();
3466 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3467 data->entry_parm);
3468 }
3469
3470 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3471
3472 static void
3473 assign_bounds (vec<bounds_parm_data> &bndargs,
3474 struct assign_parm_data_all &all)
3475 {
3476 unsigned i, pass, handled = 0;
3477 bounds_parm_data *pbdata;
3478
3479 if (!bndargs.exists ())
3480 return;
3481
3482 /* We make a few passes to store input bounds. First, handle bounds
3483 passed in registers. After that, load bounds passed in special
3484 slots. Finally, load bounds from the Bounds Table. */
3485 for (pass = 0; pass < 3; pass++)
3486 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3487 {
3488 /* Pass 0 => regs only. */
3489 if (pass == 0
3490 && (!pbdata->parm_data.entry_parm
3491 || GET_CODE (pbdata->parm_data.entry_parm) != REG))
3492 continue;
3493 /* Pass 1 => slots only. */
3494 else if (pass == 1
3495 && (!pbdata->parm_data.entry_parm
3496 || GET_CODE (pbdata->parm_data.entry_parm) == REG))
3497 continue;
3498 /* Pass 2 => BT only. */
3499 else if (pass == 2
3500 && pbdata->parm_data.entry_parm)
3501 continue;
3502
3503 if (!pbdata->parm_data.entry_parm
3504 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3505 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3506 pbdata->ptr_entry, pbdata->bound_no);
3507
3508 set_decl_incoming_rtl (pbdata->bounds_parm,
3509 pbdata->parm_data.entry_parm, false);
3510
3511 if (assign_parm_setup_block_p (&pbdata->parm_data))
3512 assign_parm_setup_block (&all, pbdata->bounds_parm,
3513 &pbdata->parm_data);
3514 else if (pbdata->parm_data.passed_pointer
3515 || use_register_for_decl (pbdata->bounds_parm))
3516 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3517 &pbdata->parm_data);
3518 else
3519 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3520 &pbdata->parm_data);
3521
3522 /* Count handled bounds to make sure we miss nothing. */
3523 handled++;
3524 }
3525
3526 gcc_assert (handled == bndargs.length ());
3527
3528 bndargs.release ();
3529 }
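
/* A sketch of the ordering above (the rationale is inferred, not
   stated in the sources): bounds already sitting in bound registers
   are consumed in pass 0, slot-passed bounds in pass 1, and Bounds
   Table loads last in pass 2, so that the instructions emitted by the
   later, load-heavy passes cannot disturb bound registers that still
   hold unsaved incoming values.  */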
3530
3531 /* Assign RTL expressions to the function's parameters. This may involve
3532 copying them into registers and using those registers as the DECL_RTL. */
3533
3534 static void
3535 assign_parms (tree fndecl)
3536 {
3537 struct assign_parm_data_all all;
3538 tree parm;
3539 vec<tree> fnargs;
3540 unsigned i, bound_no = 0;
3541 tree last_arg = NULL;
3542 rtx last_arg_entry = NULL;
3543 vec<bounds_parm_data> bndargs = vNULL;
3544 bounds_parm_data bdata;
3545
3546 crtl->args.internal_arg_pointer
3547 = targetm.calls.internal_arg_pointer ();
3548
3549 assign_parms_initialize_all (&all);
3550 fnargs = assign_parms_augmented_arg_list (&all);
3551
3552 FOR_EACH_VEC_ELT (fnargs, i, parm)
3553 {
3554 struct assign_parm_data_one data;
3555
3556 /* Extract the type of PARM; adjust it according to ABI. */
3557 assign_parm_find_data_types (&all, parm, &data);
3558
3559 /* Early out for errors and void parameters. */
3560 if (data.passed_mode == VOIDmode)
3561 {
3562 SET_DECL_RTL (parm, const0_rtx);
3563 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3564 continue;
3565 }
3566
3567 /* Estimate stack alignment from parameter alignment. */
3568 if (SUPPORTS_STACK_ALIGNMENT)
3569 {
3570 unsigned int align
3571 = targetm.calls.function_arg_boundary (data.promoted_mode,
3572 data.passed_type);
3573 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3574 align);
3575 if (TYPE_ALIGN (data.nominal_type) > align)
3576 align = MINIMUM_ALIGNMENT (data.nominal_type,
3577 TYPE_MODE (data.nominal_type),
3578 TYPE_ALIGN (data.nominal_type));
3579 if (crtl->stack_alignment_estimated < align)
3580 {
3581 gcc_assert (!crtl->stack_realign_processed);
3582 crtl->stack_alignment_estimated = align;
3583 }
3584 }
3585
3586 /* Find out where the parameter arrives in this function. */
3587 assign_parm_find_entry_rtl (&all, &data);
3588
3589 /* Find out where stack space for this parameter might be. */
3590 if (assign_parm_is_stack_parm (&all, &data))
3591 {
3592 assign_parm_find_stack_rtl (parm, &data);
3593 assign_parm_adjust_entry_rtl (&data);
3594 }
3595 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3596 {
3597 /* Remember where the last non-bounds arg was passed in case
3598 we have to load its associated bounds from the Bounds
3599 Table. */
3600 last_arg = parm;
3601 last_arg_entry = data.entry_parm;
3602 bound_no = 0;
3603 }
3604 /* Record permanently how this parm was passed. */
3605 if (data.passed_pointer)
3606 {
3607 rtx incoming_rtl
3608 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3609 data.entry_parm);
3610 set_decl_incoming_rtl (parm, incoming_rtl, true);
3611 }
3612 else
3613 set_decl_incoming_rtl (parm, data.entry_parm, false);
3614
3615 /* Bounds should be loaded in a particular order to
3616 have registers allocated correctly. Collect info about
3617 input bounds and load them later. */
3618 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3619 {
3620 /* Expect bounds in instrumented functions only. */
3621 gcc_assert (chkp_function_instrumented_p (fndecl));
3622
3623 bdata.parm_data = data;
3624 bdata.bounds_parm = parm;
3625 bdata.ptr_parm = last_arg;
3626 bdata.ptr_entry = last_arg_entry;
3627 bdata.bound_no = bound_no;
3628 bndargs.safe_push (bdata);
3629 }
3630 else
3631 {
3632 assign_parm_adjust_stack_rtl (&data);
3633
3634 if (assign_parm_setup_block_p (&data))
3635 assign_parm_setup_block (&all, parm, &data);
3636 else if (data.passed_pointer || use_register_for_decl (parm))
3637 assign_parm_setup_reg (&all, parm, &data);
3638 else
3639 assign_parm_setup_stack (&all, parm, &data);
3640 }
3641
3642 if (cfun->stdarg && !DECL_CHAIN (parm))
3643 {
3644 int pretend_bytes = 0;
3645
3646 assign_parms_setup_varargs (&all, &data, false);
3647
3648 if (chkp_function_instrumented_p (fndecl))
3649 {
3650 /* We expect this to be the last parm. Otherwise it is wrong
3651 to assign bounds right now. */
3652 gcc_assert (i == (fnargs.length () - 1));
3653 assign_bounds (bndargs, all);
3654 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3655 data.promoted_mode,
3656 data.passed_type,
3657 &pretend_bytes,
3658 false);
3659 }
3660 }
3661
3662 /* Update info on where next arg arrives in registers. */
3663 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3664 data.passed_type, data.named_arg);
3665
3666 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3667 bound_no++;
3668 }
3669
3670 assign_bounds (bndargs, all);
3671
3672 if (targetm.calls.split_complex_arg)
3673 assign_parms_unsplit_complex (&all, fnargs);
3674
3675 fnargs.release ();
3676
3677 /* Initialize pic_offset_table_rtx with a pseudo register
3678 if required. */
3679 if (targetm.use_pseudo_pic_reg ())
3680 pic_offset_table_rtx = gen_reg_rtx (Pmode);
3681
3682 /* Output all parameter conversion instructions (possibly including calls)
3683 now that all parameters have been copied out of hard registers. */
3684 emit_insn (all.first_conversion_insn);
3685
3686 /* Estimate reload stack alignment from scalar return mode. */
3687 if (SUPPORTS_STACK_ALIGNMENT)
3688 {
3689 if (DECL_RESULT (fndecl))
3690 {
3691 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3692 machine_mode mode = TYPE_MODE (type);
3693
3694 if (mode != BLKmode
3695 && mode != VOIDmode
3696 && !AGGREGATE_TYPE_P (type))
3697 {
3698 unsigned int align = GET_MODE_ALIGNMENT (mode);
3699 if (crtl->stack_alignment_estimated < align)
3700 {
3701 gcc_assert (!crtl->stack_realign_processed);
3702 crtl->stack_alignment_estimated = align;
3703 }
3704 }
3705 }
3706 }
3707
3708 /* If we are receiving a struct value address as the first argument, set up
3709 the RTL for the function result. As this might require code to convert
3710 the transmitted address to Pmode, we do this here to ensure that possible
3711 preliminary conversions of the address have been emitted already. */
3712 if (all.function_result_decl)
3713 {
3714 tree result = DECL_RESULT (current_function_decl);
3715 rtx addr = DECL_RTL (all.function_result_decl);
3716 rtx x;
3717
3718 if (DECL_BY_REFERENCE (result))
3719 {
3720 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3721 x = addr;
3722 }
3723 else
3724 {
3725 SET_DECL_VALUE_EXPR (result,
3726 build1 (INDIRECT_REF, TREE_TYPE (result),
3727 all.function_result_decl));
3728 addr = convert_memory_address (Pmode, addr);
3729 x = gen_rtx_MEM (DECL_MODE (result), addr);
3730 set_mem_attributes (x, result, 1);
3731 }
3732
3733 DECL_HAS_VALUE_EXPR_P (result) = 1;
3734
3735 SET_DECL_RTL (result, x);
3736 }
3737
3738 /* We have aligned all the args, so add space for the pretend args. */
3739 crtl->args.pretend_args_size = all.pretend_args_size;
3740 all.stack_args_size.constant += all.extra_pretend_bytes;
3741 crtl->args.size = all.stack_args_size.constant;
3742
3743 /* Adjust function incoming argument size for alignment and
3744 minimum length. */
3745
3746 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3747 crtl->args.size = CEIL_ROUND (crtl->args.size,
3748 PARM_BOUNDARY / BITS_PER_UNIT);
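/* E.g. (illustrative numbers): with 9 bytes of incoming stack args
   and PARM_BOUNDARY == 64, this computes CEIL_ROUND (9, 8) == 16,
   so crtl->args.size becomes 16.  */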
3749
3750 #ifdef ARGS_GROW_DOWNWARD
3751 crtl->args.arg_offset_rtx
3752 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3753 : expand_expr (size_diffop (all.stack_args_size.var,
3754 size_int (-all.stack_args_size.constant)),
3755 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3756 #else
3757 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3758 #endif
3759
3760 /* See how many bytes, if any, of its args a function should try to pop
3761 on return. */
3762
3763 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3764 TREE_TYPE (fndecl),
3765 crtl->args.size);
3766
3767 /* For a stdarg.h function, save info about
3768 regs and stack space used by the named args. */
3769
3770 crtl->args.info = all.args_so_far_v;
3771
3772 /* Set the rtx used for the function return value. Put this in its
3773 own variable so any optimizers that need this information don't have
3774 to include tree.h. Do this here so it gets done when an inlined
3775 function gets output. */
3776
3777 crtl->return_rtx
3778 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3779 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3780
3781 /* If scalar return value was computed in a pseudo-reg, or was a named
3782 return value that got dumped to the stack, copy that to the hard
3783 return register. */
3784 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3785 {
3786 tree decl_result = DECL_RESULT (fndecl);
3787 rtx decl_rtl = DECL_RTL (decl_result);
3788
3789 if (REG_P (decl_rtl)
3790 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3791 : DECL_REGISTER (decl_result))
3792 {
3793 rtx real_decl_rtl;
3794
3795 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3796 fndecl, true);
3797 if (chkp_function_instrumented_p (fndecl))
3798 crtl->return_bnd
3799 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3800 fndecl, true);
3801 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3802 /* The delay slot scheduler assumes that crtl->return_rtx
3803 holds the hard register containing the return value, not a
3804 temporary pseudo. */
3805 crtl->return_rtx = real_decl_rtl;
3806 }
3807 }
3808 }
3809
3810 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3811 For all seen types, gimplify their sizes. */
3812
3813 static tree
3814 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3815 {
3816 tree t = *tp;
3817
3818 *walk_subtrees = 0;
3819 if (TYPE_P (t))
3820 {
3821 if (POINTER_TYPE_P (t))
3822 *walk_subtrees = 1;
3823 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3824 && !TYPE_SIZES_GIMPLIFIED (t))
3825 {
3826 gimplify_type_sizes (t, (gimple_seq *) data);
3827 *walk_subtrees = 1;
3828 }
3829 }
3830
3831 return NULL;
3832 }
3833
3834 /* Gimplify the parameter list for current_function_decl. This involves
3835 evaluating SAVE_EXPRs of variable sized parameters and generating code
3836 to implement callee-copies reference parameters. Returns a sequence of
3837 statements to add to the beginning of the function. */
3838
3839 gimple_seq
3840 gimplify_parameters (void)
3841 {
3842 struct assign_parm_data_all all;
3843 tree parm;
3844 gimple_seq stmts = NULL;
3845 vec<tree> fnargs;
3846 unsigned i;
3847
3848 assign_parms_initialize_all (&all);
3849 fnargs = assign_parms_augmented_arg_list (&all);
3850
3851 FOR_EACH_VEC_ELT (fnargs, i, parm)
3852 {
3853 struct assign_parm_data_one data;
3854
3855 /* Extract the type of PARM; adjust it according to ABI. */
3856 assign_parm_find_data_types (&all, parm, &data);
3857
3858 /* Early out for errors and void parameters. */
3859 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3860 continue;
3861
3862 /* Update info on where next arg arrives in registers. */
3863 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3864 data.passed_type, data.named_arg);
3865
3866 /* ??? Once upon a time variable_size stuffed parameter list
3867 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3868 turned out to be less than manageable in the gimple world.
3869 Now we have to hunt them down ourselves. */
3870 walk_tree_without_duplicates (&data.passed_type,
3871 gimplify_parm_type, &stmts);
3872
3873 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3874 {
3875 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3876 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3877 }
3878
3879 if (data.passed_pointer)
3880 {
3881 tree type = TREE_TYPE (data.passed_type);
3882 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3883 type, data.named_arg))
3884 {
3885 tree local, t;
3886
3887 /* For constant-sized objects, this is trivial; for
3888 variable-sized objects, we have to play games. */
3889 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3890 && !(flag_stack_check == GENERIC_STACK_CHECK
3891 && compare_tree_int (DECL_SIZE_UNIT (parm),
3892 STACK_CHECK_MAX_VAR_SIZE) > 0))
3893 {
3894 local = create_tmp_var (type, get_name (parm));
3895 DECL_IGNORED_P (local) = 0;
3896 /* If PARM was addressable, move that flag over
3897 to the local copy, as its address will be taken,
3898 not the PARM's. Keep the PARM marked addressable
3899 as we'll query that flag during gimplification. */
3900 if (TREE_ADDRESSABLE (parm))
3901 TREE_ADDRESSABLE (local) = 1;
3902 else if (TREE_CODE (type) == COMPLEX_TYPE
3903 || TREE_CODE (type) == VECTOR_TYPE)
3904 DECL_GIMPLE_REG_P (local) = 1;
3905 }
3906 else
3907 {
3908 tree ptr_type, addr;
3909
3910 ptr_type = build_pointer_type (type);
3911 addr = create_tmp_reg (ptr_type, get_name (parm));
3912 DECL_IGNORED_P (addr) = 0;
3913 local = build_fold_indirect_ref (addr);
3914
3915 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3916 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3917 size_int (DECL_ALIGN (parm)));
3918
3919 /* The call has been built for a variable-sized object. */
3920 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3921 t = fold_convert (ptr_type, t);
3922 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3923 gimplify_and_add (t, &stmts);
3924 }
3925
3926 gimplify_assign (local, parm, &stmts);
3927
3928 SET_DECL_VALUE_EXPR (parm, local);
3929 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3930 }
3931 }
3932 }
3933
3934 fnargs.release ();
3935
3936 return stmts;
3937 }
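
/* An illustrative sketch (hypothetical temporary names) of what the
   callee-copy path above produces for a variable-sized reference
   parameter PARM of type TYPE:

     TYPE *addr.1;
     addr.1 = __builtin_alloca_with_align (DECL_SIZE_UNIT (PARM),
                                           DECL_ALIGN (PARM));
     *addr.1 = PARM;

   after which DECL_VALUE_EXPR (PARM) is *addr.1, so later uses of
   PARM refer to the local copy rather than the caller's object.  */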
3938 \f
3939 /* Compute the size and offset from the start of the stacked arguments for a
3940 parm passed in mode PASSED_MODE and with type TYPE.
3941
3942 INITIAL_OFFSET_PTR points to the current offset into the stacked
3943 arguments.
3944
3945 The starting offset and size for this parm are returned in
3946 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3947 nonzero, the offset is that of the stack slot, which is returned in
3948 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3949 padding required from the initial offset ptr to the stack slot.
3950
3951 IN_REGS is nonzero if the argument will be passed in registers. It will
3952 never be set if REG_PARM_STACK_SPACE is not defined.
3953
3954 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3955 for arguments which are passed in registers.
3956
3957 FNDECL is the function in which the argument was defined.
3958
3959 There are two types of rounding that are done. The first, controlled by
3960 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3961 argument list to be aligned to the specific boundary (in bits). This
3962 rounding affects the initial and starting offsets, but not the argument
3963 size.
3964
3965 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3966 optionally rounds the size of the parm to PARM_BOUNDARY. The
3967 initial offset is not affected by this rounding, while the size always
3968 is and the starting offset may be. */
3969
3970 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3971 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3972 callers pass in the total size of args so far as
3973 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
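
/* Illustrative example of the two roundings (hypothetical target
   values): assume PARM_BOUNDARY == 32 and a 1-byte argument whose
   TARGET_FUNCTION_ARG_BOUNDARY is 32 bits.  The starting offset is
   first rounded up to a multiple of 4 bytes.  If FUNCTION_ARG_PADDING
   says `upward', the size is rounded from 1 to 4 so the next argument
   starts 4 bytes later; if it says `downward', the offset is instead
   advanced past the 3 bytes of padding so the value ends the slot.  */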
3974
3975 void
3976 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3977 int reg_parm_stack_space, int partial,
3978 tree fndecl ATTRIBUTE_UNUSED,
3979 struct args_size *initial_offset_ptr,
3980 struct locate_and_pad_arg_data *locate)
3981 {
3982 tree sizetree;
3983 enum direction where_pad;
3984 unsigned int boundary, round_boundary;
3985 int part_size_in_regs;
3986
3987 /* If we have found a stack parm before we reach the end of the
3988 area reserved for registers, skip that area. */
3989 if (! in_regs)
3990 {
3991 if (reg_parm_stack_space > 0)
3992 {
3993 if (initial_offset_ptr->var)
3994 {
3995 initial_offset_ptr->var
3996 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3997 ssize_int (reg_parm_stack_space));
3998 initial_offset_ptr->constant = 0;
3999 }
4000 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4001 initial_offset_ptr->constant = reg_parm_stack_space;
4002 }
4003 }
4004
4005 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4006
4007 sizetree
4008 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4009 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4010 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4011 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4012 type);
4013 locate->where_pad = where_pad;
4014
4015 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4016 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4017 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4018
4019 locate->boundary = boundary;
4020
4021 if (SUPPORTS_STACK_ALIGNMENT)
4022 {
4023 /* stack_alignment_estimated can't change after stack has been
4024 realigned. */
4025 if (crtl->stack_alignment_estimated < boundary)
4026 {
4027 if (!crtl->stack_realign_processed)
4028 crtl->stack_alignment_estimated = boundary;
4029 else
4030 {
4031 /* If stack is realigned and stack alignment value
4032 hasn't been finalized, it is OK not to increase
4033 stack_alignment_estimated. The bigger alignment
4034 requirement is recorded in stack_alignment_needed
4035 below. */
4036 gcc_assert (!crtl->stack_realign_finalized
4037 && crtl->stack_realign_needed);
4038 }
4039 }
4040 }
4041
4042 /* Remember if the outgoing parameter requires extra alignment on the
4043 calling function side. */
4044 if (crtl->stack_alignment_needed < boundary)
4045 crtl->stack_alignment_needed = boundary;
4046 if (crtl->preferred_stack_boundary < boundary)
4047 crtl->preferred_stack_boundary = boundary;
4048
4049 #ifdef ARGS_GROW_DOWNWARD
4050 locate->slot_offset.constant = -initial_offset_ptr->constant;
4051 if (initial_offset_ptr->var)
4052 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4053 initial_offset_ptr->var);
4054
4055 {
4056 tree s2 = sizetree;
4057 if (where_pad != none
4058 && (!tree_fits_uhwi_p (sizetree)
4059 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4060 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4061 SUB_PARM_SIZE (locate->slot_offset, s2);
4062 }
4063
4064 locate->slot_offset.constant += part_size_in_regs;
4065
4066 if (!in_regs || reg_parm_stack_space > 0)
4067 pad_to_arg_alignment (&locate->slot_offset, boundary,
4068 &locate->alignment_pad);
4069
4070 locate->size.constant = (-initial_offset_ptr->constant
4071 - locate->slot_offset.constant);
4072 if (initial_offset_ptr->var)
4073 locate->size.var = size_binop (MINUS_EXPR,
4074 size_binop (MINUS_EXPR,
4075 ssize_int (0),
4076 initial_offset_ptr->var),
4077 locate->slot_offset.var);
4078
4079 /* Pad_below needs the pre-rounded size to know how much to pad
4080 below. */
4081 locate->offset = locate->slot_offset;
4082 if (where_pad == downward)
4083 pad_below (&locate->offset, passed_mode, sizetree);
4084
4085 #else /* !ARGS_GROW_DOWNWARD */
4086 if (!in_regs || reg_parm_stack_space > 0)
4087 pad_to_arg_alignment (initial_offset_ptr, boundary,
4088 &locate->alignment_pad);
4089 locate->slot_offset = *initial_offset_ptr;
4090
4091 #ifdef PUSH_ROUNDING
4092 if (passed_mode != BLKmode)
4093 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4094 #endif
4095
4096 /* Pad_below needs the pre-rounded size to know how much to pad below
4097 so this must be done before rounding up. */
4098 locate->offset = locate->slot_offset;
4099 if (where_pad == downward)
4100 pad_below (&locate->offset, passed_mode, sizetree);
4101
4102 if (where_pad != none
4103 && (!tree_fits_uhwi_p (sizetree)
4104 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4105 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4106
4107 ADD_PARM_SIZE (locate->size, sizetree);
4108
4109 locate->size.constant -= part_size_in_regs;
4110 #endif /* ARGS_GROW_DOWNWARD */
4111
4112 #ifdef FUNCTION_ARG_OFFSET
4113 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4114 #endif
4115 }
4116
4117 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4118 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4119
4120 static void
4121 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4122 struct args_size *alignment_pad)
4123 {
4124 tree save_var = NULL_TREE;
4125 HOST_WIDE_INT save_constant = 0;
4126 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4127 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4128
4129 #ifdef SPARC_STACK_BOUNDARY_HACK
4130 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4131 the real alignment of %sp. However, when it does this, the
4132 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4133 if (SPARC_STACK_BOUNDARY_HACK)
4134 sp_offset = 0;
4135 #endif
4136
4137 if (boundary > PARM_BOUNDARY)
4138 {
4139 save_var = offset_ptr->var;
4140 save_constant = offset_ptr->constant;
4141 }
4142
4143 alignment_pad->var = NULL_TREE;
4144 alignment_pad->constant = 0;
4145
4146 if (boundary > BITS_PER_UNIT)
4147 {
4148 if (offset_ptr->var)
4149 {
4150 tree sp_offset_tree = ssize_int (sp_offset);
4151 tree offset = size_binop (PLUS_EXPR,
4152 ARGS_SIZE_TREE (*offset_ptr),
4153 sp_offset_tree);
4154 #ifdef ARGS_GROW_DOWNWARD
4155 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
4156 #else
4157 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
4158 #endif
4159
4160 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4161 /* ARGS_SIZE_TREE includes constant term. */
4162 offset_ptr->constant = 0;
4163 if (boundary > PARM_BOUNDARY)
4164 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4165 save_var);
4166 }
4167 else
4168 {
4169 offset_ptr->constant = -sp_offset +
4170 #ifdef ARGS_GROW_DOWNWARD
4171 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
4172 #else
4173 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
4174 #endif
4175 if (boundary > PARM_BOUNDARY)
4176 alignment_pad->constant = offset_ptr->constant - save_constant;
4177 }
4178 }
4179 }
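
/* Worked example (illustrative values): with OFFSET_PTR->constant == 20,
   BOUNDARY == 128 bits (16 bytes) and a zero STACK_POINTER_OFFSET,
   upward-growing args give CEIL_ROUND (20, 16) == 32; the offset
   becomes 32 and, since 128 exceeds the usual PARM_BOUNDARY,
   ALIGNMENT_PAD->constant records the 12 bytes of padding.  */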
4180
4181 static void
4182 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4183 {
4184 if (passed_mode != BLKmode)
4185 {
4186 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4187 offset_ptr->constant
4188 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4189 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4190 - GET_MODE_SIZE (passed_mode));
4191 }
4192 else
4193 {
4194 if (TREE_CODE (sizetree) != INTEGER_CST
4195 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4196 {
4197 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4198 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4199 /* Add it in. */
4200 ADD_PARM_SIZE (*offset_ptr, s2);
4201 SUB_PARM_SIZE (*offset_ptr, sizetree);
4202 }
4203 }
4204 }
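
/* E.g. (illustrative): a HImode argument with PARM_BOUNDARY == 32
   fills 2 of its 4 slot bytes, so the offset is advanced by
   4 - 2 == 2 and the value occupies the upper part of the slot, as
   `downward' padding requires.  */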
4205 \f
4206
4207 /* True if register REGNO was alive at a place where `setjmp' was
4208 called and was set more than once or is an argument. Such regs may
4209 be clobbered by `longjmp'. */
4210
4211 static bool
4212 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4213 {
4214 /* There appear to be cases where some local vars never reach the
4215 backend but have bogus regnos. */
4216 if (regno >= max_reg_num ())
4217 return false;
4218
4219 return ((REG_N_SETS (regno) > 1
4220 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4221 regno))
4222 && REGNO_REG_SET_P (setjmp_crosses, regno));
4223 }
4224
4225 /* Walk the tree of blocks describing the binding levels within a
4226 function and warn about variables that might be clobbered by
4227 setjmp or vfork. This is done after flow analysis and before
4228 register allocation, since register allocation will map the
4229 pseudo-regs to hard regs. */
4230
4231 static void
4232 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4233 {
4234 tree decl, sub;
4235
4236 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4237 {
4238 if (TREE_CODE (decl) == VAR_DECL
4239 && DECL_RTL_SET_P (decl)
4240 && REG_P (DECL_RTL (decl))
4241 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4242 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4243 " %<longjmp%> or %<vfork%>", decl);
4244 }
4245
4246 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4247 setjmp_vars_warning (setjmp_crosses, sub);
4248 }
4249
4250 /* Do the appropriate part of setjmp_vars_warning
4251 but for arguments instead of local variables. */
4252
4253 static void
4254 setjmp_args_warning (bitmap setjmp_crosses)
4255 {
4256 tree decl;
4257 for (decl = DECL_ARGUMENTS (current_function_decl);
4258 decl; decl = DECL_CHAIN (decl))
4259 if (DECL_RTL (decl) != 0
4260 && REG_P (DECL_RTL (decl))
4261 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4262 warning (OPT_Wclobbered,
4263 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4264 decl);
4265 }
4266
4267 /* Generate warning messages for variables live across setjmp. */
4268
4269 void
4270 generate_setjmp_warnings (void)
4271 {
4272 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4273
4274 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4275 || bitmap_empty_p (setjmp_crosses))
4276 return;
4277
4278 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4279 setjmp_args_warning (setjmp_crosses);
4280 }
4281
4282 \f
4283 /* Reverse the order of elements in the fragment chain T of blocks,
4284 and return the new head of the chain (old last element).
4285 In addition to that clear BLOCK_SAME_RANGE flags when needed
4286 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4287 its super fragment origin. */
4288
4289 static tree
4290 block_fragments_nreverse (tree t)
4291 {
4292 tree prev = 0, block, next, prev_super = 0;
4293 tree super = BLOCK_SUPERCONTEXT (t);
4294 if (BLOCK_FRAGMENT_ORIGIN (super))
4295 super = BLOCK_FRAGMENT_ORIGIN (super);
4296 for (block = t; block; block = next)
4297 {
4298 next = BLOCK_FRAGMENT_CHAIN (block);
4299 BLOCK_FRAGMENT_CHAIN (block) = prev;
4300 if ((prev && !BLOCK_SAME_RANGE (prev))
4301 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4302 != prev_super))
4303 BLOCK_SAME_RANGE (block) = 0;
4304 prev_super = BLOCK_SUPERCONTEXT (block);
4305 BLOCK_SUPERCONTEXT (block) = super;
4306 prev = block;
4307 }
4308 t = BLOCK_FRAGMENT_ORIGIN (t);
4309 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4310 != prev_super)
4311 BLOCK_SAME_RANGE (t) = 0;
4312 BLOCK_SUPERCONTEXT (t) = super;
4313 return prev;
4314 }
4315
4316 /* Reverse the order of elements in the chain T of blocks,
4317 and return the new head of the chain (old last element).
4318 Also do the same on subblocks and reverse the order of elements
4319 in BLOCK_FRAGMENT_CHAIN as well. */
4320
4321 static tree
4322 blocks_nreverse_all (tree t)
4323 {
4324 tree prev = 0, block, next;
4325 for (block = t; block; block = next)
4326 {
4327 next = BLOCK_CHAIN (block);
4328 BLOCK_CHAIN (block) = prev;
4329 if (BLOCK_FRAGMENT_CHAIN (block)
4330 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4331 {
4332 BLOCK_FRAGMENT_CHAIN (block)
4333 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4334 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4335 BLOCK_SAME_RANGE (block) = 0;
4336 }
4337 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4338 prev = block;
4339 }
4340 return prev;
4341 }
4342
4343
4344 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4345 and create duplicate blocks. */
4346 /* ??? Need an option to either create block fragments or to create
4347 abstract origin duplicates of a source block. It really depends
4348 on what optimization has been performed. */
4349
4350 void
4351 reorder_blocks (void)
4352 {
4353 tree block = DECL_INITIAL (current_function_decl);
4354
4355 if (block == NULL_TREE)
4356 return;
4357
4358 auto_vec<tree, 10> block_stack;
4359
4360 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4361 clear_block_marks (block);
4362
4363 /* Prune the old trees away, so that they don't get in the way. */
4364 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4365 BLOCK_CHAIN (block) = NULL_TREE;
4366
4367 /* Recreate the block tree from the note nesting. */
4368 reorder_blocks_1 (get_insns (), block, &block_stack);
4369 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4370 }
4371
4372 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4373
4374 void
4375 clear_block_marks (tree block)
4376 {
4377 while (block)
4378 {
4379 TREE_ASM_WRITTEN (block) = 0;
4380 clear_block_marks (BLOCK_SUBBLOCKS (block));
4381 block = BLOCK_CHAIN (block);
4382 }
4383 }
4384
4385 static void
4386 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4387 vec<tree> *p_block_stack)
4388 {
4389 rtx_insn *insn;
4390 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4391
4392 for (insn = insns; insn; insn = NEXT_INSN (insn))
4393 {
4394 if (NOTE_P (insn))
4395 {
4396 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4397 {
4398 tree block = NOTE_BLOCK (insn);
4399 tree origin;
4400
4401 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4402 origin = block;
4403
4404 if (prev_end)
4405 BLOCK_SAME_RANGE (prev_end) = 0;
4406 prev_end = NULL_TREE;
4407
4408 /* If we have seen this block before, that means it now
4409 spans multiple address regions. Create a new fragment. */
4410 if (TREE_ASM_WRITTEN (block))
4411 {
4412 tree new_block = copy_node (block);
4413
4414 BLOCK_SAME_RANGE (new_block) = 0;
4415 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4416 BLOCK_FRAGMENT_CHAIN (new_block)
4417 = BLOCK_FRAGMENT_CHAIN (origin);
4418 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4419
4420 NOTE_BLOCK (insn) = new_block;
4421 block = new_block;
4422 }
4423
4424 if (prev_beg == current_block && prev_beg)
4425 BLOCK_SAME_RANGE (block) = 1;
4426
4427 prev_beg = origin;
4428
4429 BLOCK_SUBBLOCKS (block) = 0;
4430 TREE_ASM_WRITTEN (block) = 1;
4431 /* When there's only one block for the entire function,
4432 current_block == block and we mustn't do this; it
4433 would cause infinite recursion. */
4434 if (block != current_block)
4435 {
4436 tree super;
4437 if (block != origin)
4438 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4439 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4440 (origin))
4441 == current_block);
4442 if (p_block_stack->is_empty ())
4443 super = current_block;
4444 else
4445 {
4446 super = p_block_stack->last ();
4447 gcc_assert (super == current_block
4448 || BLOCK_FRAGMENT_ORIGIN (super)
4449 == current_block);
4450 }
4451 BLOCK_SUPERCONTEXT (block) = super;
4452 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4453 BLOCK_SUBBLOCKS (current_block) = block;
4454 current_block = origin;
4455 }
4456 p_block_stack->safe_push (block);
4457 }
4458 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4459 {
4460 NOTE_BLOCK (insn) = p_block_stack->pop ();
4461 current_block = BLOCK_SUPERCONTEXT (current_block);
4462 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4463 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4464 prev_beg = NULL_TREE;
4465 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4466 ? NOTE_BLOCK (insn) : NULL_TREE;
4467 }
4468 }
4469 else
4470 {
4471 prev_beg = NULL_TREE;
4472 if (prev_end)
4473 BLOCK_SAME_RANGE (prev_end) = 0;
4474 prev_end = NULL_TREE;
4475 }
4476 }
4477 }
4478
4479 /* Reverse the order of elements in the chain T of blocks,
4480 and return the new head of the chain (old last element). */
4481
4482 tree
4483 blocks_nreverse (tree t)
4484 {
4485 tree prev = 0, block, next;
4486 for (block = t; block; block = next)
4487 {
4488 next = BLOCK_CHAIN (block);
4489 BLOCK_CHAIN (block) = prev;
4490 prev = block;
4491 }
4492 return prev;
4493 }
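
/* For instance, a chain B1 -> B2 -> B3 (linked through BLOCK_CHAIN)
   comes back as B3 -> B2 -> B1.  The reversal is destructive, so
   callers must use the returned head in place of the old one.  */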
4494
4495 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4496 by modifying the last node in chain 1 to point to chain 2. */
4497
4498 tree
4499 block_chainon (tree op1, tree op2)
4500 {
4501 tree t1;
4502
4503 if (!op1)
4504 return op2;
4505 if (!op2)
4506 return op1;
4507
4508 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4509 continue;
4510 BLOCK_CHAIN (t1) = op2;
4511
4512 #ifdef ENABLE_TREE_CHECKING
4513 {
4514 tree t2;
4515 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4516 gcc_assert (t2 != t1);
4517 }
4518 #endif
4519
4520 return op1;
4521 }
4522
4523 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4524 non-NULL, list them all into VECTOR, in a depth-first preorder
4525 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4526 blocks. */
4527
4528 static int
4529 all_blocks (tree block, tree *vector)
4530 {
4531 int n_blocks = 0;
4532
4533 while (block)
4534 {
4535 TREE_ASM_WRITTEN (block) = 0;
4536
4537 /* Record this block. */
4538 if (vector)
4539 vector[n_blocks] = block;
4540
4541 ++n_blocks;
4542
4543 /* Record the subblocks, and their subblocks... */
4544 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4545 vector ? vector + n_blocks : 0);
4546 block = BLOCK_CHAIN (block);
4547 }
4548
4549 return n_blocks;
4550 }
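
/* E.g. (illustrative shape): for a block tree ROOT containing
   subblocks A (with its own subblock A1) and B, the preorder fill is
   VECTOR = { ROOT, A, A1, B } and the return value is 4.  */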
4551
4552 /* Return a vector containing all the blocks rooted at BLOCK. The
4553 number of elements in the vector is stored in N_BLOCKS_P. The
4554 vector is dynamically allocated; it is the caller's responsibility
4555 to call `free' on the pointer returned. */
4556
4557 static tree *
4558 get_block_vector (tree block, int *n_blocks_p)
4559 {
4560 tree *block_vector;
4561
4562 *n_blocks_p = all_blocks (block, NULL);
4563 block_vector = XNEWVEC (tree, *n_blocks_p);
4564 all_blocks (block, block_vector);
4565
4566 return block_vector;
4567 }
4568
4569 static GTY(()) int next_block_index = 2;
4570
4571 /* Set BLOCK_NUMBER for all the blocks in FN. */
4572
4573 void
4574 number_blocks (tree fn)
4575 {
4576 int i;
4577 int n_blocks;
4578 tree *block_vector;
4579
4580 /* For SDB and XCOFF debugging output, we start numbering the blocks
4581 from 1 within each function, rather than keeping a running
4582 count. */
4583 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4584 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4585 next_block_index = 1;
4586 #endif
4587
4588 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4589
4590 /* The top-level BLOCK isn't numbered at all. */
4591 for (i = 1; i < n_blocks; ++i)
4592 /* We number the blocks from two. */
4593 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4594
4595 free (block_vector);
4596
4597 return;
4598 }
4599
4600 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4601
4602 DEBUG_FUNCTION tree
4603 debug_find_var_in_block_tree (tree var, tree block)
4604 {
4605 tree t;
4606
4607 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4608 if (t == var)
4609 return block;
4610
4611 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4612 {
4613 tree ret = debug_find_var_in_block_tree (var, t);
4614 if (ret)
4615 return ret;
4616 }
4617
4618 return NULL_TREE;
4619 }
4620 \f
4621 /* Keep track of whether we're in a dummy function context. If we are,
4622 we don't want to invoke the set_current_function hook, because we'll
4623 get into trouble if the hook calls target_reinit () recursively or
4624 when the initial initialization is not yet complete. */
4625
4626 static bool in_dummy_function;
4627
4628 /* Invoke the target hook when setting cfun. Update the optimization options
4629 if the function uses different options than the default. */
4630
4631 static void
4632 invoke_set_current_function_hook (tree fndecl)
4633 {
4634 if (!in_dummy_function)
4635 {
4636 tree opts = ((fndecl)
4637 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4638 : optimization_default_node);
4639
4640 if (!opts)
4641 opts = optimization_default_node;
4642
4643 /* Change optimization options if needed. */
4644 if (optimization_current_node != opts)
4645 {
4646 optimization_current_node = opts;
4647 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4648 }
4649
4650 targetm.set_current_function (fndecl);
4651 this_fn_optabs = this_target_optabs;
4652
4653 if (opts != optimization_default_node)
4654 {
4655 init_tree_optimization_optabs (opts);
4656 if (TREE_OPTIMIZATION_OPTABS (opts))
4657 this_fn_optabs = (struct target_optabs *)
4658 TREE_OPTIMIZATION_OPTABS (opts);
4659 }
4660 }
4661 }
4662
4663 /* cfun should never be set directly; use this function. */
4664
4665 void
4666 set_cfun (struct function *new_cfun)
4667 {
4668 if (cfun != new_cfun)
4669 {
4670 cfun = new_cfun;
4671 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4672 }
4673 }
4674
4675 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4676
4677 static vec<function_p> cfun_stack;
4678
4679 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4680 current_function_decl accordingly. */
4681
4682 void
4683 push_cfun (struct function *new_cfun)
4684 {
4685 gcc_assert ((!cfun && !current_function_decl)
4686 || (cfun && current_function_decl == cfun->decl));
4687 cfun_stack.safe_push (cfun);
4688 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4689 set_cfun (new_cfun);
4690 }
4691
4692 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4693
4694 void
4695 pop_cfun (void)
4696 {
4697 struct function *new_cfun = cfun_stack.pop ();
4698 /* When in_dummy_function, we do have a cfun but current_function_decl is
4699 NULL. We also allow pushing NULL cfun and subsequently changing
4700 current_function_decl to something else and have both restored by
4701 pop_cfun. */
4702 gcc_checking_assert (in_dummy_function
4703 || !cfun
4704 || current_function_decl == cfun->decl);
4705 set_cfun (new_cfun);
4706 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4707 }
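
/* Typical bracketed usage of the pair above (an illustrative sketch):

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... work with cfun set to FNDECL's struct function ...
     pop_cfun ();

   Both cfun and current_function_decl are saved and restored, so
   pushes and pops must nest properly.  */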
4708
4709 /* Return the current value of funcdef_no and increment it. */
4710 int
4711 get_next_funcdef_no (void)
4712 {
4713 return funcdef_no++;
4714 }
4715
4716 /* Return the current value of funcdef_no. */
4717 int
4718 get_last_funcdef_no (void)
4719 {
4720 return funcdef_no;
4721 }
4722
4723 /* Allocate a function structure for FNDECL and set its contents
4724 to the defaults. Set cfun to the newly-allocated object.
4725 Some of the helper functions invoked during initialization assume
4726 that cfun has already been set. Therefore, assign the new object
4727 directly into cfun and invoke the back end hook explicitly at the
4728 very end, rather than initializing a temporary and calling set_cfun
4729 on it.
4730
4731 ABSTRACT_P is true if this is a function that will never be seen by
4732 the middle-end. Such functions are front-end concepts (like C++
4733 function templates) that do not correspond directly to functions
4734 placed in object files. */
4735
4736 void
4737 allocate_struct_function (tree fndecl, bool abstract_p)
4738 {
4739 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4740
4741 cfun = ggc_cleared_alloc<function> ();
4742
4743 init_eh_for_function ();
4744
4745 if (init_machine_status)
4746 cfun->machine = (*init_machine_status) ();
4747
4748 #ifdef OVERRIDE_ABI_FORMAT
4749 OVERRIDE_ABI_FORMAT (fndecl);
4750 #endif
4751
4752 if (fndecl != NULL_TREE)
4753 {
4754 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4755 cfun->decl = fndecl;
4756 current_function_funcdef_no = get_next_funcdef_no ();
4757 }
4758
4759 invoke_set_current_function_hook (fndecl);
4760
4761 if (fndecl != NULL_TREE)
4762 {
4763 tree result = DECL_RESULT (fndecl);
4764 if (!abstract_p && aggregate_value_p (result, fndecl))
4765 {
4766 #ifdef PCC_STATIC_STRUCT_RETURN
4767 cfun->returns_pcc_struct = 1;
4768 #endif
4769 cfun->returns_struct = 1;
4770 }
4771
4772 cfun->stdarg = stdarg_p (fntype);
4773
4774 /* Assume all registers in stdarg functions need to be saved. */
4775 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4776 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4777
4778 /* ??? This could be set on a per-function basis by the front-end
4779 but is this worth the hassle? */
4780 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4781 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4782
4783 if (!profile_flag && !flag_instrument_function_entry_exit)
4784 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4785 }
4786 }
4787
4788 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4789 instead of just setting it. */
4790
4791 void
4792 push_struct_function (tree fndecl)
4793 {
4794 /* When in_dummy_function we might be in the middle of a pop_cfun and
4795 current_function_decl and cfun may not match. */
4796 gcc_assert (in_dummy_function
4797 || (!cfun && !current_function_decl)
4798 || (cfun && current_function_decl == cfun->decl));
4799 cfun_stack.safe_push (cfun);
4800 current_function_decl = fndecl;
4801 allocate_struct_function (fndecl, false);
4802 }
4803
4804 /* Reset crtl and other non-struct-function variables to defaults as
4805 appropriate for emitting rtl at the start of a function. */
4806
4807 static void
4808 prepare_function_start (void)
4809 {
4810 gcc_assert (!crtl->emit.x_last_insn);
4811 init_temp_slots ();
4812 init_emit ();
4813 init_varasm_status ();
4814 init_expr ();
4815 default_rtl_profile ();
4816
4817 if (flag_stack_usage_info)
4818 {
4819 cfun->su = ggc_cleared_alloc<stack_usage> ();
4820 cfun->su->static_stack_size = -1;
4821 }
4822
4823 cse_not_expected = ! optimize;
4824
4825 /* Caller save not needed yet. */
4826 caller_save_needed = 0;
4827
4828 /* We haven't done register allocation yet. */
4829 reg_renumber = 0;
4830
4831 /* Indicate that we have not instantiated virtual registers yet. */
4832 virtuals_instantiated = 0;
4833
4834 /* Indicate that we want CONCATs now. */
4835 generating_concat_p = 1;
4836
4837 /* Indicate we have no need of a frame pointer yet. */
4838 frame_pointer_needed = 0;
4839 }
4840
4841 /* Initialize the rtl expansion mechanism so that we can do simple things
4842 like generate sequences. This is used to provide a context during global
4843 initialization of some passes. You must call expand_dummy_function_end
4844 to exit this context. */
4845
4846 void
4847 init_dummy_function_start (void)
4848 {
4849 gcc_assert (!in_dummy_function);
4850 in_dummy_function = true;
4851 push_struct_function (NULL_TREE);
4852 prepare_function_start ();
4853 }
4854
4855 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4856 and initialize static variables for generating RTL for the statements
4857 of the function. */
4858
4859 void
4860 init_function_start (tree subr)
4861 {
4862 if (subr && DECL_STRUCT_FUNCTION (subr))
4863 set_cfun (DECL_STRUCT_FUNCTION (subr));
4864 else
4865 allocate_struct_function (subr, false);
4866
4867 /* Initialize backend, if needed. */
4868 initialize_rtl ();
4869
4870 prepare_function_start ();
4871 decide_function_section (subr);
4872
4873 /* Warn if this value is an aggregate type,
4874 regardless of which calling convention we are using for it. */
4875 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4876 warning (OPT_Waggregate_return, "function returns an aggregate");
4877 }
4878
4879 /* Expand code to verify the stack_protect_guard. This is invoked at
4880 the end of a function to be protected. */
4881
4882 #ifndef HAVE_stack_protect_test
4883 # define HAVE_stack_protect_test 0
4884 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4885 #endif
4886
4887 void
4888 stack_protect_epilogue (void)
4889 {
4890 tree guard_decl = targetm.stack_protect_guard ();
4891 rtx_code_label *label = gen_label_rtx ();
4892 rtx x, y, tmp;
4893
4894 x = expand_normal (crtl->stack_protect_guard);
4895 y = expand_normal (guard_decl);
4896
4897 /* Allow the target to compare Y with X without leaking either into
4898 a register. */
4899 switch ((int) (HAVE_stack_protect_test != 0))
4900 {
4901 case 1:
4902 tmp = gen_stack_protect_test (x, y, label);
4903 if (tmp)
4904 {
4905 emit_insn (tmp);
4906 break;
4907 }
4908 /* FALLTHRU */
4909
4910 default:
4911 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4912 break;
4913 }
4914
4915 /* The noreturn predictor has been moved to the tree level. The rtl-level
4916 predictors estimate this branch about 20%, which isn't enough to get
4917 things moved out of line. Since this is the only extant case of adding
4918 a noreturn function at the rtl level, it doesn't seem worth doing anything
4919 except adding the prediction by hand. */
4920 tmp = get_last_insn ();
4921 if (JUMP_P (tmp))
4922 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
4923
4924 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4925 free_temp_slots ();
4926 emit_label (label);
4927 }
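
/* The code emitted above is conceptually (illustration only):

     if (stack_guard == guard_value)    ; stack_protect_test if available
       goto ok;
     targetm.stack_protect_fail ();     ; commonly calls __stack_chk_fail
   ok:

   so control reaches the function exit only when the canary on the
   stack still matches the reference guard value.  */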
4928 \f
4929 /* Start the RTL for a new function, and set variables used for
4930 emitting RTL.
4931 SUBR is the FUNCTION_DECL node.
4932 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4933 the function's parameters, which must be run at any return statement. */
4934
4935 void
4936 expand_function_start (tree subr)
4937 {
4938 /* Make sure volatile mem refs aren't considered
4939 valid operands of arithmetic insns. */
4940 init_recog_no_volatile ();
4941
4942 crtl->profile
4943 = (profile_flag
4944 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4945
4946 crtl->limit_stack
4947 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4948
4949 /* Make the label for return statements to jump to. Do not special
4950 case machines with special return instructions -- they will be
4951 handled later during jump, ifcvt, or epilogue creation. */
4952 return_label = gen_label_rtx ();
4953
4954 /* Initialize rtx used to return the value. */
4955 /* Do this before assign_parms so that we copy the struct value address
4956 before any library calls that assign parms might generate. */
4957
4958 /* Decide whether to return the value in memory or in a register. */
4959 if (aggregate_value_p (DECL_RESULT (subr), subr))
4960 {
4961 /* Returning something that won't go in a register. */
4962 rtx value_address = 0;
4963
4964 #ifdef PCC_STATIC_STRUCT_RETURN
4965 if (cfun->returns_pcc_struct)
4966 {
4967 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4968 value_address = assemble_static_space (size);
4969 }
4970 else
4971 #endif
4972 {
4973 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4974 /* Expect to be passed the address of a place to store the value.
4975 If it is passed as an argument, assign_parms will take care of
4976 it. */
4977 if (sv)
4978 {
4979 value_address = gen_reg_rtx (Pmode);
4980 emit_move_insn (value_address, sv);
4981 }
4982 }
4983 if (value_address)
4984 {
4985 rtx x = value_address;
4986 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4987 {
4988 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4989 set_mem_attributes (x, DECL_RESULT (subr), 1);
4990 }
4991 SET_DECL_RTL (DECL_RESULT (subr), x);
4992 }
4993 }
4994 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4995 /* If return mode is void, this decl rtl should not be used. */
4996 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4997 else
4998 {
4999 /* Compute the return values into a pseudo reg, which we will copy
5000 into the true return register after the cleanups are done. */
5001 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5002 if (TYPE_MODE (return_type) != BLKmode
5003 && targetm.calls.return_in_msb (return_type))
5004 /* expand_function_end will insert the appropriate padding in
5005 this case. Use the return value's natural (unpadded) mode
5006 within the function proper. */
5007 SET_DECL_RTL (DECL_RESULT (subr),
5008 gen_reg_rtx (TYPE_MODE (return_type)));
5009 else
5010 {
5011 /* In order to figure out what mode to use for the pseudo, we
5012 figure out what the mode of the eventual return register will
5013 actually be, and use that. */
5014 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5015
5016 /* Structures that are returned in registers are not
5017 aggregate_value_p, so we may see a PARALLEL or a REG. */
5018 if (REG_P (hard_reg))
5019 SET_DECL_RTL (DECL_RESULT (subr),
5020 gen_reg_rtx (GET_MODE (hard_reg)));
5021 else
5022 {
5023 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5024 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
5025 }
5026 }
5027
5028 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5029 result to the real return register(s). */
5030 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5031
5032 if (chkp_function_instrumented_p (current_function_decl))
5033 {
5034 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5035 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5036 subr, 1);
5037 SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
5038 }
5039 }
5040
5041 /* Initialize rtx for parameters and local variables.
5042 In some cases this requires emitting insns. */
5043 assign_parms (subr);
5044
5045 /* If function gets a static chain arg, store it. */
5046 if (cfun->static_chain_decl)
5047 {
5048 tree parm = cfun->static_chain_decl;
5049 rtx local, chain, insn;
5050
5051 local = gen_reg_rtx (Pmode);
5052 chain = targetm.calls.static_chain (current_function_decl, true);
5053
5054 set_decl_incoming_rtl (parm, chain, false);
5055 SET_DECL_RTL (parm, local);
5056 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5057
5058 insn = emit_move_insn (local, chain);
5059
5060 /* Mark the register as eliminable, similar to parameters. */
5061 if (MEM_P (chain)
5062 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5063 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5064
5065 /* If we aren't optimizing, save the static chain onto the stack. */
5066 if (!optimize)
5067 {
5068 tree saved_static_chain_decl
5069 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5070 DECL_NAME (parm), TREE_TYPE (parm));
5071 rtx saved_static_chain_rtx
5072 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5073 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5074 emit_move_insn (saved_static_chain_rtx, chain);
5075 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5076 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5077 }
5078 }
5079
5080 /* If the function receives a non-local goto, then store the
5081 bits we need to restore the frame pointer. */
5082 if (cfun->nonlocal_goto_save_area)
5083 {
5084 tree t_save;
5085 rtx r_save;
5086
5087 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5088 gcc_assert (DECL_RTL_SET_P (var));
5089
5090 t_save = build4 (ARRAY_REF,
5091 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5092 cfun->nonlocal_goto_save_area,
5093 integer_zero_node, NULL_TREE, NULL_TREE);
5094 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5095 gcc_assert (GET_MODE (r_save) == Pmode);
5096
5097 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5098 update_nonlocal_goto_save_area ();
5099 }
5100
5101 /* The following was moved from init_function_start.
5102 The move is supposed to make sdb output more accurate. */
5103 /* Indicate the beginning of the function body,
5104 as opposed to parm setup. */
5105 emit_note (NOTE_INSN_FUNCTION_BEG);
5106
5107 gcc_assert (NOTE_P (get_last_insn ()));
5108
5109 parm_birth_insn = get_last_insn ();
5110
5111 if (crtl->profile)
5112 {
5113 #ifdef PROFILE_HOOK
5114 PROFILE_HOOK (current_function_funcdef_no);
5115 #endif
5116 }
5117
5118 /* If we are doing generic stack checking, the probe should go here. */
5119 if (flag_stack_check == GENERIC_STACK_CHECK)
5120 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5121 }
5122 \f
5123 /* Undo the effects of init_dummy_function_start. */
5124 void
5125 expand_dummy_function_end (void)
5126 {
5127 gcc_assert (in_dummy_function);
5128
5129 /* End any sequences that failed to be closed due to syntax errors. */
5130 while (in_sequence_p ())
5131 end_sequence ();
5132
5133 /* Outside function body, can't compute type's actual size
5134 until next function's body starts. */
5135
5136 free_after_parsing (cfun);
5137 free_after_compilation (cfun);
5138 pop_cfun ();
5139 in_dummy_function = false;
5140 }
5141
5142 /* Helper for diddle_return_value. */
5143
5144 void
5145 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5146 {
5147 if (! outgoing)
5148 return;
5149
5150 if (REG_P (outgoing))
5151 (*doit) (outgoing, arg);
5152 else if (GET_CODE (outgoing) == PARALLEL)
5153 {
5154 int i;
5155
5156 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5157 {
5158 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5159
5160 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5161 (*doit) (x, arg);
5162 }
5163 }
5164 }
5165
5166 /* Call DOIT for each hard register used as a return value from
5167 the current function. */
5168
5169 void
5170 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5171 {
5172 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5173 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5174 }
5175
5176 static void
5177 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5178 {
5179 emit_clobber (reg);
5180 }
5181
5182 void
5183 clobber_return_register (void)
5184 {
5185 diddle_return_value (do_clobber_return_reg, NULL);
5186
5187 /* In case we do use a pseudo for the return value, clobber it too. */
5188 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5189 {
5190 tree decl_result = DECL_RESULT (current_function_decl);
5191 rtx decl_rtl = DECL_RTL (decl_result);
5192 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5193 {
5194 do_clobber_return_reg (decl_rtl, NULL);
5195 }
5196 }
5197 }
5198
5199 static void
5200 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5201 {
5202 emit_use (reg);
5203 }
5204
5205 static void
5206 use_return_register (void)
5207 {
5208 diddle_return_value (do_use_return_reg, NULL);
5209 }
5210
5211 /* Possibly warn about unused parameters. */
5212 void
5213 do_warn_unused_parameter (tree fn)
5214 {
5215 tree decl;
5216
5217 for (decl = DECL_ARGUMENTS (fn);
5218 decl; decl = DECL_CHAIN (decl))
5219 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5220 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
5221 && !TREE_NO_WARNING (decl))
5222 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
5223 }
5224
5225 /* Set the location of the insn chain starting at INSN to LOC. */
5226
5227 static void
5228 set_insn_locations (rtx_insn *insn, int loc)
5229 {
5230 while (insn != NULL)
5231 {
5232 if (INSN_P (insn))
5233 INSN_LOCATION (insn) = loc;
5234 insn = NEXT_INSN (insn);
5235 }
5236 }
5237
5238 /* Generate RTL for the end of the current function. */
5239
5240 void
5241 expand_function_end (void)
5242 {
5243 rtx clobber_after;
5244
5245 /* If arg_pointer_save_area was referenced only from a nested
5246 function, we will not have initialized it yet. Do that now. */
5247 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5248 get_arg_pointer_save_area ();
5249
5250 /* If we are doing generic stack checking and this function makes calls,
5251 do a stack probe at the start of the function to ensure we have enough
5252 space for another stack frame. */
5253 if (flag_stack_check == GENERIC_STACK_CHECK)
5254 {
5255 rtx_insn *insn, *seq;
5256
5257 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5258 if (CALL_P (insn))
5259 {
5260 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5261 start_sequence ();
5262 if (STACK_CHECK_MOVING_SP)
5263 anti_adjust_stack_and_probe (max_frame_size, true);
5264 else
5265 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5266 seq = get_insns ();
5267 end_sequence ();
5268 set_insn_locations (seq, prologue_location);
5269 emit_insn_before (seq, stack_check_probe_note);
5270 break;
5271 }
5272 }
5273
5274 /* End any sequences that failed to be closed due to syntax errors. */
5275 while (in_sequence_p ())
5276 end_sequence ();
5277
5278 clear_pending_stack_adjust ();
5279 do_pending_stack_adjust ();
5280
5281 /* Output a line number for the end of the function.
5282 SDB depends on this. */
5283 set_curr_insn_location (input_location);
5284
5285 /* Before the return label (if any), clobber the return
5286 registers so that they are not propagated live to the rest of
5287 the function. This can only happen with functions that drop
5288 through; if there had been a return statement, there would
5289 have either been a return rtx, or a jump to the return label.
5290
5291 We delay actual code generation until after the current_function_value_rtx
5292 is computed. */
5293 clobber_after = get_last_insn ();
5294
5295 /* Output the label for the actual return from the function. */
5296 emit_label (return_label);
5297
5298 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5299 {
5300 /* Let except.c know where it should emit the call to unregister
5301 the function context for sjlj exceptions. */
5302 if (flag_exceptions)
5303 sjlj_emit_function_exit_after (get_last_insn ());
5304 }
5305 else
5306 {
5307 /* We want to ensure that instructions that may trap are not
5308 moved into the epilogue by scheduling, because we don't
5309 always emit unwind information for the epilogue. */
5310 if (cfun->can_throw_non_call_exceptions)
5311 emit_insn (gen_blockage ());
5312 }
5313
5314 /* If this is an implementation of throw, do what's necessary to
5315 communicate between __builtin_eh_return and the epilogue. */
5316 expand_eh_return ();
5317
5318 /* If scalar return value was computed in a pseudo-reg, or was a named
5319 return value that got dumped to the stack, copy that to the hard
5320 return register. */
5321 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5322 {
5323 tree decl_result = DECL_RESULT (current_function_decl);
5324 rtx decl_rtl = DECL_RTL (decl_result);
5325
5326 if (REG_P (decl_rtl)
5327 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5328 : DECL_REGISTER (decl_result))
5329 {
5330 rtx real_decl_rtl = crtl->return_rtx;
5331
5332 /* This should be set in assign_parms. */
5333 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5334
5335 /* If this is a BLKmode structure being returned in registers,
5336 then use the mode computed in expand_return. Note that if
5337 decl_rtl is memory, then its mode may have been changed,
5338 while that of crtl->return_rtx has not. */
5339 if (GET_MODE (real_decl_rtl) == BLKmode)
5340 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5341
5342 /* If a non-BLKmode return value should be padded at the least
5343 significant end of the register, shift it left by the appropriate
5344 amount. BLKmode results are handled using the group load/store
5345 machinery. */
5346 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5347 && REG_P (real_decl_rtl)
5348 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5349 {
5350 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5351 REGNO (real_decl_rtl)),
5352 decl_rtl);
5353 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5354 }
5355 /* If a named return value was dumped to memory, then
5356 we may need to re-do the PROMOTE_MODE signed/unsigned
5357 extension. */
5358 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5359 {
5360 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5361 promote_function_mode (TREE_TYPE (decl_result),
5362 GET_MODE (decl_rtl), &unsignedp,
5363 TREE_TYPE (current_function_decl), 1);
5364
5365 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5366 }
5367 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5368 {
5369 /* If expand_function_start has created a PARALLEL for decl_rtl,
5370 move the result to the real return registers. Otherwise, do
5371 a group load from decl_rtl for a named return. */
5372 if (GET_CODE (decl_rtl) == PARALLEL)
5373 emit_group_move (real_decl_rtl, decl_rtl);
5374 else
5375 emit_group_load (real_decl_rtl, decl_rtl,
5376 TREE_TYPE (decl_result),
5377 int_size_in_bytes (TREE_TYPE (decl_result)));
5378 }
5379 /* In the case of complex integer modes smaller than a word, we'll
5380 need to generate some non-trivial bitfield insertions. Do that
5381 on a pseudo and not the hard register. */
5382 else if (GET_CODE (decl_rtl) == CONCAT
5383 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5384 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5385 {
5386 int old_generating_concat_p;
5387 rtx tmp;
5388
5389 old_generating_concat_p = generating_concat_p;
5390 generating_concat_p = 0;
5391 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5392 generating_concat_p = old_generating_concat_p;
5393
5394 emit_move_insn (tmp, decl_rtl);
5395 emit_move_insn (real_decl_rtl, tmp);
5396 }
5397 else
5398 emit_move_insn (real_decl_rtl, decl_rtl);
5399 }
5400 }
5401
5402 /* If returning a structure, arrange to return the address of the value
5403 in a place where debuggers expect to find it.
5404
5405 If returning a structure PCC style, the caller also depends on
5406 this value, and cfun->returns_pcc_struct is not necessarily
5407 set. */
5408 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5409 && !targetm.calls.omit_struct_return_reg)
5410 {
5411 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5412 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5413 rtx outgoing;
5414
5415 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5416 type = TREE_TYPE (type);
5417 else
5418 value_address = XEXP (value_address, 0);
5419
5420 outgoing = targetm.calls.function_value (build_pointer_type (type),
5421 current_function_decl, true);
5422
5423 /* Mark this as a function return value so integrate will delete the
5424 assignment and USE below when inlining this function. */
5425 REG_FUNCTION_VALUE_P (outgoing) = 1;
5426
5427 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5428 value_address = convert_memory_address (GET_MODE (outgoing),
5429 value_address);
5430
5431 emit_move_insn (outgoing, value_address);
5432
5433 /* Show the return register used to hold the result (in this case
5434 the address of the result). */
5435 crtl->return_rtx = outgoing;
5436 }
5437
5438 /* Emit the actual code to clobber the return register. Don't emit
5439 it if clobber_after is a barrier; in that case the previous basic
5440 block certainly doesn't fall thru into the exit block. */
5441 if (!BARRIER_P (clobber_after))
5442 {
5443 rtx seq;
5444
5445 start_sequence ();
5446 clobber_return_register ();
5447 seq = get_insns ();
5448 end_sequence ();
5449
5450 emit_insn_after (seq, clobber_after);
5451 }
5452
5453 /* Output the label for the naked return from the function. */
5454 if (naked_return_label)
5455 emit_label (naked_return_label);
5456
5457 /* @@@ This is a kludge. We want to ensure that instructions that
5458 may trap are not moved into the epilogue by scheduling, because
5459 we don't always emit unwind information for the epilogue. */
5460 if (cfun->can_throw_non_call_exceptions
5461 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5462 emit_insn (gen_blockage ());
5463
5464 /* If stack protection is enabled for this function, check the guard. */
5465 if (crtl->stack_protect_guard)
5466 stack_protect_epilogue ();
5467
5468 /* If we had calls to alloca, and this machine needs
5469 an accurate stack pointer to exit the function,
5470 insert some code to save and restore the stack pointer. */
5471 if (! EXIT_IGNORE_STACK
5472 && cfun->calls_alloca)
5473 {
5474 rtx tem = 0, seq;
5475
5476 start_sequence ();
5477 emit_stack_save (SAVE_FUNCTION, &tem);
5478 seq = get_insns ();
5479 end_sequence ();
5480 emit_insn_before (seq, parm_birth_insn);
5481
5482 emit_stack_restore (SAVE_FUNCTION, tem);
5483 }
5484
5485 /* ??? This should no longer be necessary since the old "stupid"
5486 register allocator is no longer with us, but some parts of the
5487 compiler (e.g. reload_combine, and sh mach_dep_reorg) still try to
5488 compute their own lifetime info instead of using the general framework. */
5489 use_return_register ();
5490 }
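
/* A sketch of the promotion case handled above (illustrative, not
   taken from this file): on a target whose PROMOTE_MODE widens
   HImode function values to SImode,

     short f (const short *p)
     {
       return *p;
     }

   the result may be computed in an HImode pseudo (or stack slot)
   while crtl->return_rtx is an SImode hard register; the modes then
   differ, and convert_move performs the signed/unsigned extension
   selected by promote_function_mode.  */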
5491
5492 rtx
5493 get_arg_pointer_save_area (void)
5494 {
5495 rtx ret = arg_pointer_save_area;
5496
5497 if (! ret)
5498 {
5499 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5500 arg_pointer_save_area = ret;
5501 }
5502
5503 if (! crtl->arg_pointer_save_area_init)
5504 {
5505 rtx seq;
5506
5507 /* Save the arg pointer at the beginning of the function. The
5508 generated stack slot may not be a valid memory address, so we
5509 have to check it and fix it if necessary. */
5510 start_sequence ();
5511 emit_move_insn (validize_mem (copy_rtx (ret)),
5512 crtl->args.internal_arg_pointer);
5513 seq = get_insns ();
5514 end_sequence ();
5515
5516 push_topmost_sequence ();
5517 emit_insn_after (seq, entry_of_function ());
5518 pop_topmost_sequence ();
5519
5520 crtl->arg_pointer_save_area_init = true;
5521 }
5522
5523 return ret;
5524 }
5525 \f
5526 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5527 for the first time. */
5528
5529 static void
5530 record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
5531 {
5532 rtx_insn *tmp;
5533 htab_t hash = *hashp;
5534
5535 if (hash == NULL)
5536 *hashp = hash
5537 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5538
5539 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5540 {
5541 void **slot = htab_find_slot (hash, tmp, INSERT);
5542 gcc_assert (*slot == NULL);
5543 *slot = tmp;
5544 }
5545 }
5546
5547 /* INSN has been duplicated as COPY, or replaced by it, perhaps when
5548 duplicating a basic block, splitting, or running peepholes. If INSN
5549 is a prologue or epilogue insn, then record COPY as well. */
5550
5551 void
5552 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5553 {
5554 htab_t hash;
5555 void **slot;
5556
5557 hash = epilogue_insn_hash;
5558 if (!hash || !htab_find (hash, insn))
5559 {
5560 hash = prologue_insn_hash;
5561 if (!hash || !htab_find (hash, insn))
5562 return;
5563 }
5564
5565 slot = htab_find_slot (hash, copy, INSERT);
5566 gcc_assert (*slot == NULL);
5567 *slot = copy;
5568 }
5569
5570 /* Determine whether INSN, or any insn it contains, is in HASH. Because
5571 we can be running after reorg, SEQUENCE rtl is possible. */
5572
5573 static bool
5574 contains (const_rtx insn, htab_t hash)
5575 {
5576 if (hash == NULL)
5577 return false;
5578
5579 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5580 {
5581 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5582 int i;
5583 for (i = seq->len () - 1; i >= 0; i--)
5584 if (htab_find (hash, seq->element (i)))
5585 return true;
5586 return false;
5587 }
5588
5589 return htab_find (hash, insn) != NULL;
5590 }
5591
5592 int
5593 prologue_epilogue_contains (const_rtx insn)
5594 {
5595 if (contains (insn, prologue_insn_hash))
5596 return 1;
5597 if (contains (insn, epilogue_insn_hash))
5598 return 1;
5599 return 0;
5600 }
5601
5602 #ifdef HAVE_return
5603 /* Insert use of return register before the end of BB. */
5604
5605 static void
5606 emit_use_return_register_into_block (basic_block bb)
5607 {
5608 rtx seq, insn;
5609 start_sequence ();
5610 use_return_register ();
5611 seq = get_insns ();
5612 end_sequence ();
5613 insn = BB_END (bb);
5614 #ifdef HAVE_cc0
5615 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5616 insn = prev_cc0_setter (insn);
5617 #endif
5618 emit_insn_before (seq, insn);
5619 }
5620
5621
5622 /* Create a return pattern, either simple_return or return, depending on
5623 simple_p. */
5624
5625 static rtx
5626 gen_return_pattern (bool simple_p)
5627 {
5628 #ifdef HAVE_simple_return
5629 return simple_p ? gen_simple_return () : gen_return ();
5630 #else
5631 gcc_assert (!simple_p);
5632 return gen_return ();
5633 #endif
5634 }
5635
5636 /* Insert an appropriate return pattern at the end of block BB. This
5637 also means updating block_for_insn appropriately. SIMPLE_P is
5638 the same as in gen_return_pattern and passed to it. */
5639
5640 void
5641 emit_return_into_block (bool simple_p, basic_block bb)
5642 {
5643 rtx jump, pat;
5644 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5645 pat = PATTERN (jump);
5646 if (GET_CODE (pat) == PARALLEL)
5647 pat = XVECEXP (pat, 0, 0);
5648 gcc_assert (ANY_RETURN_P (pat));
5649 JUMP_LABEL (jump) = pat;
5650 }
5651 #endif
5652
5653 /* Set JUMP_LABEL for a return insn. */
5654
5655 void
5656 set_return_jump_label (rtx returnjump)
5657 {
5658 rtx pat = PATTERN (returnjump);
5659 if (GET_CODE (pat) == PARALLEL)
5660 pat = XVECEXP (pat, 0, 0);
5661 if (ANY_RETURN_P (pat))
5662 JUMP_LABEL (returnjump) = pat;
5663 else
5664 JUMP_LABEL (returnjump) = ret_rtx;
5665 }
5666
5667 #if defined (HAVE_return) || defined (HAVE_simple_return)
5668 /* Return true if there are any active insns between HEAD and TAIL. */
5669 bool
5670 active_insn_between (rtx_insn *head, rtx_insn *tail)
5671 {
5672 while (tail)
5673 {
5674 if (active_insn_p (tail))
5675 return true;
5676 if (tail == head)
5677 return false;
5678 tail = PREV_INSN (tail);
5679 }
5680 return false;
5681 }
5682
5683 /* LAST_BB is a block that exits and is empty of active instructions.
5684 Examine its predecessors for jumps that can be converted to
5685 (conditional) returns. */
5686 vec<edge>
5687 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5688 vec<edge> unconverted ATTRIBUTE_UNUSED)
5689 {
5690 int i;
5691 basic_block bb;
5692 rtx label;
5693 edge_iterator ei;
5694 edge e;
5695 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5696
5697 FOR_EACH_EDGE (e, ei, last_bb->preds)
5698 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5699 src_bbs.quick_push (e->src);
5700
5701 label = BB_HEAD (last_bb);
5702
5703 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5704 {
5705 rtx_insn *jump = BB_END (bb);
5706
5707 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5708 continue;
5709
5710 e = find_edge (bb, last_bb);
5711
5712 /* If we have an unconditional jump, we can replace that
5713 with a simple return instruction. */
5714 if (simplejump_p (jump))
5715 {
5716 /* The use of the return register might be present in the exit
5717 fallthru block. Either:
5718 - removing the use is safe, and we should remove the use in
5719 the exit fallthru block, or
5720 - removing the use is not safe, and we should add it here.
5721 For now, we conservatively choose the latter. Either choice
5722 helps in crossjumping. */
5723 emit_use_return_register_into_block (bb);
5724
5725 emit_return_into_block (simple_p, bb);
5726 delete_insn (jump);
5727 }
5728
5729 /* If we have a conditional jump branching to the last
5730 block, we can try to replace that with a conditional
5731 return instruction. */
5732 else if (condjump_p (jump))
5733 {
5734 rtx dest;
5735
5736 if (simple_p)
5737 dest = simple_return_rtx;
5738 else
5739 dest = ret_rtx;
5740 if (!redirect_jump (jump, dest, 0))
5741 {
5742 #ifdef HAVE_simple_return
5743 if (simple_p)
5744 {
5745 if (dump_file)
5746 fprintf (dump_file,
5747 "Failed to redirect bb %d branch.\n", bb->index);
5748 unconverted.safe_push (e);
5749 }
5750 #endif
5751 continue;
5752 }
5753
5754 /* See comment in simplejump_p case above. */
5755 emit_use_return_register_into_block (bb);
5756
5757 /* If this block has only one successor, it both jumps
5758 and falls through to the fallthru block, so we can't
5759 delete the edge. */
5760 if (single_succ_p (bb))
5761 continue;
5762 }
5763 else
5764 {
5765 #ifdef HAVE_simple_return
5766 if (simple_p)
5767 {
5768 if (dump_file)
5769 fprintf (dump_file,
5770 "Failed to redirect bb %d branch.\n", bb->index);
5771 unconverted.safe_push (e);
5772 }
5773 #endif
5774 continue;
5775 }
5776
5777 /* Fix up the CFG for the successful change we just made. */
5778 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5779 e->flags &= ~EDGE_CROSSING;
5780 }
5781 src_bbs.release ();
5782 return unconverted;
5783 }
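
/* Shape of the conversion performed above, assuming a target that
   provides (conditional) return patterns:

     before:                        after:
       bb1:  ...; jump .Lret          bb1:  ...; return
       bb2:  ...; beq .Lret           bb2:  ...; conditional return
       .Lret:                         .Lret:
         return-sequence                return-sequence

   Unconditional jumps to the otherwise-empty last block are replaced
   by return insns outright; conditional jumps are redirected to
   ret_rtx or simple_return_rtx where redirect_jump succeeds, and the
   affected edges are rewired to the exit block.  */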
5784
5785 /* Emit a return insn for the exit fallthru block. */
5786 basic_block
5787 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5788 {
5789 basic_block last_bb = exit_fallthru_edge->src;
5790
5791 if (JUMP_P (BB_END (last_bb)))
5792 {
5793 last_bb = split_edge (exit_fallthru_edge);
5794 exit_fallthru_edge = single_succ_edge (last_bb);
5795 }
5796 emit_barrier_after (BB_END (last_bb));
5797 emit_return_into_block (simple_p, last_bb);
5798 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5799 return last_bb;
5800 }
5801 #endif
5802
5803
5804 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5805 this into place with notes indicating where the prologue ends and where
5806 the epilogue begins. Update the basic block information when possible.
5807
5808 Notes on epilogue placement:
5809 There are several kinds of edges to the exit block:
5810 * a single fallthru edge from LAST_BB
5811 * possibly, edges from blocks containing sibcalls
5812 * possibly, fake edges from infinite loops
5813
5814 The epilogue is always emitted on the fallthru edge from the last basic
5815 block in the function, LAST_BB, into the exit block.
5816
5817 If LAST_BB is empty except for a label, it is the target of every
5818 other basic block in the function that ends in a return. If a
5819 target has a return or simple_return pattern (possibly with
5820 conditional variants), these basic blocks can be changed so that a
5821 return insn is emitted into them, and their target is adjusted to
5822 the real exit block.
5823
5824 Notes on shrink wrapping: We implement a fairly conservative
5825 version of shrink-wrapping rather than the textbook one. We only
5826 generate a single prologue and a single epilogue. This is
5827 sufficient to catch a number of interesting cases involving early
5828 exits.
5829
5830 First, we identify the blocks that require the prologue to occur before
5831 them. These are the ones that modify a call-saved register, or reference
5832 any of the stack or frame pointer registers. To simplify things, we then
5833 mark everything reachable from these blocks as also requiring a prologue.
5834 This takes care of loops automatically, and avoids the need to examine
5835 whether MEMs reference the frame, since it is sufficient to check for
5836 occurrences of the stack or frame pointer.
5837
5838 We then compute the set of blocks for which the need for a prologue
5839 is anticipatable (borrowing terminology from the shrink-wrapping
5840 description in Muchnick's book). These are the blocks which either
5841 require a prologue themselves, or those that have only successors
5842 where the prologue is anticipatable. The prologue needs to be
5843 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5844 is not. For the moment, we ensure that only one such edge exists.
5845
5846 The epilogue is placed as described above, but we make a
5847 distinction between inserting return and simple_return patterns
5848 when modifying other blocks that end in a return. Blocks that end
5849 in a sibcall omit the sibcall_epilogue if the block is not in
5850 ANTIC. */
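
/* A hedged example of the early-exit shape this catches (illustrative
   only; g and h stand for arbitrary external functions):

     extern int g (int *), h (int *);

     int f (int *p)
     {
       if (p == 0)
         return -1;              (touches no call-saved registers)
       return g (p) + h (p);     (requires the full prologue)
     }

   Only the second arm needs a frame, so the prologue is sunk onto the
   edge leading to it, and the "p == 0" path can return via
   simple_return without ever setting up a frame.  */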
5851
5852 void
5853 thread_prologue_and_epilogue_insns (void)
5854 {
5855 bool inserted;
5856 #ifdef HAVE_simple_return
5857 vec<edge> unconverted_simple_returns = vNULL;
5858 bitmap_head bb_flags;
5859 #endif
5860 rtx_insn *returnjump;
5861 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
5862 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
5863 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5864 edge_iterator ei;
5865
5866 df_analyze ();
5867
5868 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5869
5870 inserted = false;
5871 epilogue_end = NULL;
5872 returnjump = NULL;
5873
5874 /* Can't deal with multiple successors of the entry block at the
5875 moment. Function should always have at least one entry
5876 point. */
5877 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5878 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5879 orig_entry_edge = entry_edge;
5880
5881 split_prologue_seq = NULL;
5882 if (flag_split_stack
5883 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5884 == NULL))
5885 {
5886 #ifndef HAVE_split_stack_prologue
5887 gcc_unreachable ();
5888 #else
5889 gcc_assert (HAVE_split_stack_prologue);
5890
5891 start_sequence ();
5892 emit_insn (gen_split_stack_prologue ());
5893 split_prologue_seq = get_insns ();
5894 end_sequence ();
5895
5896 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5897 set_insn_locations (split_prologue_seq, prologue_location);
5898 #endif
5899 }
5900
5901 prologue_seq = NULL;
5902 #ifdef HAVE_prologue
5903 if (HAVE_prologue)
5904 {
5905 start_sequence ();
5906 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
5907 emit_insn (seq);
5908
5909 /* Insert an explicit USE for the frame pointer
5910 if the profiling is on and the frame pointer is required. */
5911 if (crtl->profile && frame_pointer_needed)
5912 emit_use (hard_frame_pointer_rtx);
5913
5914 /* Retain a map of the prologue insns. */
5915 record_insns (seq, NULL, &prologue_insn_hash);
5916 emit_note (NOTE_INSN_PROLOGUE_END);
5917
5918 /* Ensure that instructions are not moved into the prologue when
5919 profiling is on. The call to the profiling routine can be
5920 emitted within the live range of a call-clobbered register. */
5921 if (!targetm.profile_before_prologue () && crtl->profile)
5922 emit_insn (gen_blockage ());
5923
5924 prologue_seq = get_insns ();
5925 end_sequence ();
5926 set_insn_locations (prologue_seq, prologue_location);
5927 }
5928 #endif
5929
5930 #ifdef HAVE_simple_return
5931 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5932
5933 /* Try to perform a kind of shrink-wrapping, making sure the
5934 prologue/epilogue is emitted only around those parts of the
5935 function that require it. */
5936
5937 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
5938 #endif
5939
5940 if (split_prologue_seq != NULL_RTX)
5941 {
5942 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5943 inserted = true;
5944 }
5945 if (prologue_seq != NULL_RTX)
5946 {
5947 insert_insn_on_edge (prologue_seq, entry_edge);
5948 inserted = true;
5949 }
5950
5951 /* If the exit block has no non-fake predecessors, we don't need
5952 an epilogue. */
5953 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5954 if ((e->flags & EDGE_FAKE) == 0)
5955 break;
5956 if (e == NULL)
5957 goto epilogue_done;
5958
5959 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5960
5961 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5962
5963 #ifdef HAVE_simple_return
5964 if (entry_edge != orig_entry_edge)
5965 exit_fallthru_edge
5966 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5967 &unconverted_simple_returns,
5968 &returnjump);
5969 #endif
5970 #ifdef HAVE_return
5971 if (HAVE_return)
5972 {
5973 if (exit_fallthru_edge == NULL)
5974 goto epilogue_done;
5975
5976 if (optimize)
5977 {
5978 basic_block last_bb = exit_fallthru_edge->src;
5979
5980 if (LABEL_P (BB_HEAD (last_bb))
5981 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
5982 convert_jumps_to_returns (last_bb, false, vNULL);
5983
5984 if (EDGE_COUNT (last_bb->preds) != 0
5985 && single_succ_p (last_bb))
5986 {
5987 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5988 epilogue_end = returnjump = BB_END (last_bb);
5989 #ifdef HAVE_simple_return
5990 /* Emitting the return may add a basic block.
5991 Fix bb_flags for the added block. */
5992 if (last_bb != exit_fallthru_edge->src)
5993 bitmap_set_bit (&bb_flags, last_bb->index);
5994 #endif
5995 goto epilogue_done;
5996 }
5997 }
5998 }
5999 #endif
6000
6001 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6002 this marker for the splits of EH_RETURN patterns, and nothing else
6003 uses the flag in the meantime. */
6004 epilogue_completed = 1;
6005
6006 #ifdef HAVE_eh_return
6007 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6008 some targets, these get split to a special version of the epilogue
6009 code. In order to be able to properly annotate these with unwind
6010 info, try to split them now. If we get a valid split, drop an
6011 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6012 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6013 {
6014 rtx_insn *prev, *last, *trial;
6015
6016 if (e->flags & EDGE_FALLTHRU)
6017 continue;
6018 last = BB_END (e->src);
6019 if (!eh_returnjump_p (last))
6020 continue;
6021
6022 prev = PREV_INSN (last);
6023 trial = try_split (PATTERN (last), last, 1);
6024 if (trial == last)
6025 continue;
6026
6027 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6028 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6029 }
6030 #endif
6031
6032 /* If nothing falls through into the exit block, we don't need an
6033 epilogue. */
6034
6035 if (exit_fallthru_edge == NULL)
6036 goto epilogue_done;
6037
6038 #ifdef HAVE_epilogue
6039 if (HAVE_epilogue)
6040 {
6041 start_sequence ();
6042 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6043 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
6044 if (seq)
6045 emit_jump_insn (seq);
6046
6047 /* Retain a map of the epilogue insns. */
6048 record_insns (seq, NULL, &epilogue_insn_hash);
6049 set_insn_locations (seq, epilogue_location);
6050
6051 seq = get_insns ();
6052 returnjump = get_last_insn ();
6053 end_sequence ();
6054
6055 insert_insn_on_edge (seq, exit_fallthru_edge);
6056 inserted = true;
6057
6058 if (JUMP_P (returnjump))
6059 set_return_jump_label (returnjump);
6060 }
6061 else
6062 #endif
6063 {
6064 basic_block cur_bb;
6065
6066 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6067 goto epilogue_done;
6068 /* We have a fall-through edge to the exit block, the source is not
6069 at the end of the function, and there will be an assembler epilogue
6070 at the end of the function.
6071 We can't use force_nonfallthru here, because that would try to
6072 use return. Inserting a jump 'by hand' is extremely messy, so
6073 we take advantage of cfg_layout_finalize using
6074 fixup_fallthru_exit_predecessor. */
6075 cfg_layout_initialize (0);
6076 FOR_EACH_BB_FN (cur_bb, cfun)
6077 if (cur_bb->index >= NUM_FIXED_BLOCKS
6078 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6079 cur_bb->aux = cur_bb->next_bb;
6080 cfg_layout_finalize ();
6081 }
6082
6083 epilogue_done:
6084
6085 default_rtl_profile ();
6086
6087 if (inserted)
6088 {
6089 sbitmap blocks;
6090
6091 commit_edge_insertions ();
6092
6093 /* Look for basic blocks within the prologue insns. */
6094 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6095 bitmap_clear (blocks);
6096 bitmap_set_bit (blocks, entry_edge->dest->index);
6097 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6098 find_many_sub_basic_blocks (blocks);
6099 sbitmap_free (blocks);
6100
6101 /* The epilogue insns we inserted may cause the exit edge to no longer
6102 be fallthru. */
6103 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6104 {
6105 if (((e->flags & EDGE_FALLTHRU) != 0)
6106 && returnjump_p (BB_END (e->src)))
6107 e->flags &= ~EDGE_FALLTHRU;
6108 }
6109 }
6110
6111 #ifdef HAVE_simple_return
6112 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
6113 unconverted_simple_returns);
6114 #endif
6115
6116 #ifdef HAVE_sibcall_epilogue
6117 /* Emit sibling epilogues before any sibling call sites. */
6118 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6119 (e = ei_safe_edge (ei));
6120 )
6121 {
6122 basic_block bb = e->src;
6123 rtx_insn *insn = BB_END (bb);
6124 rtx ep_seq;
6125
6126 if (!CALL_P (insn)
6127 || ! SIBLING_CALL_P (insn)
6128 #ifdef HAVE_simple_return
6129 || (entry_edge != orig_entry_edge
6130 && !bitmap_bit_p (&bb_flags, bb->index))
6131 #endif
6132 )
6133 {
6134 ei_next (&ei);
6135 continue;
6136 }
6137
6138 ep_seq = gen_sibcall_epilogue ();
6139 if (ep_seq)
6140 {
6141 start_sequence ();
6142 emit_note (NOTE_INSN_EPILOGUE_BEG);
6143 emit_insn (ep_seq);
6144 rtx_insn *seq = get_insns ();
6145 end_sequence ();
6146
6147 /* Retain a map of the epilogue insns. Used in life analysis to
6148 avoid getting rid of sibcall epilogue insns. Do this before we
6149 actually emit the sequence. */
6150 record_insns (seq, NULL, &epilogue_insn_hash);
6151 set_insn_locations (seq, epilogue_location);
6152
6153 emit_insn_before (seq, insn);
6154 }
6155 ei_next (&ei);
6156 }
6157 #endif
6158
6159 #ifdef HAVE_epilogue
6160 if (epilogue_end)
6161 {
6162 rtx_insn *insn, *next;
6163
6164 /* Move any line notes that appear after the epilogue. There is
6165 no need, however, to be quite so strict about the existence
6166 of such a note. Also possibly move
6167 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6168 info generation. */
6169 for (insn = epilogue_end; insn; insn = next)
6170 {
6171 next = NEXT_INSN (insn);
6172 if (NOTE_P (insn)
6173 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6174 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6175 }
6176 }
6177 #endif
6178
6179 #ifdef HAVE_simple_return
6180 bitmap_clear (&bb_flags);
6181 #endif
6182
6183 /* Threading the prologue and epilogue changes the artificial refs
6184 in the entry and exit blocks. */
6185 epilogue_completed = 1;
6186 df_update_entry_exit_and_calls ();
6187 }
6188
6189 /* Reposition the prologue-end and epilogue-begin notes after
6190 instruction scheduling. */
6191
6192 void
6193 reposition_prologue_and_epilogue_notes (void)
6194 {
6195 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6196 || defined (HAVE_sibcall_epilogue)
6197 /* Since the hash table is created on demand, the fact that it is
6198 non-null is a signal that it is non-empty. */
6199 if (prologue_insn_hash != NULL)
6200 {
6201 size_t len = htab_elements (prologue_insn_hash);
6202 rtx_insn *insn, *last = NULL, *note = NULL;
6203
6204 /* Scan from the beginning until we reach the last prologue insn. */
6205 /* ??? While we do have the CFG intact, there are two problems:
6206 (1) The prologue can contain loops (typically probing the stack),
6207 which means that the end of the prologue isn't in the first bb.
6208 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6209 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6210 {
6211 if (NOTE_P (insn))
6212 {
6213 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6214 note = insn;
6215 }
6216 else if (contains (insn, prologue_insn_hash))
6217 {
6218 last = insn;
6219 if (--len == 0)
6220 break;
6221 }
6222 }
6223
6224 if (last)
6225 {
6226 if (note == NULL)
6227 {
6228 /* Scan forward looking for the PROLOGUE_END note. It should
6229 be right at the beginning of the block, possibly with other
6230 insn notes that got moved there. */
6231 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6232 {
6233 if (NOTE_P (note)
6234 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6235 break;
6236 }
6237 }
6238
6239 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6240 if (LABEL_P (last))
6241 last = NEXT_INSN (last);
6242 reorder_insns (note, note, last);
6243 }
6244 }
6245
6246 if (epilogue_insn_hash != NULL)
6247 {
6248 edge_iterator ei;
6249 edge e;
6250
6251 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6252 {
6253 rtx_insn *insn, *first = NULL, *note = NULL;
6254 basic_block bb = e->src;
6255
6256 /* Scan from the beginning until we reach the first epilogue insn. */
6257 FOR_BB_INSNS (bb, insn)
6258 {
6259 if (NOTE_P (insn))
6260 {
6261 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6262 {
6263 note = insn;
6264 if (first != NULL)
6265 break;
6266 }
6267 }
6268 else if (first == NULL && contains (insn, epilogue_insn_hash))
6269 {
6270 first = insn;
6271 if (note != NULL)
6272 break;
6273 }
6274 }
6275
6276 if (note)
6277 {
6278 /* If the function has a single basic block, and no real
6279 epilogue insns (e.g. sibcall with no cleanup), the
6280 epilogue note can get scheduled before the prologue
6281 note. If we have frame related prologue insns, having
6282 them scanned during the epilogue will result in a crash.
6283 In this case re-order the epilogue note to just before
6284 the last insn in the block. */
6285 if (first == NULL)
6286 first = BB_END (bb);
6287
6288 if (PREV_INSN (first) != note)
6289 reorder_insns (note, note, PREV_INSN (first));
6290 }
6291 }
6292 }
6293 #endif /* HAVE_prologue or HAVE_epilogue */
6294 }
6295
6296 /* Returns the name of the function declared by FNDECL. */
6297 const char *
6298 fndecl_name (tree fndecl)
6299 {
6300 if (fndecl == NULL)
6301 return "(nofn)";
6302 return lang_hooks.decl_printable_name (fndecl, 2);
6303 }
6304
6305 /* Returns the name of function FN. */
6306 const char *
6307 function_name (struct function *fn)
6308 {
6309 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6310 return fndecl_name (fndecl);
6311 }
6312
6313 /* Returns the name of the current function. */
6314 const char *
6315 current_function_name (void)
6316 {
6317 return function_name (cfun);
6318 }
6319 \f
6320
6321 static unsigned int
6322 rest_of_handle_check_leaf_regs (void)
6323 {
6324 #ifdef LEAF_REGISTERS
6325 crtl->uses_only_leaf_regs
6326 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6327 #endif
6328 return 0;
6329 }
6330
6331 /* Insert a TYPE into the used types hash table of CFUN. */
6332
6333 static void
6334 used_types_insert_helper (tree type, struct function *func)
6335 {
6336 if (type != NULL && func != NULL)
6337 {
6338 if (func->used_types_hash == NULL)
6339 func->used_types_hash = hash_set<tree>::create_ggc (37);
6340
6341 func->used_types_hash->add (type);
6342 }
6343 }
6344
6345 /* Given a type, insert it into the used types hash table of cfun. */
6346 void
6347 used_types_insert (tree t)
6348 {
6349 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6350 if (TYPE_NAME (t))
6351 break;
6352 else
6353 t = TREE_TYPE (t);
6354 if (TREE_CODE (t) == ERROR_MARK)
6355 return;
6356 if (TYPE_NAME (t) == NULL_TREE
6357 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6358 t = TYPE_MAIN_VARIANT (t);
6359 if (debug_info_level > DINFO_LEVEL_NONE)
6360 {
6361 if (cfun)
6362 used_types_insert_helper (t, cfun);
6363 else
6364 {
6365 /* So this might be a type referenced by a global variable.
6366 Record that type so that we can later decide to emit its
6367 debug information. */
6368 vec_safe_push (types_used_by_cur_var_decl, t);
6369 }
6370 }
6371 }
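
/* For illustration (a sketch): given

     struct S { int i; };
     static struct S (*arr[4])[2];

   the type of "arr" is an anonymous array of pointers to arrays, so
   the loop above keeps stripping TREE_TYPE until it reaches a type
   with a TYPE_NAME -- here "struct S" -- whose main variant is then
   recorded, making debug info for S available even though S is only
   referenced through unnamed derived types. A typedef'd intermediate
   pointer or array type would stop the walk earlier, at the
   typedef.  */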
6372
6373 /* Helper to hash a struct types_used_by_vars_entry. */
6374
6375 static hashval_t
6376 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6377 {
6378 gcc_assert (entry && entry->var_decl && entry->type);
6379
6380 return iterative_hash_object (entry->type,
6381 iterative_hash_object (entry->var_decl, 0));
6382 }
6383
6384 /* Hash function of the types_used_by_vars_entry hash table. */
6385
6386 hashval_t
6387 used_type_hasher::hash (types_used_by_vars_entry *entry)
6388 {
6389 return hash_types_used_by_vars_entry (entry);
6390 }
6391
6392 /* Equality function of the types_used_by_vars_entry hash table. */
6393
6394 bool
6395 used_type_hasher::equal (types_used_by_vars_entry *e1,
6396 types_used_by_vars_entry *e2)
6397 {
6398 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6399 }
6400
6401 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6402
6403 void
6404 types_used_by_var_decl_insert (tree type, tree var_decl)
6405 {
6406 if (type != NULL && var_decl != NULL)
6407 {
6408 types_used_by_vars_entry **slot;
6409 struct types_used_by_vars_entry e;
6410 e.var_decl = var_decl;
6411 e.type = type;
6412 if (types_used_by_vars_hash == NULL)
6413 types_used_by_vars_hash
6414 = hash_table<used_type_hasher>::create_ggc (37);
6415
6416 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6417 if (*slot == NULL)
6418 {
6419 struct types_used_by_vars_entry *entry;
6420 entry = ggc_alloc<types_used_by_vars_entry> ();
6421 entry->type = type;
6422 entry->var_decl = var_decl;
6423 *slot = entry;
6424 }
6425 }
6426 }
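
/* For illustration (a hedged sketch of how a front end might use
   this): for a file-scope declaration

     static struct S s;

   a caller can record the dependency with

     types_used_by_var_decl_insert (TREE_TYPE (decl_of_s), decl_of_s);

   where decl_of_s stands for the VAR_DECL of "s" (a hypothetical
   name); duplicate (type, decl) pairs hash to the same slot and are
   stored only once.  */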
6427
6428 namespace {
6429
6430 const pass_data pass_data_leaf_regs =
6431 {
6432 RTL_PASS, /* type */
6433 "*leaf_regs", /* name */
6434 OPTGROUP_NONE, /* optinfo_flags */
6435 TV_NONE, /* tv_id */
6436 0, /* properties_required */
6437 0, /* properties_provided */
6438 0, /* properties_destroyed */
6439 0, /* todo_flags_start */
6440 0, /* todo_flags_finish */
6441 };
6442
6443 class pass_leaf_regs : public rtl_opt_pass
6444 {
6445 public:
6446 pass_leaf_regs (gcc::context *ctxt)
6447 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6448 {}
6449
6450 /* opt_pass methods: */
6451 virtual unsigned int execute (function *)
6452 {
6453 return rest_of_handle_check_leaf_regs ();
6454 }
6455
6456 }; // class pass_leaf_regs
6457
6458 } // anon namespace
6459
6460 rtl_opt_pass *
6461 make_pass_leaf_regs (gcc::context *ctxt)
6462 {
6463 return new pass_leaf_regs (ctxt);
6464 }
6465
6466 static unsigned int
6467 rest_of_handle_thread_prologue_and_epilogue (void)
6468 {
6469 if (optimize)
6470 cleanup_cfg (CLEANUP_EXPENSIVE);
6471
6472 /* On some machines, the prologue and epilogue code, or parts thereof,
6473 can be represented as RTL. Doing so lets us schedule insns between
6474 it and the rest of the code and also allows delayed branch
6475 scheduling to operate in the epilogue. */
6476 thread_prologue_and_epilogue_insns ();
6477
6478 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6479 see PR57320. */
6480 cleanup_cfg (0);
6481
6482 /* The stack usage info is finalized during prologue expansion. */
6483 if (flag_stack_usage_info)
6484 output_stack_usage ();
6485
6486 return 0;
6487 }
6488
6489 namespace {
6490
6491 const pass_data pass_data_thread_prologue_and_epilogue =
6492 {
6493 RTL_PASS, /* type */
6494 "pro_and_epilogue", /* name */
6495 OPTGROUP_NONE, /* optinfo_flags */
6496 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6497 0, /* properties_required */
6498 0, /* properties_provided */
6499 0, /* properties_destroyed */
6500 0, /* todo_flags_start */
6501 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6502 };
6503
6504 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6505 {
6506 public:
6507 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6508 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6509 {}
6510
6511 /* opt_pass methods: */
6512 virtual unsigned int execute (function *)
6513 {
6514 return rest_of_handle_thread_prologue_and_epilogue ();
6515 }
6516
6517 }; // class pass_thread_prologue_and_epilogue
6518
6519 } // anon namespace
6520
6521 rtl_opt_pass *
6522 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6523 {
6524 return new pass_thread_prologue_and_epilogue (ctxt);
6525 }
6526 \f
6527
6528 /* This mini-pass fixes fall-out from SSA in asm statements that have
6529 in-out constraints. Say you start with
6530
6531 orig = inout;
6532 asm ("": "+mr" (inout));
6533 use (orig);
6534
6535 which is transformed very early to use explicit output and match operands:
6536
6537 orig = inout;
6538 asm ("": "=mr" (inout) : "0" (inout));
6539 use (orig);
6540
6541 Or, after SSA and copyprop,
6542
6543 asm ("": "=mr" (inout_2) : "0" (inout_1));
6544 use (inout_1);
6545
6546 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6547 they represent two separate values, so they will get different pseudo
6548 registers during expansion. Then, since the two operands need to match
6549 per the constraints, but use different pseudo registers, reload can
6550 only register a reload for these operands. But reloads can only be
6551 satisfied by hardregs, not by memory, so we need a register for this
6552 reload, just because we are presented with non-matching operands.
6553 So, even though we allow memory for this operand, no memory can be
6554 used for it, just because the two operands don't match. This can
6555 cause reload failures on register-starved targets.
6556
6557 So it's a symptom of reload not being able to use memory for reloads
6558 or, alternatively it's also a symptom of both operands not coming into
6559 reload as matching (in which case the pseudo could go to memory just
6560 fine, as the alternative allows it, and no reload would be necessary).
6561 We fix the latter problem here, by transforming
6562
6563 asm ("": "=mr" (inout_2) : "0" (inout_1));
6564
6565 back to
6566
6567 inout_2 = inout_1;
6568 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
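
/* A complete user-level instance of the problem described above (a
   sketch; "use" stands for any function that keeps "orig" live):

     extern void use (int);

     void f (int x)
     {
       int orig = x;
       asm ("" : "+mr" (x));
       use (orig);
     }

   After SSA and copy propagation the matched operands become distinct
   pseudos; this pass re-links them with an explicit copy as shown
   above, and its effect can be inspected in the "asmcons" RTL dump
   (-fdump-rtl-asmcons).  */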
6569
6570 static void
6571 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6572 {
6573 int i;
6574 bool changed = false;
6575 rtx op = SET_SRC (p_sets[0]);
6576 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6577 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6578 bool *output_matched = XALLOCAVEC (bool, noutputs);
6579
6580 memset (output_matched, 0, noutputs * sizeof (bool));
6581 for (i = 0; i < ninputs; i++)
6582 {
6583 rtx input, output;
6584 rtx_insn *insns;
6585 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6586 char *end;
6587 int match, j;
6588
6589 if (*constraint == '%')
6590 constraint++;
6591
6592 match = strtoul (constraint, &end, 10);
6593 if (end == constraint)
6594 continue;
6595
6596 gcc_assert (match < noutputs);
6597 output = SET_DEST (p_sets[match]);
6598 input = RTVEC_ELT (inputs, i);
6599 /* Only do the transformation for pseudos. */
6600 if (! REG_P (output)
6601 || rtx_equal_p (output, input)
6602 || (GET_MODE (input) != VOIDmode
6603 && GET_MODE (input) != GET_MODE (output)))
6604 continue;
6605
6606 /* We can't do anything if the output is also used as input,
6607 as we're going to overwrite it. */
6608 for (j = 0; j < ninputs; j++)
6609 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6610 break;
6611 if (j != ninputs)
6612 continue;
6613
6614 /* Avoid changing the same input several times. For
6615 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6616 only change in once (to out1), rather than changing it
6617 first to out1 and afterwards to out2. */
6618 if (i > 0)
6619 {
6620 for (j = 0; j < noutputs; j++)
6621 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6622 break;
6623 if (j != noutputs)
6624 continue;
6625 }
6626 output_matched[match] = true;
6627
6628 start_sequence ();
6629 emit_move_insn (output, input);
6630 insns = get_insns ();
6631 end_sequence ();
6632 emit_insn_before (insns, insn);
6633
6634 /* Now replace all mentions of the input with output. We can't
6635 just replace the occurrence in inputs[i], as the register might
6636 also be used in some other input (or even in an address of an
6637 output), which would mean possibly increasing the number of
6638 inputs by one (namely 'output' in addition), which might pose
6639 a too complicated problem for reload to solve. E.g. this situation:
6640
6641 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6642
6643 Here 'input' is used in two occurrences as input (once for the
6644 input operand, once for the address in the second output operand).
6645 If we would replace only the occurrence of the input operand (to
6646 make the matching) we would be left with this:
6647
6648 output = input
6649 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6650
6651 Now we suddenly have two different input values (containing the same
6652 value, but different pseudos) where we formerly had only one.
6653 With more complicated asms this might lead to reload failures
6654 which wouldn't have happened without this pass. So, iterate over
6655 all operands and replace all occurrences of the register used. */
6656 for (j = 0; j < noutputs; j++)
6657 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6658 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6659 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6660 input, output);
6661 for (j = 0; j < ninputs; j++)
6662 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6663 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6664 input, output);
6665
6666 changed = true;
6667 }
6668
6669 if (changed)
6670 df_insn_rescan (insn);
6671 }
6672
6673 /* Add the decl D to the local_decls list of FUN. */
6674
6675 void
6676 add_local_decl (struct function *fun, tree d)
6677 {
6678 gcc_assert (TREE_CODE (d) == VAR_DECL);
6679 vec_safe_push (fun->local_decls, d);
6680 }
6681
6682 namespace {
6683
6684 const pass_data pass_data_match_asm_constraints =
6685 {
6686 RTL_PASS, /* type */
6687 "asmcons", /* name */
6688 OPTGROUP_NONE, /* optinfo_flags */
6689 TV_NONE, /* tv_id */
6690 0, /* properties_required */
6691 0, /* properties_provided */
6692 0, /* properties_destroyed */
6693 0, /* todo_flags_start */
6694 0, /* todo_flags_finish */
6695 };
6696
6697 class pass_match_asm_constraints : public rtl_opt_pass
6698 {
6699 public:
6700 pass_match_asm_constraints (gcc::context *ctxt)
6701 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6702 {}
6703
6704 /* opt_pass methods: */
6705 virtual unsigned int execute (function *);
6706
6707 }; // class pass_match_asm_constraints
6708
6709 unsigned
6710 pass_match_asm_constraints::execute (function *fun)
6711 {
6712 basic_block bb;
6713 rtx_insn *insn;
6714 rtx pat, *p_sets;
6715 int noutputs;
6716
6717 if (!crtl->has_asm_statement)
6718 return 0;
6719
6720 df_set_flags (DF_DEFER_INSN_RESCAN);
6721 FOR_EACH_BB_FN (bb, fun)
6722 {
6723 FOR_BB_INSNS (bb, insn)
6724 {
6725 if (!INSN_P (insn))
6726 continue;
6727
6728 pat = PATTERN (insn);
6729 if (GET_CODE (pat) == PARALLEL)
6730 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6731 else if (GET_CODE (pat) == SET)
6732 p_sets = &PATTERN (insn), noutputs = 1;
6733 else
6734 continue;
6735
6736 if (GET_CODE (*p_sets) == SET
6737 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6738 match_asm_constraints_1 (insn, p_sets, noutputs);
6739 }
6740 }
6741
6742 return TODO_df_finish;
6743 }
6744
6745 } // anon namespace
6746
6747 rtl_opt_pass *
6748 make_pass_match_asm_constraints (gcc::context *ctxt)
6749 {
6750 return new pass_match_asm_constraints (ctxt);
6751 }
6752
6753
6754 #include "gt-function.h"