1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "opts.h"
50 #include "regs.h"
51 #include "emit-rtl.h"
52 #include "recog.h"
53 #include "rtl-error.h"
54 #include "hard-reg-set.h"
55 #include "alias.h"
56 #include "fold-const.h"
57 #include "stor-layout.h"
58 #include "varasm.h"
59 #include "except.h"
60 #include "dojump.h"
61 #include "explow.h"
62 #include "calls.h"
63 #include "expr.h"
64 #include "optabs-tree.h"
65 #include "output.h"
66 #include "langhooks.h"
67 #include "common/common-target.h"
68 #include "gimplify.h"
69 #include "tree-pass.h"
70 #include "cfgrtl.h"
71 #include "cfganal.h"
72 #include "cfgbuild.h"
73 #include "cfgcleanup.h"
74 #include "cfgexpand.h"
75 #include "shrink-wrap.h"
76 #include "toplev.h"
77 #include "rtl-iter.h"
78 #include "tree-dfa.h"
79 #include "tree-ssa.h"
80 #include "stringpool.h"
81 #include "attribs.h"
82 #include "gimple.h"
83 #include "options.h"
84 #include "function-abi.h"
85
86 /* So we can assign to cfun in this file. */
87 #undef cfun
88
89 #ifndef STACK_ALIGNMENT_NEEDED
90 #define STACK_ALIGNMENT_NEEDED 1
91 #endif
92
93 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
94
95 /* Round a value down to the largest multiple of the required alignment
96 that does not exceed it. Avoid using division in case the value is
97 negative. Assume the alignment is a power of two. */
98 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
99
100 /* Similar, but round to the next highest integer that meets the
101 alignment. */
102 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
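
/* Worked example (illustrative only; both macros assume ALIGN is a power
   of two): with VALUE == 37 and ALIGN == 16,
     FLOOR_ROUND (37, 16) == (37 & ~15) == 32, and
     CEIL_ROUND (37, 16) == ((37 + 15) & ~15) == 48.
   Masking with ~(ALIGN - 1) rounds toward negative infinity even for
   negative values, whereas VALUE / ALIGN * ALIGN would truncate toward
   zero.  */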
103
104 /* Nonzero once virtual register instantiation has been done.
105 assign_stack_local uses frame_pointer_rtx when this is nonzero.
106 calls.c:emit_library_call_value_1 uses it to set up
107 post-instantiation libcalls. */
108 int virtuals_instantiated;
109
110 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
111 static GTY(()) int funcdef_no;
112
113 /* These variables hold pointers to functions to create and destroy
114 target specific, per-function data structures. */
115 struct machine_function * (*init_machine_status) (void);
116
117 /* The currently compiled function. */
118 struct function *cfun = 0;
119
120 /* These hashes record the prologue and epilogue insns. */
121
122 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
123 {
124 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
125 static bool equal (rtx a, rtx b) { return a == b; }
126 };
127
128 static GTY((cache))
129 hash_table<insn_cache_hasher> *prologue_insn_hash;
130 static GTY((cache))
131 hash_table<insn_cache_hasher> *epilogue_insn_hash;
132 \f
133
134 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
135 vec<tree, va_gc> *types_used_by_cur_var_decl;
136
137 /* Forward declarations. */
138
139 static class temp_slot *find_temp_slot_from_address (rtx);
140 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
141 static void pad_below (struct args_size *, machine_mode, tree);
142 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
143 static int all_blocks (tree, tree *);
144 static tree *get_block_vector (tree, int *);
145 extern tree debug_find_var_in_block_tree (tree, tree);
146 /* We always define `record_insns' even if it's not used so that we
147 can always export `prologue_epilogue_contains'. */
148 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
149 ATTRIBUTE_UNUSED;
150 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
151 static void prepare_function_start (void);
152 static void do_clobber_return_reg (rtx, void *);
153 static void do_use_return_reg (rtx, void *);
154
155 \f
156 /* Stack of nested functions. */
157 /* Keep track of the cfun stack. */
158
159 static vec<function *> function_context_stack;
160
161 /* Save the current context for compilation of a nested function.
162 This is called from language-specific code. */
163
164 void
165 push_function_context (void)
166 {
167 if (cfun == 0)
168 allocate_struct_function (NULL, false);
169
170 function_context_stack.safe_push (cfun);
171 set_cfun (NULL);
172 }
173
174 /* Restore the last saved context, at the end of a nested function.
175 This function is called from language-specific code. */
176
177 void
178 pop_function_context (void)
179 {
180 struct function *p = function_context_stack.pop ();
181 set_cfun (p);
182 current_function_decl = p->decl;
183
184 /* Reset variables that have known state during rtx generation. */
185 virtuals_instantiated = 0;
186 generating_concat_p = 1;
187 }
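
/* Illustrative sketch (not code from this file): a front end compiling a
   nested function typically brackets the inner compilation with

     push_function_context ();
     ... allocate a struct function for the nested fn and expand it ...
     pop_function_context ();

   so that cfun and current_function_decl of the enclosing function are
   saved on function_context_stack and restored afterwards.  */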
188
189 /* Clear out all parts of the state in F that can safely be discarded
190 after the function has been parsed, but not compiled, to let
191 garbage collection reclaim the memory. */
192
193 void
194 free_after_parsing (struct function *f)
195 {
196 f->language = 0;
197 }
198
199 /* Clear out all parts of the state in F that can safely be discarded
200 after the function has been compiled, to let garbage collection
201 reclaim the memory. */
202
203 void
204 free_after_compilation (struct function *f)
205 {
206 prologue_insn_hash = NULL;
207 epilogue_insn_hash = NULL;
208
209 free (crtl->emit.regno_pointer_align);
210
211 memset (crtl, 0, sizeof (struct rtl_data));
212 f->eh = NULL;
213 f->machine = NULL;
214 f->cfg = NULL;
215 f->curr_properties &= ~PROP_cfg;
216
217 regno_reg_rtx = NULL;
218 }
219 \f
220 /* Return size needed for stack frame based on slots so far allocated.
221 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
222 the caller may have to do that. */
223
224 poly_int64
225 get_frame_size (void)
226 {
227 if (FRAME_GROWS_DOWNWARD)
228 return -frame_offset;
229 else
230 return frame_offset;
231 }
232
233 /* Issue an error message and return TRUE if frame OFFSET overflows in
234 the signed target pointer arithmetic for function FUNC. Otherwise
235 return FALSE. */
236
237 bool
238 frame_offset_overflow (poly_int64 offset, tree func)
239 {
240 poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
241 unsigned HOST_WIDE_INT limit
242 = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
243 /* Leave room for the fixed part of the frame. */
244 - 64 * UNITS_PER_WORD);
245
246 if (!coeffs_in_range_p (size, 0U, limit))
247 {
248 unsigned HOST_WIDE_INT hwisize;
249 if (size.is_constant (&hwisize))
250 error_at (DECL_SOURCE_LOCATION (func),
251 "total size of local objects %wu exceeds maximum %wu",
252 hwisize, limit);
253 else
254 error_at (DECL_SOURCE_LOCATION (func),
255 "total size of local objects exceeds maximum %wu",
256 limit);
257 return true;
258 }
259
260 return false;
261 }
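
/* For example (illustrative only): with a 64-bit Pmode and 8-byte words
   the limit above is 2**63 - 64*8 bytes; a frame whose local-object size
   cannot be shown to stay below that bound triggers the "total size of
   local objects" diagnostic, and callers such as assign_stack_local_1
   then reset frame_offset.  */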
262
263 /* Return the minimum spill slot alignment for a register of mode MODE. */
264
265 unsigned int
266 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
267 {
268 return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
269 }
270
271 /* Return stack slot alignment in bits for TYPE and MODE. */
272
273 static unsigned int
274 get_stack_local_alignment (tree type, machine_mode mode)
275 {
276 unsigned int alignment;
277
278 if (mode == BLKmode)
279 alignment = BIGGEST_ALIGNMENT;
280 else
281 alignment = GET_MODE_ALIGNMENT (mode);
282
283 /* Allow the front end to (possibly) increase the alignment of this
284 stack slot. */
285 if (! type)
286 type = lang_hooks.types.type_for_mode (mode, 0);
287
288 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
289 }
290
291 /* Determine whether it is possible to fit a stack slot of size SIZE and
292 alignment ALIGNMENT into an area in the stack frame that starts at
293 frame offset START and has a length of LENGTH. If so, store the frame
294 offset to be used for the stack slot in *POFFSET and return true;
295 return false otherwise. This function will extend the frame size when
296 given a start/length pair that lies at the end of the frame. */
297
298 static bool
299 try_fit_stack_local (poly_int64 start, poly_int64 length,
300 poly_int64 size, unsigned int alignment,
301 poly_int64_pod *poffset)
302 {
303 poly_int64 this_frame_offset;
304 int frame_off, frame_alignment, frame_phase;
305
306 /* Calculate how many bytes the start of local variables is off from
307 stack alignment. */
308 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
309 frame_off = targetm.starting_frame_offset () % frame_alignment;
310 frame_phase = frame_off ? frame_alignment - frame_off : 0;
311
312 /* Round the frame offset to the specified alignment. */
313
314 if (FRAME_GROWS_DOWNWARD)
315 this_frame_offset
316 = (aligned_lower_bound (start + length - size - frame_phase, alignment)
317 + frame_phase);
318 else
319 this_frame_offset
320 = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
321
322 /* See if it fits. If this space is at the edge of the frame,
323 consider extending the frame to make it fit. Our caller relies on
324 this when allocating a new slot. */
325 if (maybe_lt (this_frame_offset, start))
326 {
327 if (known_eq (frame_offset, start))
328 frame_offset = this_frame_offset;
329 else
330 return false;
331 }
332 else if (maybe_gt (this_frame_offset + size, start + length))
333 {
334 if (known_eq (frame_offset, start + length))
335 frame_offset = this_frame_offset + size;
336 else
337 return false;
338 }
339
340 *poffset = this_frame_offset;
341 return true;
342 }
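
/* Worked example (illustrative only): assume FRAME_GROWS_DOWNWARD, a zero
   starting frame offset (so frame_phase == 0), START == -32, LENGTH == 32,
   SIZE == 12 and ALIGNMENT == 8.  Then this_frame_offset is
   aligned_lower_bound (-32 + 32 - 12, 8) == -16; the slot occupies
   [-16, -4), which lies inside [START, START + LENGTH), so the frame is
   not extended and *POFFSET becomes -16.  */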
343
344 /* Create a new frame_space structure describing free space in the stack
345 frame beginning at START and ending at END, and chain it into the
346 function's frame_space_list. */
347
348 static void
349 add_frame_space (poly_int64 start, poly_int64 end)
350 {
351 class frame_space *space = ggc_alloc<frame_space> ();
352 space->next = crtl->frame_space_list;
353 crtl->frame_space_list = space;
354 space->start = start;
355 space->length = end - start;
356 }
357
358 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
359 with machine mode MODE.
360
361 ALIGN controls the amount of alignment for the address of the slot:
362 0 means according to MODE,
363 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
364 -2 means use BITS_PER_UNIT,
365 positive specifies alignment boundary in bits.
366
367 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
368 alignment and ASLK_RECORD_PAD bit set if we should remember
369 extra space we allocated for alignment purposes. When we are
370 called from assign_stack_temp_for_type, it is not set so we don't
371 track the same stack slot in two independent lists.
372
373 We do not round to stack_boundary here. */
374
375 rtx
376 assign_stack_local_1 (machine_mode mode, poly_int64 size,
377 int align, int kind)
378 {
379 rtx x, addr;
380 poly_int64 bigend_correction = 0;
381 poly_int64 slot_offset = 0, old_frame_offset;
382 unsigned int alignment, alignment_in_bits;
383
384 if (align == 0)
385 {
386 alignment = get_stack_local_alignment (NULL, mode);
387 alignment /= BITS_PER_UNIT;
388 }
389 else if (align == -1)
390 {
391 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
392 size = aligned_upper_bound (size, alignment);
393 }
394 else if (align == -2)
395 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
396 else
397 alignment = align / BITS_PER_UNIT;
398
399 alignment_in_bits = alignment * BITS_PER_UNIT;
400
401 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
402 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
403 {
404 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
405 alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
406 }
407
408 if (SUPPORTS_STACK_ALIGNMENT)
409 {
410 if (crtl->stack_alignment_estimated < alignment_in_bits)
411 {
412 if (!crtl->stack_realign_processed)
413 crtl->stack_alignment_estimated = alignment_in_bits;
414 else
415 {
416 /* If stack is realigned and stack alignment value
417 hasn't been finalized, it is OK not to increase
418 stack_alignment_estimated. The bigger alignment
419 requirement is recorded in stack_alignment_needed
420 below. */
421 gcc_assert (!crtl->stack_realign_finalized);
422 if (!crtl->stack_realign_needed)
423 {
424 /* It is OK to reduce the alignment as long as the
425 requested size is 0 or the estimated stack
426 alignment >= mode alignment. */
427 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
428 || known_eq (size, 0)
429 || (crtl->stack_alignment_estimated
430 >= GET_MODE_ALIGNMENT (mode)));
431 alignment_in_bits = crtl->stack_alignment_estimated;
432 alignment = alignment_in_bits / BITS_PER_UNIT;
433 }
434 }
435 }
436 }
437
438 if (crtl->stack_alignment_needed < alignment_in_bits)
439 crtl->stack_alignment_needed = alignment_in_bits;
440 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
441 crtl->max_used_stack_slot_alignment = alignment_in_bits;
442
443 if (mode != BLKmode || maybe_ne (size, 0))
444 {
445 if (kind & ASLK_RECORD_PAD)
446 {
447 class frame_space **psp;
448
449 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
450 {
451 class frame_space *space = *psp;
452 if (!try_fit_stack_local (space->start, space->length, size,
453 alignment, &slot_offset))
454 continue;
455 *psp = space->next;
456 if (known_gt (slot_offset, space->start))
457 add_frame_space (space->start, slot_offset);
458 if (known_lt (slot_offset + size, space->start + space->length))
459 add_frame_space (slot_offset + size,
460 space->start + space->length);
461 goto found_space;
462 }
463 }
464 }
465 else if (!STACK_ALIGNMENT_NEEDED)
466 {
467 slot_offset = frame_offset;
468 goto found_space;
469 }
470
471 old_frame_offset = frame_offset;
472
473 if (FRAME_GROWS_DOWNWARD)
474 {
475 frame_offset -= size;
476 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
477
478 if (kind & ASLK_RECORD_PAD)
479 {
480 if (known_gt (slot_offset, frame_offset))
481 add_frame_space (frame_offset, slot_offset);
482 if (known_lt (slot_offset + size, old_frame_offset))
483 add_frame_space (slot_offset + size, old_frame_offset);
484 }
485 }
486 else
487 {
488 frame_offset += size;
489 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
490
491 if (kind & ASLK_RECORD_PAD)
492 {
493 if (known_gt (slot_offset, old_frame_offset))
494 add_frame_space (old_frame_offset, slot_offset);
495 if (known_lt (slot_offset + size, frame_offset))
496 add_frame_space (slot_offset + size, frame_offset);
497 }
498 }
499
500 found_space:
501 /* On a big-endian machine, if we are allocating more space than we will use,
502 use the least significant bytes of those that are allocated. */
503 if (mode != BLKmode)
504 {
505 /* The slot size can sometimes be smaller than the mode size;
506 e.g. the rs6000 port allocates slots with a vector mode
507 that have the size of only one element. However, the slot
508 size must always be ordered with respect to the mode size, in the
509 same way as for a subreg. */
510 gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
511 if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
512 bigend_correction = size - GET_MODE_SIZE (mode);
513 }
514
515 /* If we have already instantiated virtual registers, return the actual
516 address relative to the frame pointer. */
517 if (virtuals_instantiated)
518 addr = plus_constant (Pmode, frame_pointer_rtx,
519 trunc_int_for_mode
520 (slot_offset + bigend_correction
521 + targetm.starting_frame_offset (), Pmode));
522 else
523 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
524 trunc_int_for_mode
525 (slot_offset + bigend_correction,
526 Pmode));
527
528 x = gen_rtx_MEM (mode, addr);
529 set_mem_align (x, alignment_in_bits);
530 MEM_NOTRAP_P (x) = 1;
531
532 vec_safe_push (stack_slot_list, x);
533
534 if (frame_offset_overflow (frame_offset, current_function_decl))
535 frame_offset = 0;
536
537 return x;
538 }
539
540 /* Wrapper around assign_stack_local_1, passing ASLK_RECORD_PAD as KIND. */
541
542 rtx
543 assign_stack_local (machine_mode mode, poly_int64 size, int align)
544 {
545 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
546 }
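
/* Typical use (an illustrative sketch, not taken from this file): a caller
   that needs a scratch stack word in DImode can do

     rtx mem = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   which returns a DImode MEM addressed off virtual_stack_vars_rtx (or off
   frame_pointer_rtx once virtuals are instantiated), aligned according to
   DImode because ALIGN is 0.  */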
547 \f
548 /* In order to evaluate some expressions, such as function calls returning
549 structures in memory, we need to temporarily allocate stack locations.
550 We record each allocated temporary in the following structure.
551
552 Associated with each temporary slot is a nesting level. When we pop up
553 one level, all temporaries associated with the previous level are freed.
554 Normally, all temporaries are freed after the execution of the statement
555 in which they were created. However, if we are inside a ({...}) grouping,
556 the result may be in a temporary and hence must be preserved. If the
557 result could be in a temporary, we preserve it if we can determine which
558 one it is in. If we cannot determine which temporary may contain the
559 result, all temporaries are preserved. A temporary is preserved by
560 pretending it was allocated at the previous nesting level. */
561
562 class GTY(()) temp_slot {
563 public:
564 /* Points to next temporary slot. */
565 class temp_slot *next;
566 /* Points to previous temporary slot. */
567 class temp_slot *prev;
568 /* The rtx used to reference the slot. */
569 rtx slot;
570 /* The size, in units, of the slot. */
571 poly_int64 size;
572 /* The type of the object in the slot, or zero if it doesn't correspond
573 to a type. We use this to determine whether a slot can be reused.
574 It can be reused if objects of the type of the new slot will always
575 conflict with objects of the type of the old slot. */
576 tree type;
577 /* The alignment (in bits) of the slot. */
578 unsigned int align;
579 /* Nonzero if this temporary is currently in use. */
580 char in_use;
581 /* Nesting level at which this slot is being used. */
582 int level;
583 /* The offset of the slot from the frame_pointer, including extra space
584 for alignment. This info is for combine_temp_slots. */
585 poly_int64 base_offset;
586 /* The size of the slot, including extra space for alignment. This
587 info is for combine_temp_slots. */
588 poly_int64 full_size;
589 };
590
591 /* Entry for the below hash table. */
592 struct GTY((for_user)) temp_slot_address_entry {
593 hashval_t hash;
594 rtx address;
595 class temp_slot *temp_slot;
596 };
597
598 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
599 {
600 static hashval_t hash (temp_slot_address_entry *);
601 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
602 };
603
604 /* A table of addresses that represent a stack slot. The table is a mapping
605 from address RTXen to a temp slot. */
606 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
607 static size_t n_temp_slots_in_use;
608
609 /* Removes temporary slot TEMP from LIST. */
610
611 static void
612 cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
613 {
614 if (temp->next)
615 temp->next->prev = temp->prev;
616 if (temp->prev)
617 temp->prev->next = temp->next;
618 else
619 *list = temp->next;
620
621 temp->prev = temp->next = NULL;
622 }
623
624 /* Inserts temporary slot TEMP to LIST. */
625
626 static void
627 insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
628 {
629 temp->next = *list;
630 if (*list)
631 (*list)->prev = temp;
632 temp->prev = NULL;
633 *list = temp;
634 }
635
636 /* Returns the list of used temp slots at LEVEL. */
637
638 static class temp_slot **
639 temp_slots_at_level (int level)
640 {
641 if (level >= (int) vec_safe_length (used_temp_slots))
642 vec_safe_grow_cleared (used_temp_slots, level + 1, true);
643
644 return &(*used_temp_slots)[level];
645 }
646
647 /* Returns the maximal temporary slot level. */
648
649 static int
650 max_slot_level (void)
651 {
652 if (!used_temp_slots)
653 return -1;
654
655 return used_temp_slots->length () - 1;
656 }
657
658 /* Moves temporary slot TEMP to LEVEL. */
659
660 static void
661 move_slot_to_level (class temp_slot *temp, int level)
662 {
663 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
664 insert_slot_to_list (temp, temp_slots_at_level (level));
665 temp->level = level;
666 }
667
668 /* Make temporary slot TEMP available. */
669
670 static void
671 make_slot_available (class temp_slot *temp)
672 {
673 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
674 insert_slot_to_list (temp, &avail_temp_slots);
675 temp->in_use = 0;
676 temp->level = -1;
677 n_temp_slots_in_use--;
678 }
679
680 /* Compute the hash value for an address -> temp slot mapping.
681 The value is cached on the mapping entry. */
682 static hashval_t
683 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
684 {
685 int do_not_record = 0;
686 return hash_rtx (t->address, GET_MODE (t->address),
687 &do_not_record, NULL, false);
688 }
689
690 /* Return the hash value for an address -> temp slot mapping. */
691 hashval_t
692 temp_address_hasher::hash (temp_slot_address_entry *t)
693 {
694 return t->hash;
695 }
696
697 /* Compare two address -> temp slot mapping entries. */
698 bool
699 temp_address_hasher::equal (temp_slot_address_entry *t1,
700 temp_slot_address_entry *t2)
701 {
702 return exp_equiv_p (t1->address, t2->address, 0, true);
703 }
704
705 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
706 static void
707 insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
708 {
709 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
710 t->address = copy_rtx (address);
711 t->temp_slot = temp_slot;
712 t->hash = temp_slot_address_compute_hash (t);
713 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
714 }
715
716 /* Remove an address -> temp slot mapping entry if the temp slot is
717 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
718 int
719 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
720 {
721 const struct temp_slot_address_entry *t = *slot;
722 if (! t->temp_slot->in_use)
723 temp_slot_address_table->clear_slot (slot);
724 return 1;
725 }
726
727 /* Remove all mappings of addresses to unused temp slots. */
728 static void
729 remove_unused_temp_slot_addresses (void)
730 {
731 /* Use quicker clearing if there aren't any active temp slots. */
732 if (n_temp_slots_in_use)
733 temp_slot_address_table->traverse
734 <void *, remove_unused_temp_slot_addresses_1> (NULL);
735 else
736 temp_slot_address_table->empty ();
737 }
738
739 /* Find the temp slot corresponding to the object at address X. */
740
741 static class temp_slot *
742 find_temp_slot_from_address (rtx x)
743 {
744 class temp_slot *p;
745 struct temp_slot_address_entry tmp, *t;
746
747 /* First try the easy way:
748 See if X exists in the address -> temp slot mapping. */
749 tmp.address = x;
750 tmp.temp_slot = NULL;
751 tmp.hash = temp_slot_address_compute_hash (&tmp);
752 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
753 if (t)
754 return t->temp_slot;
755
756 /* If we have a sum involving a register, see if it points to a temp
757 slot. */
758 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
759 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
760 return p;
761 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
762 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
763 return p;
764
765 /* Last resort: Address is a virtual stack var address. */
766 poly_int64 offset;
767 if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
768 {
769 int i;
770 for (i = max_slot_level (); i >= 0; i--)
771 for (p = *temp_slots_at_level (i); p; p = p->next)
772 if (known_in_range_p (offset, p->base_offset, p->full_size))
773 return p;
774 }
775
776 return NULL;
777 }
778 \f
779 /* Allocate a temporary stack slot and record it for possible later
780 reuse.
781
782 MODE is the machine mode to be given to the returned rtx.
783
784 SIZE is the size in units of the space required. We do no rounding here
785 since assign_stack_local will do any required rounding.
786
787 TYPE is the type that will be used for the stack slot. */
788
789 rtx
790 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
791 {
792 unsigned int align;
793 class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
794 rtx slot;
795
796 gcc_assert (known_size_p (size));
797
798 align = get_stack_local_alignment (type, mode);
799
800 /* Try to find an available, already-allocated temporary of the proper
801 mode which meets the size and alignment requirements. Choose the
802 smallest one with the closest alignment.
803
804 If assign_stack_temp is called outside of the tree->rtl expansion,
805 we cannot reuse the stack slots (that may still refer to
806 VIRTUAL_STACK_VARS_REGNUM). */
807 if (!virtuals_instantiated)
808 {
809 for (p = avail_temp_slots; p; p = p->next)
810 {
811 if (p->align >= align
812 && known_ge (p->size, size)
813 && GET_MODE (p->slot) == mode
814 && objects_must_conflict_p (p->type, type)
815 && (best_p == 0
816 || (known_eq (best_p->size, p->size)
817 ? best_p->align > p->align
818 : known_ge (best_p->size, p->size))))
819 {
820 if (p->align == align && known_eq (p->size, size))
821 {
822 selected = p;
823 cut_slot_from_list (selected, &avail_temp_slots);
824 best_p = 0;
825 break;
826 }
827 best_p = p;
828 }
829 }
830 }
831
832 /* Make our best, if any, the one to use. */
833 if (best_p)
834 {
835 selected = best_p;
836 cut_slot_from_list (selected, &avail_temp_slots);
837
838 /* If there are enough aligned bytes left over, make them into a new
839 temp_slot so that the extra bytes don't get wasted. Do this only
840 for BLKmode slots, so that we can be sure of the alignment. */
841 if (GET_MODE (best_p->slot) == BLKmode)
842 {
843 int alignment = best_p->align / BITS_PER_UNIT;
844 poly_int64 rounded_size = aligned_upper_bound (size, alignment);
845
846 if (known_ge (best_p->size - rounded_size, alignment))
847 {
848 p = ggc_alloc<temp_slot> ();
849 p->in_use = 0;
850 p->size = best_p->size - rounded_size;
851 p->base_offset = best_p->base_offset + rounded_size;
852 p->full_size = best_p->full_size - rounded_size;
853 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
854 p->align = best_p->align;
855 p->type = best_p->type;
856 insert_slot_to_list (p, &avail_temp_slots);
857
858 vec_safe_push (stack_slot_list, p->slot);
859
860 best_p->size = rounded_size;
861 best_p->full_size = rounded_size;
862 }
863 }
864 }
865
866 /* If we still didn't find one, make a new temporary. */
867 if (selected == 0)
868 {
869 poly_int64 frame_offset_old = frame_offset;
870
871 p = ggc_alloc<temp_slot> ();
872
873 /* We are passing an explicit alignment request to assign_stack_local.
874 One side effect of that is assign_stack_local will not round SIZE
875 to ensure the frame offset remains suitably aligned.
876
877 So for requests which depended on the rounding of SIZE, we go ahead
878 and round it now. We also make sure ALIGNMENT is at least
879 BIGGEST_ALIGNMENT. */
880 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
881 p->slot = assign_stack_local_1 (mode,
882 (mode == BLKmode
883 ? aligned_upper_bound (size,
884 (int) align
885 / BITS_PER_UNIT)
886 : size),
887 align, 0);
888
889 p->align = align;
890
891 /* The following slot size computation is necessary because we don't
892 know the actual size of the temporary slot until assign_stack_local
893 has performed all the frame alignment and size rounding for the
894 requested temporary. Note that extra space added for alignment
895 can be either above or below this stack slot depending on which
896 way the frame grows. We include the extra space if and only if it
897 is above this slot. */
898 if (FRAME_GROWS_DOWNWARD)
899 p->size = frame_offset_old - frame_offset;
900 else
901 p->size = size;
902
903 /* Now define the fields used by combine_temp_slots. */
904 if (FRAME_GROWS_DOWNWARD)
905 {
906 p->base_offset = frame_offset;
907 p->full_size = frame_offset_old - frame_offset;
908 }
909 else
910 {
911 p->base_offset = frame_offset_old;
912 p->full_size = frame_offset - frame_offset_old;
913 }
914
915 selected = p;
916 }
917
918 p = selected;
919 p->in_use = 1;
920 p->type = type;
921 p->level = temp_slot_level;
922 n_temp_slots_in_use++;
923
924 pp = temp_slots_at_level (p->level);
925 insert_slot_to_list (p, pp);
926 insert_temp_slot_address (XEXP (p->slot, 0), p);
927
928 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
929 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
930 vec_safe_push (stack_slot_list, slot);
931
932 /* If we know the alias set for the memory that will be used, use
933 it. If there's no TYPE, then we don't know anything about the
934 alias set for the memory. */
935 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
936 set_mem_align (slot, align);
937
938 /* If a type is specified, set the relevant flags. */
939 if (type != 0)
940 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
941 MEM_NOTRAP_P (slot) = 1;
942
943 return slot;
944 }
945
946 /* Allocate a temporary stack slot and record it for possible later
947 reuse. First two arguments are same as in preceding function. */
948
949 rtx
950 assign_stack_temp (machine_mode mode, poly_int64 size)
951 {
952 return assign_stack_temp_for_type (mode, size, NULL_TREE);
953 }
954 \f
955 /* Assign a temporary.
956 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
957 and so the decl should be used in error messages. In either case, we
958 allocate a temporary of the given type.
959 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
960 it is 0 if a register is OK.
961 DONT_PROMOTE is 1 if we should not promote values in register
962 to wider modes. */
963
964 rtx
965 assign_temp (tree type_or_decl, int memory_required,
966 int dont_promote ATTRIBUTE_UNUSED)
967 {
968 tree type, decl;
969 machine_mode mode;
970 #ifdef PROMOTE_MODE
971 int unsignedp;
972 #endif
973
974 if (DECL_P (type_or_decl))
975 decl = type_or_decl, type = TREE_TYPE (decl);
976 else
977 decl = NULL, type = type_or_decl;
978
979 mode = TYPE_MODE (type);
980 #ifdef PROMOTE_MODE
981 unsignedp = TYPE_UNSIGNED (type);
982 #endif
983
984 /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
985 end. See also create_tmp_var for the gimplification-time check. */
986 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
987
988 if (mode == BLKmode || memory_required)
989 {
990 poly_int64 size;
991 rtx tmp;
992
993 /* Unfortunately, we don't yet know how to allocate variable-sized
994 temporaries. However, sometimes we can find a fixed upper limit on
995 the size, so try that instead. */
996 if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
997 size = max_int_size_in_bytes (type);
998
999 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
1000 problems with allocating the stack space. */
1001 if (known_eq (size, 0))
1002 size = 1;
1003
1004 /* The size of the temporary may be too large to fit into an integer. */
1005 /* ??? Not sure this should happen except for user silliness, so limit
1006 this to things that aren't compiler-generated temporaries. The
1007 rest of the time we'll die in assign_stack_temp_for_type. */
1008 if (decl
1009 && !known_size_p (size)
1010 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1011 {
1012 error ("size of variable %q+D is too large", decl);
1013 size = 1;
1014 }
1015
1016 tmp = assign_stack_temp_for_type (mode, size, type);
1017 return tmp;
1018 }
1019
1020 #ifdef PROMOTE_MODE
1021 if (! dont_promote)
1022 mode = promote_mode (type, mode, &unsignedp);
1023 #endif
1024
1025 return gen_reg_rtx (mode);
1026 }
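
/* Illustrative sketch: for a complete, non-addressable scalar TYPE,

     rtx t = assign_temp (type, 0, 1);

   yields a pseudo register in TYPE_MODE (type) (no PROMOTE_MODE widening,
   since DONT_PROMOTE is 1), whereas calling it with MEMORY_REQUIRED == 1
   forces an addressable stack slot via assign_stack_temp_for_type.  */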
1027 \f
1028 /* Combine temporary stack slots which are adjacent on the stack.
1029
1030 This allows for better use of already allocated stack space. This is only
1031 done for BLKmode slots because we can be sure that we won't have alignment
1032 problems in this case. */
1033
1034 static void
1035 combine_temp_slots (void)
1036 {
1037 class temp_slot *p, *q, *next, *next_q;
1038 int num_slots;
1039
1040 /* We can't combine slots, because the information about which slot
1041 is in which alias set will be lost. */
1042 if (flag_strict_aliasing)
1043 return;
1044
1045 /* If there are a lot of temp slots, don't do anything unless
1046 high levels of optimization are enabled. */
1047 if (! flag_expensive_optimizations)
1048 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1049 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1050 return;
1051
1052 for (p = avail_temp_slots; p; p = next)
1053 {
1054 int delete_p = 0;
1055
1056 next = p->next;
1057
1058 if (GET_MODE (p->slot) != BLKmode)
1059 continue;
1060
1061 for (q = p->next; q; q = next_q)
1062 {
1063 int delete_q = 0;
1064
1065 next_q = q->next;
1066
1067 if (GET_MODE (q->slot) != BLKmode)
1068 continue;
1069
1070 if (known_eq (p->base_offset + p->full_size, q->base_offset))
1071 {
1072 /* Q comes after P; combine Q into P. */
1073 p->size += q->size;
1074 p->full_size += q->full_size;
1075 delete_q = 1;
1076 }
1077 else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1078 {
1079 /* P comes after Q; combine P into Q. */
1080 q->size += p->size;
1081 q->full_size += p->full_size;
1082 delete_p = 1;
1083 break;
1084 }
1085 if (delete_q)
1086 cut_slot_from_list (q, &avail_temp_slots);
1087 }
1088
1089 /* Either delete P or advance past it. */
1090 if (delete_p)
1091 cut_slot_from_list (p, &avail_temp_slots);
1092 }
1093 }
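
/* Illustrative example: if two available BLKmode slots P and Q satisfy
   known_eq (p->base_offset + p->full_size, q->base_offset) -- say P covers
   bytes [0, 16) and Q covers [16, 48) of the frame -- the loop above folds
   Q into P, leaving one free slot covering [0, 48) that a later
   assign_stack_temp_for_type call can reuse.  */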
1094 \f
1095 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1096 slot that previously was known by OLD_RTX. */
1097
1098 void
1099 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1100 {
1101 class temp_slot *p;
1102
1103 if (rtx_equal_p (old_rtx, new_rtx))
1104 return;
1105
1106 p = find_temp_slot_from_address (old_rtx);
1107
1108 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1109 NEW_RTX is a register, see if one operand of the PLUS is a
1110 temporary location. If so, NEW_RTX points into it. Otherwise,
1111 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1112 in common between them; if so, try a recursive call on the
1113 other operands. */
1114 if (p == 0)
1115 {
1116 if (GET_CODE (old_rtx) != PLUS)
1117 return;
1118
1119 if (REG_P (new_rtx))
1120 {
1121 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1122 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1123 return;
1124 }
1125 else if (GET_CODE (new_rtx) != PLUS)
1126 return;
1127
1128 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1129 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1130 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1131 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1132 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1133 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1134 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1135 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1136
1137 return;
1138 }
1139
1140 /* Otherwise add an alias for the temp's address. */
1141 insert_temp_slot_address (new_rtx, p);
1142 }
1143
1144 /* If X could be a reference to a temporary slot, mark that slot as
1145 belonging to one level higher than the current level. If X
1146 matched one of our slots, just mark that one. Otherwise, we can't
1147 easily predict which it is, so upgrade all of them.
1148
1149 This is called when an ({...}) construct occurs and a statement
1150 returns a value in memory. */
1151
1152 void
1153 preserve_temp_slots (rtx x)
1154 {
1155 class temp_slot *p = 0, *next;
1156
1157 if (x == 0)
1158 return;
1159
1160 /* If X is a register that is being used as a pointer, see if we have
1161 a temporary slot we know it points to. */
1162 if (REG_P (x) && REG_POINTER (x))
1163 p = find_temp_slot_from_address (x);
1164
1165 /* If X is not in memory or is at a constant address, it cannot be in
1166 a temporary slot. */
1167 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1168 return;
1169
1170 /* First see if we can find a match. */
1171 if (p == 0)
1172 p = find_temp_slot_from_address (XEXP (x, 0));
1173
1174 if (p != 0)
1175 {
1176 if (p->level == temp_slot_level)
1177 move_slot_to_level (p, temp_slot_level - 1);
1178 return;
1179 }
1180
1181 /* Otherwise, preserve all non-kept slots at this level. */
1182 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1183 {
1184 next = p->next;
1185 move_slot_to_level (p, temp_slot_level - 1);
1186 }
1187 }
1188
1189 /* Free all temporaries used so far. This is normally called at the
1190 end of generating code for a statement. */
1191
1192 void
1193 free_temp_slots (void)
1194 {
1195 class temp_slot *p, *next;
1196 bool some_available = false;
1197
1198 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1199 {
1200 next = p->next;
1201 make_slot_available (p);
1202 some_available = true;
1203 }
1204
1205 if (some_available)
1206 {
1207 remove_unused_temp_slot_addresses ();
1208 combine_temp_slots ();
1209 }
1210 }
1211
1212 /* Push deeper into the nesting level for stack temporaries. */
1213
1214 void
1215 push_temp_slots (void)
1216 {
1217 temp_slot_level++;
1218 }
1219
1220 /* Pop a temporary nesting level. All slots in use in the current level
1221 are freed. */
1222
1223 void
1224 pop_temp_slots (void)
1225 {
1226 free_temp_slots ();
1227 temp_slot_level--;
1228 }
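
/* Typical nesting discipline (an illustrative sketch): callers bracket the
   expansion of a statement with

     push_temp_slots ();
     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));
     ... expand code that uses tmp ...
     pop_temp_slots ();

   so the slot backing TMP returns to avail_temp_slots when the level is
   popped, unless preserve_temp_slots has already moved it to an outer
   level.  */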
1229
1230 /* Initialize temporary slots. */
1231
1232 void
1233 init_temp_slots (void)
1234 {
1235 /* We have not allocated any temporaries yet. */
1236 avail_temp_slots = 0;
1237 vec_alloc (used_temp_slots, 0);
1238 temp_slot_level = 0;
1239 n_temp_slots_in_use = 0;
1240
1241 /* Set up the table to map addresses to temp slots. */
1242 if (! temp_slot_address_table)
1243 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1244 else
1245 temp_slot_address_table->empty ();
1246 }
1247 \f
1248 /* Functions and data structures to keep track of the values hard regs
1249 had at the start of the function. */
1250
1251 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1252 and has_hard_reg_initial_val. */
1253 struct GTY(()) initial_value_pair {
1254 rtx hard_reg;
1255 rtx pseudo;
1256 };
1257 /* ??? This could be a VEC but there is currently no way to define an
1258 opaque VEC type. This could be worked around by defining struct
1259 initial_value_pair in function.h. */
1260 struct GTY(()) initial_value_struct {
1261 int num_entries;
1262 int max_entries;
1263 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1264 };
1265
1266 /* If a pseudo represents an initial hard reg (or expression), return
1267 it, else return NULL_RTX. */
1268
1269 rtx
1270 get_hard_reg_initial_reg (rtx reg)
1271 {
1272 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1273 int i;
1274
1275 if (ivs == 0)
1276 return NULL_RTX;
1277
1278 for (i = 0; i < ivs->num_entries; i++)
1279 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1280 return ivs->entries[i].hard_reg;
1281
1282 return NULL_RTX;
1283 }
1284
1285 /* Make sure that there's a pseudo register of mode MODE that stores the
1286 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1287
1288 rtx
1289 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1290 {
1291 struct initial_value_struct *ivs;
1292 rtx rv;
1293
1294 rv = has_hard_reg_initial_val (mode, regno);
1295 if (rv)
1296 return rv;
1297
1298 ivs = crtl->hard_reg_initial_vals;
1299 if (ivs == 0)
1300 {
1301 ivs = ggc_alloc<initial_value_struct> ();
1302 ivs->num_entries = 0;
1303 ivs->max_entries = 5;
1304 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1305 crtl->hard_reg_initial_vals = ivs;
1306 }
1307
1308 if (ivs->num_entries >= ivs->max_entries)
1309 {
1310 ivs->max_entries += 5;
1311 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1312 ivs->max_entries);
1313 }
1314
1315 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1316 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1317
1318 return ivs->entries[ivs->num_entries++].pseudo;
1319 }
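
/* Illustrative use (a sketch; LINK_REGNUM is a hypothetical target macro):
   a backend that needs the entry-time value of its link register can call

     rtx lr = get_hard_reg_initial_val (Pmode, LINK_REGNUM);

   and emit_initial_value_sets later emits the copy from the hard register
   into the returned pseudo at the function's entry point.  */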
1320
1321 /* See if get_hard_reg_initial_val has been used to create a pseudo
1322 for the initial value of hard register REGNO in mode MODE. Return
1323 the associated pseudo if so, otherwise return NULL. */
1324
1325 rtx
1326 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1327 {
1328 struct initial_value_struct *ivs;
1329 int i;
1330
1331 ivs = crtl->hard_reg_initial_vals;
1332 if (ivs != 0)
1333 for (i = 0; i < ivs->num_entries; i++)
1334 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1335 && REGNO (ivs->entries[i].hard_reg) == regno)
1336 return ivs->entries[i].pseudo;
1337
1338 return NULL_RTX;
1339 }
1340
1341 unsigned int
1342 emit_initial_value_sets (void)
1343 {
1344 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1345 int i;
1346 rtx_insn *seq;
1347
1348 if (ivs == 0)
1349 return 0;
1350
1351 start_sequence ();
1352 for (i = 0; i < ivs->num_entries; i++)
1353 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1354 seq = get_insns ();
1355 end_sequence ();
1356
1357 emit_insn_at_entry (seq);
1358 return 0;
1359 }
1360
1361 /* Return the hardreg-pseudoreg initial values pair entry I and
1362 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1363 bool
1364 initial_value_entry (int i, rtx *hreg, rtx *preg)
1365 {
1366 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1367 if (!ivs || i >= ivs->num_entries)
1368 return false;
1369
1370 *hreg = ivs->entries[i].hard_reg;
1371 *preg = ivs->entries[i].pseudo;
1372 return true;
1373 }
1374 \f
1375 /* These routines are responsible for converting virtual register references
1376 to the actual hard register references once RTL generation is complete.
1377
1378 The following four variables are used for communication between the
1379 routines. They contain the offsets of the virtual registers from their
1380 respective hard registers. */
1381
1382 static poly_int64 in_arg_offset;
1383 static poly_int64 var_offset;
1384 static poly_int64 dynamic_offset;
1385 static poly_int64 out_arg_offset;
1386 static poly_int64 cfa_offset;
1387
1388 /* In most machines, the stack pointer register is equivalent to the bottom
1389 of the stack. */
1390
1391 #ifndef STACK_POINTER_OFFSET
1392 #define STACK_POINTER_OFFSET 0
1393 #endif
1394
1395 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1396 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1397 #endif
1398
1399 /* If not defined, pick an appropriate default for the offset of dynamically
1400 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1401 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1402
1403 #ifndef STACK_DYNAMIC_OFFSET
1404
1405 /* The bottom of the stack points to the actual arguments. If
1406 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1407 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1408 stack space for register parameters is not pushed by the caller, but
1409 rather part of the fixed stack areas and hence not included in
1410 `crtl->outgoing_args_size'. Nevertheless, we must allow
1411 for it when allocating stack dynamic objects. */
1412
1413 #ifdef INCOMING_REG_PARM_STACK_SPACE
1414 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1415 ((ACCUMULATE_OUTGOING_ARGS \
1416 ? (crtl->outgoing_args_size \
1417 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1418 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1419 : 0) + (STACK_POINTER_OFFSET))
1420 #else
1421 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1422 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1423 + (STACK_POINTER_OFFSET))
1424 #endif
1425 #endif
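
/* For example (illustrative only): with ACCUMULATE_OUTGOING_ARGS, no
   register-parameter stack space and a STACK_POINTER_OFFSET of 0, the
   default above reduces to crtl->outgoing_args_size, i.e. dynamically
   allocated stack objects are placed beyond the outgoing argument area.  */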
1426
1427 \f
1428 /* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1429 is a virtual register, return the equivalent hard register and set the
1430 offset indirectly through the pointer. Otherwise, return 0. */
1431
1432 static rtx
1433 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1434 {
1435 rtx new_rtx;
1436 poly_int64 offset;
1437
1438 if (x == virtual_incoming_args_rtx)
1439 {
1440 if (stack_realign_drap)
1441 {
1442 /* Replace virtual_incoming_args_rtx with internal arg
1443 pointer if DRAP is used to realign stack. */
1444 new_rtx = crtl->args.internal_arg_pointer;
1445 offset = 0;
1446 }
1447 else
1448 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1449 }
1450 else if (x == virtual_stack_vars_rtx)
1451 new_rtx = frame_pointer_rtx, offset = var_offset;
1452 else if (x == virtual_stack_dynamic_rtx)
1453 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1454 else if (x == virtual_outgoing_args_rtx)
1455 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1456 else if (x == virtual_cfa_rtx)
1457 {
1458 #ifdef FRAME_POINTER_CFA_OFFSET
1459 new_rtx = frame_pointer_rtx;
1460 #else
1461 new_rtx = arg_pointer_rtx;
1462 #endif
1463 offset = cfa_offset;
1464 }
1465 else if (x == virtual_preferred_stack_boundary_rtx)
1466 {
1467 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1468 offset = 0;
1469 }
1470 else
1471 return NULL_RTX;
1472
1473 *poffset = offset;
1474 return new_rtx;
1475 }
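
/* For example (illustrative only): once the offsets above have been
   computed, an address such as (plus:P virtual_stack_vars_rtx (const_int 8))
   is rewritten by the callers below into
   (plus:P frame_pointer_rtx (const_int <var_offset + 8>)), i.e. the virtual
   register is replaced by its hard register plus the recorded offset.  */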
1476
1477 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1478 registers present inside of *LOC. The expression is simplified,
1479 as much as possible, but is not to be considered "valid" in any sense
1480 implied by the target. Return true if any change is made. */
1481
1482 static bool
1483 instantiate_virtual_regs_in_rtx (rtx *loc)
1484 {
1485 if (!*loc)
1486 return false;
1487 bool changed = false;
1488 subrtx_ptr_iterator::array_type array;
1489 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1490 {
1491 rtx *loc = *iter;
1492 if (rtx x = *loc)
1493 {
1494 rtx new_rtx;
1495 poly_int64 offset;
1496 switch (GET_CODE (x))
1497 {
1498 case REG:
1499 new_rtx = instantiate_new_reg (x, &offset);
1500 if (new_rtx)
1501 {
1502 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1503 changed = true;
1504 }
1505 iter.skip_subrtxes ();
1506 break;
1507
1508 case PLUS:
1509 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1510 if (new_rtx)
1511 {
1512 XEXP (x, 0) = new_rtx;
1513 *loc = plus_constant (GET_MODE (x), x, offset, true);
1514 changed = true;
1515 iter.skip_subrtxes ();
1516 break;
1517 }
1518
1519 /* FIXME -- from old code */
1520 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1521 we can commute the PLUS and SUBREG because pointers into the
1522 frame are well-behaved. */
1523 break;
1524
1525 default:
1526 break;
1527 }
1528 }
1529 }
1530 return changed;
1531 }
1532
1533 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1534 matches the predicate for insn CODE operand OPERAND. */
1535
1536 static int
1537 safe_insn_predicate (int code, int operand, rtx x)
1538 {
1539 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1540 }
1541
1542 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1543 registers present inside of insn. The result will be a valid insn. */
1544
1545 static void
1546 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1547 {
1548 poly_int64 offset;
1549 int insn_code, i;
1550 bool any_change = false;
1551 rtx set, new_rtx, x;
1552 rtx_insn *seq;
1553
1554 /* There are some special cases to be handled first. */
1555 set = single_set (insn);
1556 if (set)
1557 {
1558 /* We're allowed to assign to a virtual register. This is interpreted
1559 to mean that the underlying register gets assigned the inverse
1560 transformation. This is used, for example, in the handling of
1561 non-local gotos. */
1562 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1563 if (new_rtx)
1564 {
1565 start_sequence ();
1566
1567 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1568 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1569 gen_int_mode (-offset, GET_MODE (new_rtx)));
1570 x = force_operand (x, new_rtx);
1571 if (x != new_rtx)
1572 emit_move_insn (new_rtx, x);
1573
1574 seq = get_insns ();
1575 end_sequence ();
1576
1577 emit_insn_before (seq, insn);
1578 delete_insn (insn);
1579 return;
1580 }
1581
1582 /* Handle a straight copy from a virtual register by generating a
1583 new add insn. The difference between this and falling through
1584 to the generic case is avoiding a new pseudo and eliminating a
1585 move insn in the initial rtl stream. */
1586 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1587 if (new_rtx
1588 && maybe_ne (offset, 0)
1589 && REG_P (SET_DEST (set))
1590 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1591 {
1592 start_sequence ();
1593
1594 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1595 gen_int_mode (offset,
1596 GET_MODE (SET_DEST (set))),
1597 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1598 if (x != SET_DEST (set))
1599 emit_move_insn (SET_DEST (set), x);
1600
1601 seq = get_insns ();
1602 end_sequence ();
1603
1604 emit_insn_before (seq, insn);
1605 delete_insn (insn);
1606 return;
1607 }
1608
1609 extract_insn (insn);
1610 insn_code = INSN_CODE (insn);
1611
1612 /* Handle a plus involving a virtual register by determining if the
1613 operands remain valid if they're modified in place. */
1614 poly_int64 delta;
1615 if (GET_CODE (SET_SRC (set)) == PLUS
1616 && recog_data.n_operands >= 3
1617 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1618 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1619 && poly_int_rtx_p (recog_data.operand[2], &delta)
1620 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1621 {
1622 offset += delta;
1623
1624 /* If the sum is zero, then replace with a plain move. */
1625 if (known_eq (offset, 0)
1626 && REG_P (SET_DEST (set))
1627 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1628 {
1629 start_sequence ();
1630 emit_move_insn (SET_DEST (set), new_rtx);
1631 seq = get_insns ();
1632 end_sequence ();
1633
1634 emit_insn_before (seq, insn);
1635 delete_insn (insn);
1636 return;
1637 }
1638
1639 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1640
1641 /* Using validate_change and apply_change_group here leaves
1642 recog_data in an invalid state. Since we know exactly what
1643 we want to check, do those two by hand. */
1644 if (safe_insn_predicate (insn_code, 1, new_rtx)
1645 && safe_insn_predicate (insn_code, 2, x))
1646 {
1647 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1648 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1649 any_change = true;
1650
1651 /* Fall through into the regular operand fixup loop in
1652 order to take care of operands other than 1 and 2. */
1653 }
1654 }
1655 }
1656 else
1657 {
1658 extract_insn (insn);
1659 insn_code = INSN_CODE (insn);
1660 }
1661
1662 /* In the general case, we expect virtual registers to appear only in
1663 operands, and then only as either bare registers or inside memories. */
1664 for (i = 0; i < recog_data.n_operands; ++i)
1665 {
1666 x = recog_data.operand[i];
1667 switch (GET_CODE (x))
1668 {
1669 case MEM:
1670 {
1671 rtx addr = XEXP (x, 0);
1672
1673 if (!instantiate_virtual_regs_in_rtx (&addr))
1674 continue;
1675
1676 start_sequence ();
1677 x = replace_equiv_address (x, addr, true);
1678 /* It may happen that the address with the virtual reg
1679 was valid (e.g. based on the virtual stack reg, which might
1680 be acceptable to the predicates with all offsets), whereas
1681 the address now isn't anymore, for instance when the address
1682 is still offsetted, but the base reg isn't virtual-stack-reg
1683 anymore. Below we would do a force_reg on the whole operand,
1684 but this insn might actually only accept memory. Hence,
1685 before doing that last resort, try to reload the address into
1686 a register, so this operand stays a MEM. */
1687 if (!safe_insn_predicate (insn_code, i, x))
1688 {
1689 addr = force_reg (GET_MODE (addr), addr);
1690 x = replace_equiv_address (x, addr, true);
1691 }
1692 seq = get_insns ();
1693 end_sequence ();
1694 if (seq)
1695 emit_insn_before (seq, insn);
1696 }
1697 break;
1698
1699 case REG:
1700 new_rtx = instantiate_new_reg (x, &offset);
1701 if (new_rtx == NULL)
1702 continue;
1703 if (known_eq (offset, 0))
1704 x = new_rtx;
1705 else
1706 {
1707 start_sequence ();
1708
1709 /* Careful, special mode predicates may have stuff in
1710 insn_data[insn_code].operand[i].mode that isn't useful
1711 to us for computing a new value. */
1712 /* ??? Recognize address_operand and/or "p" constraints
1713 to see if (plus new offset) is a valid address before we put
1714 this through expand_simple_binop. */
1715 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1716 gen_int_mode (offset, GET_MODE (x)),
1717 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1718 seq = get_insns ();
1719 end_sequence ();
1720 emit_insn_before (seq, insn);
1721 }
1722 break;
1723
1724 case SUBREG:
1725 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1726 if (new_rtx == NULL)
1727 continue;
1728 if (maybe_ne (offset, 0))
1729 {
1730 start_sequence ();
1731 new_rtx = expand_simple_binop
1732 (GET_MODE (new_rtx), PLUS, new_rtx,
1733 gen_int_mode (offset, GET_MODE (new_rtx)),
1734 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1735 seq = get_insns ();
1736 end_sequence ();
1737 emit_insn_before (seq, insn);
1738 }
1739 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1740 GET_MODE (new_rtx), SUBREG_BYTE (x));
1741 gcc_assert (x);
1742 break;
1743
1744 default:
1745 continue;
1746 }
1747
1748 /* At this point, X contains the new value for the operand.
1749 Validate the new value vs the insn predicate. Note that
1750 asm insns will have insn_code -1 here. */
1751 if (!safe_insn_predicate (insn_code, i, x))
1752 {
1753 start_sequence ();
1754 if (REG_P (x))
1755 {
1756 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1757 x = copy_to_reg (x);
1758 }
1759 else
1760 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1761 seq = get_insns ();
1762 end_sequence ();
1763 if (seq)
1764 emit_insn_before (seq, insn);
1765 }
1766
1767 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1768 any_change = true;
1769 }
1770
1771 if (any_change)
1772 {
1773 /* Propagate operand changes into the duplicates. */
1774 for (i = 0; i < recog_data.n_dups; ++i)
1775 *recog_data.dup_loc[i]
1776 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1777
1778 /* Force re-recognition of the instruction for validation. */
1779 INSN_CODE (insn) = -1;
1780 }
1781
1782 if (asm_noperands (PATTERN (insn)) >= 0)
1783 {
1784 if (!check_asm_operands (PATTERN (insn)))
1785 {
1786 error_for_asm (insn, "impossible constraint in %<asm%>");
1787 /* For asm goto, instead of fixing up all the edges
1788 just clear the template and clear input operands
1789 (asm goto doesn't have any output operands). */
1790 if (JUMP_P (insn))
1791 {
1792 rtx asm_op = extract_asm_operands (PATTERN (insn));
1793 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1794 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1795 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1796 }
1797 else
1798 delete_insn (insn);
1799 }
1800 }
1801 else
1802 {
1803 if (recog_memoized (insn) < 0)
1804 fatal_insn_not_found (insn);
1805 }
1806 }
1807
1808 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1809 do any instantiation required. */
1810
1811 void
1812 instantiate_decl_rtl (rtx x)
1813 {
1814 rtx addr;
1815
1816 if (x == 0)
1817 return;
1818
1819 /* If this is a CONCAT, recurse for the pieces. */
1820 if (GET_CODE (x) == CONCAT)
1821 {
1822 instantiate_decl_rtl (XEXP (x, 0));
1823 instantiate_decl_rtl (XEXP (x, 1));
1824 return;
1825 }
1826
1827 /* If this is not a MEM, no need to do anything. Similarly if the
1828 address is a constant or a register that is not a virtual register. */
1829 if (!MEM_P (x))
1830 return;
1831
1832 addr = XEXP (x, 0);
1833 if (CONSTANT_P (addr)
1834 || (REG_P (addr)
1835 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1836 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1837 return;
1838
1839 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1840 }
1841
1842 /* Helper for instantiate_decls called via walk_tree: Process all decls
1843 in the given DECL_VALUE_EXPR. */
1844
1845 static tree
1846 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1847 {
1848 tree t = *tp;
1849 if (! EXPR_P (t))
1850 {
1851 *walk_subtrees = 0;
1852 if (DECL_P (t))
1853 {
1854 if (DECL_RTL_SET_P (t))
1855 instantiate_decl_rtl (DECL_RTL (t));
1856 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1857 && DECL_INCOMING_RTL (t))
1858 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1859 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1860 && DECL_HAS_VALUE_EXPR_P (t))
1861 {
1862 tree v = DECL_VALUE_EXPR (t);
1863 walk_tree (&v, instantiate_expr, NULL, NULL);
1864 }
1865 }
1866 }
1867 return NULL;
1868 }
1869
1870 /* Subroutine of instantiate_decls: Process all decls in the given
1871 BLOCK node and all its subblocks. */
1872
1873 static void
1874 instantiate_decls_1 (tree let)
1875 {
1876 tree t;
1877
1878 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1879 {
1880 if (DECL_RTL_SET_P (t))
1881 instantiate_decl_rtl (DECL_RTL (t));
1882 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1883 {
1884 tree v = DECL_VALUE_EXPR (t);
1885 walk_tree (&v, instantiate_expr, NULL, NULL);
1886 }
1887 }
1888
1889 /* Process all subblocks. */
1890 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1891 instantiate_decls_1 (t);
1892 }
1893
1894 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1895 all virtual registers in their DECL_RTL's. */
1896
1897 static void
1898 instantiate_decls (tree fndecl)
1899 {
1900 tree decl;
1901 unsigned ix;
1902
1903 /* Process all parameters of the function. */
1904 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1905 {
1906 instantiate_decl_rtl (DECL_RTL (decl));
1907 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1908 if (DECL_HAS_VALUE_EXPR_P (decl))
1909 {
1910 tree v = DECL_VALUE_EXPR (decl);
1911 walk_tree (&v, instantiate_expr, NULL, NULL);
1912 }
1913 }
1914
1915 if ((decl = DECL_RESULT (fndecl))
1916 && TREE_CODE (decl) == RESULT_DECL)
1917 {
1918 if (DECL_RTL_SET_P (decl))
1919 instantiate_decl_rtl (DECL_RTL (decl));
1920 if (DECL_HAS_VALUE_EXPR_P (decl))
1921 {
1922 tree v = DECL_VALUE_EXPR (decl);
1923 walk_tree (&v, instantiate_expr, NULL, NULL);
1924 }
1925 }
1926
1927 /* Process the saved static chain if it exists. */
1928 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1929 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1930 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1931
1932 /* Now process all variables defined in the function or its subblocks. */
1933 if (DECL_INITIAL (fndecl))
1934 instantiate_decls_1 (DECL_INITIAL (fndecl));
1935
1936 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1937 if (DECL_RTL_SET_P (decl))
1938 instantiate_decl_rtl (DECL_RTL (decl));
1939 vec_free (cfun->local_decls);
1940 }
1941
1942 /* Pass through the insns of the current function and convert virtual
1943 register references to hard register references. */
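/* Editorial illustration, not part of the original source: the net effect
   of this pass is that an address which still mentions a virtual register,
   for instance

       (plus (reg virtual-stack-vars) (const_int 8))

   ends up referring to the corresponding hard register (frame, argument or
   stack pointer) with the offset adjusted by the per-register bias computed
   at the start of this function, e.g. var_offset for virtual-stack-vars.  */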
1944
1945 static unsigned int
1946 instantiate_virtual_regs (void)
1947 {
1948 rtx_insn *insn;
1949
1950 /* Compute the offsets to use for this function. */
1951 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1952 var_offset = targetm.starting_frame_offset ();
1953 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1954 out_arg_offset = STACK_POINTER_OFFSET;
1955 #ifdef FRAME_POINTER_CFA_OFFSET
1956 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1957 #else
1958 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1959 #endif
1960
1961 /* Initialize recognition, indicating that volatile is OK. */
1962 init_recog ();
1963
1964 /* Scan through all the insns, instantiating every virtual register still
1965 present. */
1966 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1967 if (INSN_P (insn))
1968 {
1969 /* These patterns in the instruction stream can never be recognized.
1970 Fortunately, they shouldn't contain virtual registers either. */
1971 if (GET_CODE (PATTERN (insn)) == USE
1972 || GET_CODE (PATTERN (insn)) == CLOBBER
1973 || GET_CODE (PATTERN (insn)) == ASM_INPUT
1974 || DEBUG_MARKER_INSN_P (insn))
1975 continue;
1976 else if (DEBUG_BIND_INSN_P (insn))
1977 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1978 else
1979 instantiate_virtual_regs_in_insn (insn);
1980
1981 if (insn->deleted ())
1982 continue;
1983
1984 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1985
1986 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1987 if (CALL_P (insn))
1988 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1989 }
1990
1991 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1992 instantiate_decls (current_function_decl);
1993
1994 targetm.instantiate_decls ();
1995
1996 /* Indicate that, from now on, assign_stack_local should use
1997 frame_pointer_rtx. */
1998 virtuals_instantiated = 1;
1999
2000 return 0;
2001 }
2002
2003 namespace {
2004
2005 const pass_data pass_data_instantiate_virtual_regs =
2006 {
2007 RTL_PASS, /* type */
2008 "vregs", /* name */
2009 OPTGROUP_NONE, /* optinfo_flags */
2010 TV_NONE, /* tv_id */
2011 0, /* properties_required */
2012 0, /* properties_provided */
2013 0, /* properties_destroyed */
2014 0, /* todo_flags_start */
2015 0, /* todo_flags_finish */
2016 };
2017
2018 class pass_instantiate_virtual_regs : public rtl_opt_pass
2019 {
2020 public:
2021 pass_instantiate_virtual_regs (gcc::context *ctxt)
2022 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2023 {}
2024
2025 /* opt_pass methods: */
2026 virtual unsigned int execute (function *)
2027 {
2028 return instantiate_virtual_regs ();
2029 }
2030
2031 }; // class pass_instantiate_virtual_regs
2032
2033 } // anon namespace
2034
2035 rtl_opt_pass *
2036 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2037 {
2038 return new pass_instantiate_virtual_regs (ctxt);
2039 }
2040
2041 \f
2042 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2043 This means a type for which function calls must pass an address to the
2044 function or get an address back from the function.
2045 EXP may be a type node or an expression (whose type is tested). */
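/* Editorial sketch, not part of the original source: for a C function

       struct big { char buf[64]; };
       struct big f (void);

   this typically returns 1 on common targets because the return_in_memory
   hook reports that the value cannot come back in registers, whereas for
   "int f (void)" it returns 0.  The exact answer is target- and
   ABI-dependent.  */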
2046
2047 int
2048 aggregate_value_p (const_tree exp, const_tree fntype)
2049 {
2050 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2051 int i, regno, nregs;
2052 rtx reg;
2053
2054 if (fntype)
2055 switch (TREE_CODE (fntype))
2056 {
2057 case CALL_EXPR:
2058 {
2059 tree fndecl = get_callee_fndecl (fntype);
2060 if (fndecl)
2061 fntype = TREE_TYPE (fndecl);
2062 else if (CALL_EXPR_FN (fntype))
2063 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2064 else
2065 /* For internal functions, assume nothing needs to be
2066 returned in memory. */
2067 return 0;
2068 }
2069 break;
2070 case FUNCTION_DECL:
2071 fntype = TREE_TYPE (fntype);
2072 break;
2073 case FUNCTION_TYPE:
2074 case METHOD_TYPE:
2075 break;
2076 case IDENTIFIER_NODE:
2077 fntype = NULL_TREE;
2078 break;
2079 default:
2080 /* We don't expect other tree types here. */
2081 gcc_unreachable ();
2082 }
2083
2084 if (VOID_TYPE_P (type))
2085 return 0;
2086
2087 /* If a record should be passed the same as its first (and only) member,
2088 don't pass it as an aggregate. */
2089 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2090 return aggregate_value_p (first_field (type), fntype);
2091
2092 /* If the front end has decided that this needs to be passed by
2093 reference, do so. */
2094 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2095 && DECL_BY_REFERENCE (exp))
2096 return 1;
2097
2098 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2099 if (fntype && TREE_ADDRESSABLE (fntype))
2100 return 1;
2101
2102 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2103 and thus can't be returned in registers. */
2104 if (TREE_ADDRESSABLE (type))
2105 return 1;
2106
2107 if (TYPE_EMPTY_P (type))
2108 return 0;
2109
2110 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2111 return 1;
2112
2113 if (targetm.calls.return_in_memory (type, fntype))
2114 return 1;
2115
2116 /* Make sure we have suitable call-clobbered regs to return
2117 the value in; if not, we must return it in memory. */
2118 reg = hard_function_value (type, 0, fntype, 0);
2119
2120 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2121 it is OK. */
2122 if (!REG_P (reg))
2123 return 0;
2124
2125 /* Use the default ABI if the type of the function isn't known.
2126 The scheme for handling interoperability between different ABIs
2127 requires us to be able to tell when we're calling a function with
2128 a nondefault ABI. */
2129 const predefined_function_abi &abi = (fntype
2130 ? fntype_abi (fntype)
2131 : default_function_abi);
2132 regno = REGNO (reg);
2133 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2134 for (i = 0; i < nregs; i++)
2135 if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
2136 return 1;
2137
2138 return 0;
2139 }
2140 \f
2141 /* Return true if we should assign DECL a pseudo register; false if it
2142 should live on the local stack. */
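/* Editorial note, not part of the original source: as a rough example of
   the logic below, at -O0 a named user variable such as "int x" normally
   ends up on the stack (we are not optimizing and neither DECL_IGNORED_P
   nor DECL_REGISTER is set), while an artificial, ignored temporary of
   register-like mode is given a pseudo.  */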
2143
2144 bool
2145 use_register_for_decl (const_tree decl)
2146 {
2147 if (TREE_CODE (decl) == SSA_NAME)
2148 {
2149 /* We often try to use the SSA_NAME, instead of its underlying
2150 decl, to get type information and guide decisions, to avoid
2151 differences of behavior between anonymous and named
2152 variables, but in this one case we have to go for the actual
2153 variable if there is one. The main reason is that, at least
2154 at -O0, we want to place user variables on the stack, but we
2155 don't mind using pseudos for anonymous or ignored temps.
2156 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2157 should go in pseudos, whereas their corresponding variables
2158 might have to go on the stack. So, disregarding the decl
2159 here would negatively impact debug info at -O0, enable
2160 coalescing between SSA_NAMEs that ought to get different
2161 stack/pseudo assignments, and get the incoming argument
2162 processing thoroughly confused by PARM_DECLs expected to live
2163 in stack slots but assigned to pseudos. */
2164 if (!SSA_NAME_VAR (decl))
2165 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2166 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2167
2168 decl = SSA_NAME_VAR (decl);
2169 }
2170
2171 /* Honor volatile. */
2172 if (TREE_SIDE_EFFECTS (decl))
2173 return false;
2174
2175 /* Honor addressability. */
2176 if (TREE_ADDRESSABLE (decl))
2177 return false;
2178
2179 /* RESULT_DECLs are a bit special in that they're assigned without
2180 regard to use_register_for_decl, but we generally only store in
2181 them. If we coalesce their SSA NAMEs, we'd better return a
2182 result that matches the assignment in expand_function_start. */
2183 if (TREE_CODE (decl) == RESULT_DECL)
2184 {
2185 /* If it's not an aggregate, we're going to use a REG or a
2186 PARALLEL containing a REG. */
2187 if (!aggregate_value_p (decl, current_function_decl))
2188 return true;
2189
2190 /* If expand_function_start determines the return value, we'll
2191 use MEM if it's not by reference. */
2192 if (cfun->returns_pcc_struct
2193 || (targetm.calls.struct_value_rtx
2194 (TREE_TYPE (current_function_decl), 1)))
2195 return DECL_BY_REFERENCE (decl);
2196
2197 /* Otherwise, we're taking an extra all.function_result_decl
2198 argument. It's set up in assign_parms_augmented_arg_list,
2199 under the (negated) conditions above, and then it's used to
2200 set up the RESULT_DECL rtl in assign_parms, after looping
2201 over all parameters. Now, if the RESULT_DECL is not by
2202 reference, we'll use a MEM either way. */
2203 if (!DECL_BY_REFERENCE (decl))
2204 return false;
2205
2206 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2207 the function_result_decl's assignment. Since it's a pointer,
2208 we can short-circuit a number of the tests below, and we must
2209 duplicate them because we don't have the function_result_decl
2210 to test. */
2211 if (!targetm.calls.allocate_stack_slots_for_args ())
2212 return true;
2213 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2214 if (optimize)
2215 return true;
2216 if (cfun->tail_call_marked)
2217 return true;
2218 /* We don't set DECL_REGISTER for the function_result_decl. */
2219 return false;
2220 }
2221
2222 /* Only register-like things go in registers. */
2223 if (DECL_MODE (decl) == BLKmode)
2224 return false;
2225
2226 /* If -ffloat-store specified, don't put explicit float variables
2227 into registers. */
2228 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2229 propagates values across these stores, and it probably shouldn't. */
2230 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2231 return false;
2232
2233 if (!targetm.calls.allocate_stack_slots_for_args ())
2234 return true;
2235
2236 /* If we're not interested in tracking debugging information for
2237 this decl, then we can certainly put it in a register. */
2238 if (DECL_IGNORED_P (decl))
2239 return true;
2240
2241 if (optimize)
2242 return true;
2243
2244 /* Thunks force a tail call even at -O0 so we need to avoid creating a
2245 dangling reference in case the parameter is passed by reference. */
2246 if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
2247 return true;
2248
2249 if (!DECL_REGISTER (decl))
2250 return false;
2251
2252 /* When not optimizing, disregard register keyword for types that
2253 could have methods, otherwise the methods won't be callable from
2254 the debugger. */
2255 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2256 return false;
2257
2258 return true;
2259 }
2260
2261 /* Structures to communicate between the subroutines of assign_parms.
2262 The first holds data persistent across all parameters, the second
2263 is cleared out for each parameter. */
2264
2265 struct assign_parm_data_all
2266 {
2267 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2268 should become a job of the target or otherwise encapsulated. */
2269 CUMULATIVE_ARGS args_so_far_v;
2270 cumulative_args_t args_so_far;
2271 struct args_size stack_args_size;
2272 tree function_result_decl;
2273 tree orig_fnargs;
2274 rtx_insn *first_conversion_insn;
2275 rtx_insn *last_conversion_insn;
2276 HOST_WIDE_INT pretend_args_size;
2277 HOST_WIDE_INT extra_pretend_bytes;
2278 int reg_parm_stack_space;
2279 };
2280
2281 struct assign_parm_data_one
2282 {
2283 tree nominal_type;
2284 function_arg_info arg;
2285 rtx entry_parm;
2286 rtx stack_parm;
2287 machine_mode nominal_mode;
2288 machine_mode passed_mode;
2289 struct locate_and_pad_arg_data locate;
2290 int partial;
2291 };
2292
2293 /* A subroutine of assign_parms. Initialize ALL. */
2294
2295 static void
2296 assign_parms_initialize_all (struct assign_parm_data_all *all)
2297 {
2298 tree fntype ATTRIBUTE_UNUSED;
2299
2300 memset (all, 0, sizeof (*all));
2301
2302 fntype = TREE_TYPE (current_function_decl);
2303
2304 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2305 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2306 #else
2307 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2308 current_function_decl, -1);
2309 #endif
2310 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2311
2312 #ifdef INCOMING_REG_PARM_STACK_SPACE
2313 all->reg_parm_stack_space
2314 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2315 #endif
2316 }
2317
2318 /* If ARGS contains entries with complex types, split each such entry into
2319 two entries of the component type. ARGS is modified in place: the entry
2320 is rewritten to the component type and a synthetic decl follows it. */
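/* Editorial illustration, not part of the original source: on a target
   whose split_complex_arg hook accepts the type, a parameter declared as
   "_Complex double z" becomes a double PARM_DECL for the real part
   followed by a second, synthetic double PARM_DECL for the imaginary
   part.  */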
2321
2322 static void
2323 split_complex_args (vec<tree> *args)
2324 {
2325 unsigned i;
2326 tree p;
2327
2328 FOR_EACH_VEC_ELT (*args, i, p)
2329 {
2330 tree type = TREE_TYPE (p);
2331 if (TREE_CODE (type) == COMPLEX_TYPE
2332 && targetm.calls.split_complex_arg (type))
2333 {
2334 tree decl;
2335 tree subtype = TREE_TYPE (type);
2336 bool addressable = TREE_ADDRESSABLE (p);
2337
2338 /* Rewrite the PARM_DECL's type with its component. */
2339 p = copy_node (p);
2340 TREE_TYPE (p) = subtype;
2341 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2342 SET_DECL_MODE (p, VOIDmode);
2343 DECL_SIZE (p) = NULL;
2344 DECL_SIZE_UNIT (p) = NULL;
2345 /* If this arg must go in memory, put it in a pseudo here.
2346 We can't allow it to go in memory as per normal parms,
2347 because the usual place might not have the imag part
2348 adjacent to the real part. */
2349 DECL_ARTIFICIAL (p) = addressable;
2350 DECL_IGNORED_P (p) = addressable;
2351 TREE_ADDRESSABLE (p) = 0;
2352 layout_decl (p, 0);
2353 (*args)[i] = p;
2354
2355 /* Build a second synthetic decl. */
2356 decl = build_decl (EXPR_LOCATION (p),
2357 PARM_DECL, NULL_TREE, subtype);
2358 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2359 DECL_ARTIFICIAL (decl) = addressable;
2360 DECL_IGNORED_P (decl) = addressable;
2361 layout_decl (decl, 0);
2362 args->safe_insert (++i, decl);
2363 }
2364 }
2365 }
2366
2367 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2368 the hidden struct return argument, and (abi willing) complex args.
2369 Return the new parameter list. */
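/* Editorial example, not part of the original source: for a function that
   returns a large struct in memory on a target whose struct_value_rtx hook
   returns null, the code below creates a synthetic ".result_ptr" PARM_DECL
   and prepends it, so the rest of assign_parms treats the hidden return
   slot address like an ordinary first parameter.  */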
2370
2371 static vec<tree>
2372 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2373 {
2374 tree fndecl = current_function_decl;
2375 tree fntype = TREE_TYPE (fndecl);
2376 vec<tree> fnargs = vNULL;
2377 tree arg;
2378
2379 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2380 fnargs.safe_push (arg);
2381
2382 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2383
2384 /* If struct value address is treated as the first argument, make it so. */
2385 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2386 && ! cfun->returns_pcc_struct
2387 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2388 {
2389 tree type = build_pointer_type (TREE_TYPE (fntype));
2390 tree decl;
2391
2392 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2393 PARM_DECL, get_identifier (".result_ptr"), type);
2394 DECL_ARG_TYPE (decl) = type;
2395 DECL_ARTIFICIAL (decl) = 1;
2396 DECL_NAMELESS (decl) = 1;
2397 TREE_CONSTANT (decl) = 1;
2398 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2399 changes, the end of the RESULT_DECL handling block in
2400 use_register_for_decl must be adjusted to match. */
2401
2402 DECL_CHAIN (decl) = all->orig_fnargs;
2403 all->orig_fnargs = decl;
2404 fnargs.safe_insert (0, decl);
2405
2406 all->function_result_decl = decl;
2407 }
2408
2409 /* If the target wants to split complex arguments into scalars, do so. */
2410 if (targetm.calls.split_complex_arg)
2411 split_complex_args (&fnargs);
2412
2413 return fnargs;
2414 }
2415
2416 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2417 data for the parameter. Incorporate ABI specifics such as pass-by-
2418 reference and type promotion. */
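/* Editorial note, not part of the original source: as an example of the
   promotion step at the end of this function, ABIs that widen sub-word
   arguments (many RISC targets) leave a "short" parameter's nominal_mode
   as HImode but let promote_function_mode widen arg.mode to the word-sized
   integer mode; ABIs that do not promote leave the two the same.  */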
2419
2420 static void
2421 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2422 struct assign_parm_data_one *data)
2423 {
2424 int unsignedp;
2425
2426 #ifndef BROKEN_VALUE_INITIALIZATION
2427 *data = assign_parm_data_one ();
2428 #else
2429 /* Old versions of GCC used to miscompile the above by only initializing
2430 the members with explicit constructors and copying garbage
2431 to the other members. */
2432 assign_parm_data_one zero_data = {};
2433 *data = zero_data;
2434 #endif
2435
2436 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2437 if (!cfun->stdarg)
2438 data->arg.named = 1; /* No variadic parms. */
2439 else if (DECL_CHAIN (parm))
2440 data->arg.named = 1; /* Not the last non-variadic parm. */
2441 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2442 data->arg.named = 1; /* Only variadic ones are unnamed. */
2443 else
2444 data->arg.named = 0; /* Treat as variadic. */
2445
2446 data->nominal_type = TREE_TYPE (parm);
2447 data->arg.type = DECL_ARG_TYPE (parm);
2448
2449 /* Look out for errors propagating this far. Also, if the parameter's
2450 type is void then its value doesn't matter. */
2451 if (TREE_TYPE (parm) == error_mark_node
2452 /* This can happen after weird syntax errors
2453 or if an enum type is defined among the parms. */
2454 || TREE_CODE (parm) != PARM_DECL
2455 || data->arg.type == NULL
2456 || VOID_TYPE_P (data->nominal_type))
2457 {
2458 data->nominal_type = data->arg.type = void_type_node;
2459 data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2460 return;
2461 }
2462
2463 /* Find mode of arg as it is passed, and mode of arg as it should be
2464 during execution of this function. */
2465 data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2466 data->nominal_mode = TYPE_MODE (data->nominal_type);
2467
2468 /* If the parm is to be passed as a transparent union or record, use the
2469 type of the first field for the tests below. We have already verified
2470 that the modes are the same. */
2471 if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2472 && TYPE_TRANSPARENT_AGGR (data->arg.type))
2473 data->arg.type = TREE_TYPE (first_field (data->arg.type));
2474
2475 /* See if this arg was passed by invisible reference. */
2476 if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2477 {
2478 data->nominal_type = data->arg.type;
2479 data->passed_mode = data->nominal_mode = data->arg.mode;
2480 }
2481
2482 /* Find mode as it is passed by the ABI. */
2483 unsignedp = TYPE_UNSIGNED (data->arg.type);
2484 data->arg.mode
2485 = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2486 TREE_TYPE (current_function_decl), 0);
2487 }
2488
2489 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2490
2491 static void
2492 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2493 struct assign_parm_data_one *data, bool no_rtl)
2494 {
2495 int varargs_pretend_bytes = 0;
2496
2497 function_arg_info last_named_arg = data->arg;
2498 last_named_arg.named = true;
2499 targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2500 &varargs_pretend_bytes, no_rtl);
2501
2502 /* If the back-end has requested extra stack space, record how much is
2503 needed. Do not change pretend_args_size otherwise since it may be
2504 nonzero from an earlier partial argument. */
2505 if (varargs_pretend_bytes > 0)
2506 all->pretend_args_size = varargs_pretend_bytes;
2507 }
2508
2509 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2510 the incoming location of the current parameter. */
2511
2512 static void
2513 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2514 struct assign_parm_data_one *data)
2515 {
2516 HOST_WIDE_INT pretend_bytes = 0;
2517 rtx entry_parm;
2518 bool in_regs;
2519
2520 if (data->arg.mode == VOIDmode)
2521 {
2522 data->entry_parm = data->stack_parm = const0_rtx;
2523 return;
2524 }
2525
2526 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2527 data->arg.type);
2528
2529 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2530 data->arg);
2531 if (entry_parm == 0)
2532 data->arg.mode = data->passed_mode;
2533
2534 /* Determine parm's home in the stack, in case it arrives in the stack
2535 or we should pretend it did. Compute the stack position and rtx where
2536 the argument arrives and its size.
2537
2538 There is one complexity here: If this was a parameter that would
2539 have been passed in registers, but wasn't only because it is
2540 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2541 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2542 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2543 as it was the previous time. */
2544 in_regs = (entry_parm != 0);
2545 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2546 in_regs = true;
2547 #endif
2548 if (!in_regs && !data->arg.named)
2549 {
2550 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2551 {
2552 rtx tem;
2553 function_arg_info named_arg = data->arg;
2554 named_arg.named = true;
2555 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2556 named_arg);
2557 in_regs = tem != NULL;
2558 }
2559 }
2560
2561 /* If this parameter was passed both in registers and in the stack, use
2562 the copy on the stack. */
2563 if (targetm.calls.must_pass_in_stack (data->arg))
2564 entry_parm = 0;
2565
2566 if (entry_parm)
2567 {
2568 int partial;
2569
2570 partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2571 data->partial = partial;
2572
2573 /* The caller might already have allocated stack space for the
2574 register parameters. */
2575 if (partial != 0 && all->reg_parm_stack_space == 0)
2576 {
2577 /* Part of this argument is passed in registers and part
2578 is passed on the stack. Ask the prologue code to extend
2579 the stack part so that we can recreate the full value.
2580
2581 PRETEND_BYTES is the size of the registers we need to store.
2582 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2583 stack space that the prologue should allocate.
2584
2585 Internally, gcc assumes that the argument pointer is aligned
2586 to STACK_BOUNDARY bits. This is used both for alignment
2587 optimizations (see init_emit) and to locate arguments that are
2588 aligned to more than PARM_BOUNDARY bits. We must preserve this
2589 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2590 a stack boundary. */
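/* Editorial example, not part of the original source: if 12 bytes of the
   argument were passed in registers and STACK_BYTES is 16, then
   CEIL_ROUND (12, 16) sets pretend_args_size to 16.  */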
2591
2592 /* We assume at most one partial arg, and it must be the first
2593 argument on the stack. */
2594 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2595
2596 pretend_bytes = partial;
2597 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2598
2599 /* We want to align relative to the actual stack pointer, so
2600 don't include this in the stack size until later. */
2601 all->extra_pretend_bytes = all->pretend_args_size;
2602 }
2603 }
2604
2605 locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2606 all->reg_parm_stack_space,
2607 entry_parm ? data->partial : 0, current_function_decl,
2608 &all->stack_args_size, &data->locate);
2609
2610 /* Update parm_stack_boundary if this parameter is passed in the
2611 stack. */
2612 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2613 crtl->parm_stack_boundary = data->locate.boundary;
2614
2615 /* Adjust offsets to include the pretend args. */
2616 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2617 data->locate.slot_offset.constant += pretend_bytes;
2618 data->locate.offset.constant += pretend_bytes;
2619
2620 data->entry_parm = entry_parm;
2621 }
2622
2623 /* A subroutine of assign_parms. If there is actually space on the stack
2624 for this parm, count it in stack_args_size and return true. */
2625
2626 static bool
2627 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2628 struct assign_parm_data_one *data)
2629 {
2630 /* Trivially true if we've no incoming register. */
2631 if (data->entry_parm == NULL)
2632 ;
2633 /* Also true if we're partially in registers and partially not,
2634 since we've arranged to drop the entire argument on the stack. */
2635 else if (data->partial != 0)
2636 ;
2637 /* Also true if the target says that it's passed in both registers
2638 and on the stack. */
2639 else if (GET_CODE (data->entry_parm) == PARALLEL
2640 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2641 ;
2642 /* Also true if the target says that there's stack allocated for
2643 all register parameters. */
2644 else if (all->reg_parm_stack_space > 0)
2645 ;
2646 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2647 else
2648 return false;
2649
2650 all->stack_args_size.constant += data->locate.size.constant;
2651 if (data->locate.size.var)
2652 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2653
2654 return true;
2655 }
2656
2657 /* A subroutine of assign_parms. Given that this parameter is allocated
2658 stack space by the ABI, find it. */
2659
2660 static void
2661 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2662 {
2663 rtx offset_rtx, stack_parm;
2664 unsigned int align, boundary;
2665
2666 /* If we're passing this arg using a reg, make its stack home the
2667 aligned stack slot. */
2668 if (data->entry_parm)
2669 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2670 else
2671 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2672
2673 stack_parm = crtl->args.internal_arg_pointer;
2674 if (offset_rtx != const0_rtx)
2675 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2676 stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2677
2678 if (!data->arg.pass_by_reference)
2679 {
2680 set_mem_attributes (stack_parm, parm, 1);
2681 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2682 while promoted mode's size is needed. */
2683 if (data->arg.mode != BLKmode
2684 && data->arg.mode != DECL_MODE (parm))
2685 {
2686 set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2687 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2688 {
2689 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2690 data->arg.mode);
2691 if (maybe_ne (offset, 0))
2692 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2693 }
2694 }
2695 }
2696
2697 boundary = data->locate.boundary;
2698 align = BITS_PER_UNIT;
2699
2700 /* If we're padding upward, we know that the alignment of the slot
2701 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2702 intentionally forcing upward padding. Otherwise we have to come
2703 up with a guess at the alignment based on OFFSET_RTX. */
2704 poly_int64 offset;
2705 if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2706 align = boundary;
2707 else if (data->locate.where_pad == PAD_UPWARD)
2708 {
2709 align = boundary;
2710 /* If the argument offset is actually more aligned than the nominal
2711 stack slot boundary, take advantage of that excess alignment.
2712 Don't make any assumptions if STACK_POINTER_OFFSET is in use. */
2713 if (poly_int_rtx_p (offset_rtx, &offset)
2714 && known_eq (STACK_POINTER_OFFSET, 0))
2715 {
2716 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2717 if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2718 offset_align = STACK_BOUNDARY;
2719 align = MAX (align, offset_align);
2720 }
2721 }
2722 else if (poly_int_rtx_p (offset_rtx, &offset))
2723 {
2724 align = least_bit_hwi (boundary);
2725 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2726 if (offset_align != 0)
2727 align = MIN (align, offset_align);
2728 }
2729 set_mem_align (stack_parm, align);
2730
2731 if (data->entry_parm)
2732 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2733
2734 data->stack_parm = stack_parm;
2735 }
2736
2737 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2738 always valid and contiguous. */
2739
2740 static void
2741 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2742 {
2743 rtx entry_parm = data->entry_parm;
2744 rtx stack_parm = data->stack_parm;
2745
2746 /* If this parm was passed part in regs and part in memory, pretend it
2747 arrived entirely in memory by pushing the register-part onto the stack.
2748 In the special case of a DImode or DFmode that is split, we could put
2749 it together in a pseudoreg directly, but for now that's not worth
2750 bothering with. */
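/* Editorial example, not part of the original source: if the first 8 bytes
   of a larger struct arrived in the last available argument register
   (data->partial == 8 with 8-byte words), the code below copies that word
   from the register into the start of the parameter's stack slot, so from
   here on the whole argument can be treated as if it had arrived in
   memory.  */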
2751 if (data->partial != 0)
2752 {
2753 /* Handle calls that pass values in multiple non-contiguous
2754 locations. The Irix 6 ABI has examples of this. */
2755 if (GET_CODE (entry_parm) == PARALLEL)
2756 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2757 data->arg.type, int_size_in_bytes (data->arg.type));
2758 else
2759 {
2760 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2761 move_block_from_reg (REGNO (entry_parm),
2762 validize_mem (copy_rtx (stack_parm)),
2763 data->partial / UNITS_PER_WORD);
2764 }
2765
2766 entry_parm = stack_parm;
2767 }
2768
2769 /* If we didn't decide this parm came in a register, by default it came
2770 on the stack. */
2771 else if (entry_parm == NULL)
2772 entry_parm = stack_parm;
2773
2774 /* When an argument is passed in multiple locations, we can't make use
2775 of this information, but we can save some copying if the whole argument
2776 is passed in a single register. */
2777 else if (GET_CODE (entry_parm) == PARALLEL
2778 && data->nominal_mode != BLKmode
2779 && data->passed_mode != BLKmode)
2780 {
2781 size_t i, len = XVECLEN (entry_parm, 0);
2782
2783 for (i = 0; i < len; i++)
2784 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2785 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2786 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2787 == data->passed_mode)
2788 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2789 {
2790 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2791 break;
2792 }
2793 }
2794
2795 data->entry_parm = entry_parm;
2796 }
2797
2798 /* A subroutine of assign_parms. Reconstitute any values which were
2799 passed in multiple registers and would fit in a single register. */
2800
2801 static void
2802 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2803 {
2804 rtx entry_parm = data->entry_parm;
2805
2806 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2807 This can be done with register operations rather than on the
2808 stack, even if we will store the reconstituted parameter on the
2809 stack later. */
2810 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2811 {
2812 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2813 emit_group_store (parmreg, entry_parm, data->arg.type,
2814 GET_MODE_SIZE (GET_MODE (entry_parm)));
2815 entry_parm = parmreg;
2816 }
2817
2818 data->entry_parm = entry_parm;
2819 }
2820
2821 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2822 always valid and properly aligned. */
2823
2824 static void
2825 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2826 {
2827 rtx stack_parm = data->stack_parm;
2828
2829 /* If we can't trust the parm stack slot to be aligned enough for its
2830 ultimate type, don't use that slot after entry. We'll make another
2831 stack slot, if we need one. */
2832 if (stack_parm
2833 && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2834 && ((optab_handler (movmisalign_optab, data->nominal_mode)
2835 != CODE_FOR_nothing)
2836 || targetm.slow_unaligned_access (data->nominal_mode,
2837 MEM_ALIGN (stack_parm))))
2838 || (data->nominal_type
2839 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2840 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2841 stack_parm = NULL;
2842
2843 /* If parm was passed in memory, and we need to convert it on entry,
2844 don't store it back in that same slot. */
2845 else if (data->entry_parm == stack_parm
2846 && data->nominal_mode != BLKmode
2847 && data->nominal_mode != data->passed_mode)
2848 stack_parm = NULL;
2849
2850 /* If stack protection is in effect for this function, don't leave any
2851 pointers in their passed stack slots. */
2852 else if (crtl->stack_protect_guard
2853 && (flag_stack_protect == SPCT_FLAG_ALL
2854 || data->arg.pass_by_reference
2855 || POINTER_TYPE_P (data->nominal_type)))
2856 stack_parm = NULL;
2857
2858 data->stack_parm = stack_parm;
2859 }
2860
2861 /* A subroutine of assign_parms. Return true if the current parameter
2862 should be stored as a BLKmode in the current frame. */
2863
2864 static bool
2865 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2866 {
2867 if (data->nominal_mode == BLKmode)
2868 return true;
2869 if (GET_MODE (data->entry_parm) == BLKmode)
2870 return true;
2871
2872 #ifdef BLOCK_REG_PADDING
2873 /* Only assign_parm_setup_block knows how to deal with register arguments
2874 that are padded at the least significant end. */
2875 if (REG_P (data->entry_parm)
2876 && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2877 && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2878 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2879 return true;
2880 #endif
2881
2882 return false;
2883 }
2884
2885 /* A subroutine of assign_parms. Arrange for the parameter to be
2886 present and valid in DATA->STACK_PARM. */
2887
2888 static void
2889 assign_parm_setup_block (struct assign_parm_data_all *all,
2890 tree parm, struct assign_parm_data_one *data)
2891 {
2892 rtx entry_parm = data->entry_parm;
2893 rtx stack_parm = data->stack_parm;
2894 rtx target_reg = NULL_RTX;
2895 bool in_conversion_seq = false;
2896 HOST_WIDE_INT size;
2897 HOST_WIDE_INT size_stored;
2898
2899 if (GET_CODE (entry_parm) == PARALLEL)
2900 entry_parm = emit_group_move_into_temps (entry_parm);
2901
2902 /* If we want the parameter in a pseudo, don't use a stack slot. */
2903 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2904 {
2905 tree def = ssa_default_def (cfun, parm);
2906 gcc_assert (def);
2907 machine_mode mode = promote_ssa_mode (def, NULL);
2908 rtx reg = gen_reg_rtx (mode);
2909 if (GET_CODE (reg) != CONCAT)
2910 stack_parm = reg;
2911 else
2912 {
2913 target_reg = reg;
2914 /* Avoid allocating a stack slot, if there isn't one
2915 preallocated by the ABI. It might seem like we should
2916 always prefer a pseudo, but converting between
2917 floating-point and integer modes goes through the stack
2918 on various machines, so it's better to use the reserved
2919 stack slot than to risk wasting it and allocating more
2920 for the conversion. */
2921 if (stack_parm == NULL_RTX)
2922 {
2923 int save = generating_concat_p;
2924 generating_concat_p = 0;
2925 stack_parm = gen_reg_rtx (mode);
2926 generating_concat_p = save;
2927 }
2928 }
2929 data->stack_parm = NULL;
2930 }
2931
2932 size = int_size_in_bytes (data->arg.type);
2933 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2934 if (stack_parm == 0)
2935 {
2936 HOST_WIDE_INT parm_align
2937 = (STRICT_ALIGNMENT
2938 ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2939
2940 SET_DECL_ALIGN (parm, parm_align);
2941 if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2942 {
2943 rtx allocsize = gen_int_mode (size_stored, Pmode);
2944 get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2945 stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2946 MAX_SUPPORTED_STACK_ALIGNMENT);
2947 rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2948 DECL_ALIGN (parm));
2949 mark_reg_pointer (addr, DECL_ALIGN (parm));
2950 stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2951 MEM_NOTRAP_P (stack_parm) = 1;
2952 }
2953 else
2954 stack_parm = assign_stack_local (BLKmode, size_stored,
2955 DECL_ALIGN (parm));
2956 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2957 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2958 set_mem_attributes (stack_parm, parm, 1);
2959 }
2960
2961 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2962 calls that pass values in multiple non-contiguous locations. */
2963 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2964 {
2965 rtx mem;
2966
2967 /* Note that we will be storing an integral number of words.
2968 So we have to be careful to ensure that we allocate an
2969 integral number of words. We do this above when we call
2970 assign_stack_local if space was not allocated in the argument
2971 list. If it was, this will not work if PARM_BOUNDARY is not
2972 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2973 if it becomes a problem. The exception is when BLKmode arrives
2974 with arguments not conforming to word_mode. */
2975
2976 if (data->stack_parm == 0)
2977 ;
2978 else if (GET_CODE (entry_parm) == PARALLEL)
2979 ;
2980 else
2981 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2982
2983 mem = validize_mem (copy_rtx (stack_parm));
2984
2985 /* Handle values in multiple non-contiguous locations. */
2986 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2987 emit_group_store (mem, entry_parm, data->arg.type, size);
2988 else if (GET_CODE (entry_parm) == PARALLEL)
2989 {
2990 push_to_sequence2 (all->first_conversion_insn,
2991 all->last_conversion_insn);
2992 emit_group_store (mem, entry_parm, data->arg.type, size);
2993 all->first_conversion_insn = get_insns ();
2994 all->last_conversion_insn = get_last_insn ();
2995 end_sequence ();
2996 in_conversion_seq = true;
2997 }
2998
2999 else if (size == 0)
3000 ;
3001
3002 /* If SIZE is that of a mode no bigger than a word, just use
3003 that mode's store operation. */
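/* Editorial example, not part of the original source: a 4-byte BLKmode
   argument is typically handled here via the 32-bit integer mode returned
   by int_mode_for_size, assuming the target provides one; a 3-byte
   argument gets BLKmode back and falls through to the paths below.  */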
3004 else if (size <= UNITS_PER_WORD)
3005 {
3006 unsigned int bits = size * BITS_PER_UNIT;
3007 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
3008
3009 if (mode != BLKmode
3010 #ifdef BLOCK_REG_PADDING
3011 && (size == UNITS_PER_WORD
3012 || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3013 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3014 #endif
3015 )
3016 {
3017 rtx reg;
3018
3019 /* We are really truncating a word_mode value containing
3020 SIZE bytes into a value of mode MODE. If such an
3021 operation requires no actual instructions, we can refer
3022 to the value directly in mode MODE, otherwise we must
3023 start with the register in word_mode and explicitly
3024 convert it. */
3025 if (mode == word_mode
3026 || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
3027 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3028 else
3029 {
3030 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3031 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3032 }
3033 emit_move_insn (change_address (mem, mode, 0), reg);
3034 }
3035
3036 #ifdef BLOCK_REG_PADDING
3037 /* Storing the register in memory as a full word, as
3038 move_block_from_reg below would do, and then using the
3039 MEM in a smaller mode, has the effect of shifting right
3040 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3041 shifting must be explicit. */
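/* Editorial example, not part of the original source: with 8-byte words
   and a 3-byte argument on such a big-endian target, the value occupies
   the most significant bytes of the register, so the explicit shift below
   is by (8 - 3) * 8 = 40 bits.  */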
3042 else if (!MEM_P (mem))
3043 {
3044 rtx x;
3045
3046 /* If the assert below fails, we should have taken the
3047 mode != BLKmode path above, unless we have downward
3048 padding of smaller-than-word arguments on a machine
3049 with little-endian bytes, which would likely require
3050 additional changes to work correctly. */
3051 gcc_checking_assert (BYTES_BIG_ENDIAN
3052 && (BLOCK_REG_PADDING (mode,
3053 data->arg.type, 1)
3054 == PAD_UPWARD));
3055
3056 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3057
3058 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3059 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3060 NULL_RTX, 1);
3061 x = force_reg (word_mode, x);
3062 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3063
3064 emit_move_insn (mem, x);
3065 }
3066 #endif
3067
3068 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3069 machine must be aligned to the left before storing
3070 to memory. Note that the previous test doesn't
3071 handle all cases (e.g. SIZE == 3). */
3072 else if (size != UNITS_PER_WORD
3073 #ifdef BLOCK_REG_PADDING
3074 && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3075 == PAD_DOWNWARD)
3076 #else
3077 && BYTES_BIG_ENDIAN
3078 #endif
3079 )
3080 {
3081 rtx tem, x;
3082 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3083 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3084
3085 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3086 tem = change_address (mem, word_mode, 0);
3087 emit_move_insn (tem, x);
3088 }
3089 else
3090 move_block_from_reg (REGNO (entry_parm), mem,
3091 size_stored / UNITS_PER_WORD);
3092 }
3093 else if (!MEM_P (mem))
3094 {
3095 gcc_checking_assert (size > UNITS_PER_WORD);
3096 #ifdef BLOCK_REG_PADDING
3097 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3098 data->arg.type, 0)
3099 == PAD_UPWARD);
3100 #endif
3101 emit_move_insn (mem, entry_parm);
3102 }
3103 else
3104 move_block_from_reg (REGNO (entry_parm), mem,
3105 size_stored / UNITS_PER_WORD);
3106 }
3107 else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
3108 {
3109 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3110 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3111 BLOCK_OP_NORMAL);
3112 all->first_conversion_insn = get_insns ();
3113 all->last_conversion_insn = get_last_insn ();
3114 end_sequence ();
3115 in_conversion_seq = true;
3116 }
3117
3118 if (target_reg)
3119 {
3120 if (!in_conversion_seq)
3121 emit_move_insn (target_reg, stack_parm);
3122 else
3123 {
3124 push_to_sequence2 (all->first_conversion_insn,
3125 all->last_conversion_insn);
3126 emit_move_insn (target_reg, stack_parm);
3127 all->first_conversion_insn = get_insns ();
3128 all->last_conversion_insn = get_last_insn ();
3129 end_sequence ();
3130 }
3131 stack_parm = target_reg;
3132 }
3133
3134 data->stack_parm = stack_parm;
3135 set_parm_rtl (parm, stack_parm);
3136 }
3137
3138 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3139 parameter. Get it there. Perform all ABI specified conversions. */
3140
3141 static void
3142 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3143 struct assign_parm_data_one *data)
3144 {
3145 rtx parmreg, validated_mem;
3146 rtx equiv_stack_parm;
3147 machine_mode promoted_nominal_mode;
3148 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3149 bool did_conversion = false;
3150 bool need_conversion, moved;
3151 enum insn_code icode;
3152 rtx rtl;
3153
3154 /* Store the parm in a pseudoregister during the function, but we may
3155 need to do it in a wider mode. Using 2 here makes the result
3156 consistent with promote_decl_mode and thus expand_expr_real_1. */
3157 promoted_nominal_mode
3158 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3159 TREE_TYPE (current_function_decl), 2);
3160
3161 parmreg = gen_reg_rtx (promoted_nominal_mode);
3162 if (!DECL_ARTIFICIAL (parm))
3163 mark_user_reg (parmreg);
3164
3165 /* If this was an item that we received a pointer to,
3166 set rtl appropriately. */
3167 if (data->arg.pass_by_reference)
3168 {
3169 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3170 set_mem_attributes (rtl, parm, 1);
3171 }
3172 else
3173 rtl = parmreg;
3174
3175 assign_parm_remove_parallels (data);
3176
3177 /* Copy the value into the register, thus bridging between
3178 assign_parm_find_data_types and expand_expr_real_1. */
3179
3180 equiv_stack_parm = data->stack_parm;
3181 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3182
3183 need_conversion = (data->nominal_mode != data->passed_mode
3184 || promoted_nominal_mode != data->arg.mode);
3185 moved = false;
3186
3187 if (need_conversion
3188 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3189 && data->nominal_mode == data->passed_mode
3190 && data->nominal_mode == GET_MODE (data->entry_parm))
3191 {
3192 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3193 mode, by the caller. We now have to convert it to
3194 NOMINAL_MODE, if different. However, PARMREG may be in
3195 a different mode than NOMINAL_MODE if it is being stored
3196 promoted.
3197
3198 If ENTRY_PARM is a hard register, it might be in a register
3199 not valid for operating in its mode (e.g., an odd-numbered
3200 register for a DFmode). In that case, moves are the only
3201 thing valid, so we can't do a convert from there. This
3202 occurs when the calling sequence allows such misaligned
3203 usage.
3204
3205 In addition, the conversion may involve a call, which could
3206 clobber parameters which haven't been copied to pseudo
3207 registers yet.
3208
3209 First, we try to emit an insn which performs the necessary
3210 conversion. We verify that this insn does not clobber any
3211 hard registers. */
3212
3213 rtx op0, op1;
3214
3215 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3216 unsignedp);
3217
3218 op0 = parmreg;
3219 op1 = validated_mem;
3220 if (icode != CODE_FOR_nothing
3221 && insn_operand_matches (icode, 0, op0)
3222 && insn_operand_matches (icode, 1, op1))
3223 {
3224 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3225 rtx_insn *insn, *insns;
3226 rtx t = op1;
3227 HARD_REG_SET hardregs;
3228
3229 start_sequence ();
3230 /* If op1 is a hard register that is likely spilled, first
3231 force it into a pseudo, otherwise combiner might extend
3232 its lifetime too much. */
3233 if (GET_CODE (t) == SUBREG)
3234 t = SUBREG_REG (t);
3235 if (REG_P (t)
3236 && HARD_REGISTER_P (t)
3237 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3238 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3239 {
3240 t = gen_reg_rtx (GET_MODE (op1));
3241 emit_move_insn (t, op1);
3242 }
3243 else
3244 t = op1;
3245 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3246 data->passed_mode, unsignedp);
3247 emit_insn (pat);
3248 insns = get_insns ();
3249
3250 moved = true;
3251 CLEAR_HARD_REG_SET (hardregs);
3252 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3253 {
3254 if (INSN_P (insn))
3255 note_stores (insn, record_hard_reg_sets, &hardregs);
3256 if (!hard_reg_set_empty_p (hardregs))
3257 moved = false;
3258 }
3259
3260 end_sequence ();
3261
3262 if (moved)
3263 {
3264 emit_insn (insns);
3265 if (equiv_stack_parm != NULL_RTX)
3266 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3267 equiv_stack_parm);
3268 }
3269 }
3270 }
3271
3272 if (moved)
3273 /* Nothing to do. */
3274 ;
3275 else if (need_conversion)
3276 {
3277 /* We did not have an insn to convert directly, or the sequence
3278 generated appeared unsafe. We must first copy the parm to a
3279 pseudo reg, and save the conversion until after all
3280 parameters have been moved. */
3281
3282 int save_tree_used;
3283 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3284
3285 emit_move_insn (tempreg, validated_mem);
3286
3287 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3288 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3289
3290 if (partial_subreg_p (tempreg)
3291 && GET_MODE (tempreg) == data->nominal_mode
3292 && REG_P (SUBREG_REG (tempreg))
3293 && data->nominal_mode == data->passed_mode
3294 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3295 {
3296 /* The argument is already sign/zero extended, so note it
3297 into the subreg. */
3298 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3299 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3300 }
3301
3302 /* TREE_USED gets set erroneously during expand_assignment. */
3303 save_tree_used = TREE_USED (parm);
3304 SET_DECL_RTL (parm, rtl);
3305 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3306 SET_DECL_RTL (parm, NULL_RTX);
3307 TREE_USED (parm) = save_tree_used;
3308 all->first_conversion_insn = get_insns ();
3309 all->last_conversion_insn = get_last_insn ();
3310 end_sequence ();
3311
3312 did_conversion = true;
3313 }
3314 else if (MEM_P (data->entry_parm)
3315 && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3316 > MEM_ALIGN (data->entry_parm)
3317 && (((icode = optab_handler (movmisalign_optab,
3318 promoted_nominal_mode))
3319 != CODE_FOR_nothing)
3320 || targetm.slow_unaligned_access (promoted_nominal_mode,
3321 MEM_ALIGN (data->entry_parm))))
3322 {
3323 if (icode != CODE_FOR_nothing)
3324 emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3325 else
3326 rtl = parmreg = extract_bit_field (validated_mem,
3327 GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3328 unsignedp, parmreg,
3329 promoted_nominal_mode, VOIDmode, false, NULL);
3330 }
3331 else
3332 emit_move_insn (parmreg, validated_mem);
3333
3334 /* If we were passed a pointer but the actual value can live in a register,
3335 retrieve it and use it directly. Note that we cannot use nominal_mode,
3336 because it will have been set to Pmode above; we must use the actual mode
3337 of the parameter instead. */
3338 if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3339 {
3340 /* Use a stack slot for debugging purposes if possible. */
3341 if (use_register_for_decl (parm))
3342 {
3343 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3344 mark_user_reg (parmreg);
3345 }
3346 else
3347 {
3348 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3349 TYPE_MODE (TREE_TYPE (parm)),
3350 TYPE_ALIGN (TREE_TYPE (parm)));
3351 parmreg
3352 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3353 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3354 align);
3355 set_mem_attributes (parmreg, parm, 1);
3356 }
3357
3358 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3359 the debug info in case it is not legitimate. */
3360 if (GET_MODE (parmreg) != GET_MODE (rtl))
3361 {
3362 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3363 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3364
3365 push_to_sequence2 (all->first_conversion_insn,
3366 all->last_conversion_insn);
3367 emit_move_insn (tempreg, rtl);
3368 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3369 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3370 tempreg);
3371 all->first_conversion_insn = get_insns ();
3372 all->last_conversion_insn = get_last_insn ();
3373 end_sequence ();
3374
3375 did_conversion = true;
3376 }
3377 else
3378 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3379
3380 rtl = parmreg;
3381
3382 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3383 now the parm. */
3384 data->stack_parm = NULL;
3385 }
3386
3387 set_parm_rtl (parm, rtl);
3388
3389 /* Mark the register as eliminable if we did no conversion and it was
3390 copied from memory at a fixed offset, and the arg pointer was not
3391 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3392 offset formed an invalid address, such memory-equivalences as we
3393 make here would screw up life analysis for it. */
3394 if (data->nominal_mode == data->passed_mode
3395 && !did_conversion
3396 && data->stack_parm != 0
3397 && MEM_P (data->stack_parm)
3398 && data->locate.offset.var == 0
3399 && reg_mentioned_p (virtual_incoming_args_rtx,
3400 XEXP (data->stack_parm, 0)))
3401 {
3402 rtx_insn *linsn = get_last_insn ();
3403 rtx_insn *sinsn;
3404 rtx set;
3405
3406 /* Mark complex types separately. */
3407 if (GET_CODE (parmreg) == CONCAT)
3408 {
3409 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3410 int regnor = REGNO (XEXP (parmreg, 0));
3411 int regnoi = REGNO (XEXP (parmreg, 1));
3412 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3413 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3414 GET_MODE_SIZE (submode));
3415
3416 /* Scan backwards for the set of the real and
3417 imaginary parts. */
3418 for (sinsn = linsn; sinsn != 0;
3419 sinsn = prev_nonnote_insn (sinsn))
3420 {
3421 set = single_set (sinsn);
3422 if (set == 0)
3423 continue;
3424
3425 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3426 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3427 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3428 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3429 }
3430 }
3431 else
3432 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3433 }
3434
3435 /* For pointer data type, suggest pointer register. */
3436 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3437 mark_reg_pointer (parmreg,
3438 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3439 }
3440
3441 /* A subroutine of assign_parms. Allocate stack space to hold the current
3442 parameter. Get it there. Perform all ABI specified conversions. */
3443
3444 static void
3445 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3446 struct assign_parm_data_one *data)
3447 {
3448 /* Value must be stored in the stack slot STACK_PARM during function
3449 execution. */
3450 bool to_conversion = false;
3451
3452 assign_parm_remove_parallels (data);
3453
3454 if (data->arg.mode != data->nominal_mode)
3455 {
3456 /* Conversion is required. */
3457 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3458
3459 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3460
3461 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3462 to_conversion = true;
3463
3464 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3465 TYPE_UNSIGNED (TREE_TYPE (parm)));
3466
3467 if (data->stack_parm)
3468 {
3469 poly_int64 offset
3470 = subreg_lowpart_offset (data->nominal_mode,
3471 GET_MODE (data->stack_parm));
3472 /* ??? This may need a big-endian conversion on sparc64. */
3473 data->stack_parm
3474 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3475 if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3476 set_mem_offset (data->stack_parm,
3477 MEM_OFFSET (data->stack_parm) + offset);
3478 }
3479 }
3480
3481 if (data->entry_parm != data->stack_parm)
3482 {
3483 rtx src, dest;
3484
3485 if (data->stack_parm == 0)
3486 {
3487 int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3488 GET_MODE (data->entry_parm),
3489 TYPE_ALIGN (data->arg.type));
3490 if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3491 && ((optab_handler (movmisalign_optab,
3492 GET_MODE (data->entry_parm))
3493 != CODE_FOR_nothing)
3494 || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3495 align)))
3496 align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3497 data->stack_parm
3498 = assign_stack_local (GET_MODE (data->entry_parm),
3499 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3500 align);
3501 align = MEM_ALIGN (data->stack_parm);
3502 set_mem_attributes (data->stack_parm, parm, 1);
3503 set_mem_align (data->stack_parm, align);
3504 }
3505
3506 dest = validize_mem (copy_rtx (data->stack_parm));
3507 src = validize_mem (copy_rtx (data->entry_parm));
3508
3509 if (TYPE_EMPTY_P (data->arg.type))
3510 /* Empty types don't really need to be copied. */;
3511 else if (MEM_P (src))
3512 {
3513 /* Use a block move to handle potentially misaligned entry_parm. */
3514 if (!to_conversion)
3515 push_to_sequence2 (all->first_conversion_insn,
3516 all->last_conversion_insn);
3517 to_conversion = true;
3518
3519 emit_block_move (dest, src,
3520 GEN_INT (int_size_in_bytes (data->arg.type)),
3521 BLOCK_OP_NORMAL);
3522 }
3523 else
3524 {
3525 if (!REG_P (src))
3526 src = force_reg (GET_MODE (src), src);
3527 emit_move_insn (dest, src);
3528 }
3529 }
3530
3531 if (to_conversion)
3532 {
3533 all->first_conversion_insn = get_insns ();
3534 all->last_conversion_insn = get_last_insn ();
3535 end_sequence ();
3536 }
3537
3538 set_parm_rtl (parm, data->stack_parm);
3539 }
3540
3541 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3542 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3543
3544 static void
3545 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3546 vec<tree> fnargs)
3547 {
3548 tree parm;
3549 tree orig_fnargs = all->orig_fnargs;
3550 unsigned i = 0;
3551
3552 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3553 {
3554 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3555 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3556 {
3557 rtx tmp, real, imag;
3558 scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3559
3560 real = DECL_RTL (fnargs[i]);
3561 imag = DECL_RTL (fnargs[i + 1]);
3562 if (inner != GET_MODE (real))
3563 {
3564 real = gen_lowpart_SUBREG (inner, real);
3565 imag = gen_lowpart_SUBREG (inner, imag);
3566 }
3567
3568 if (TREE_ADDRESSABLE (parm))
3569 {
3570 rtx rmem, imem;
3571 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3572 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3573 DECL_MODE (parm),
3574 TYPE_ALIGN (TREE_TYPE (parm)));
3575
3576 /* split_complex_arg put the real and imag parts in
3577 pseudos. Move them to memory. */
3578 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3579 set_mem_attributes (tmp, parm, 1);
3580 rmem = adjust_address_nv (tmp, inner, 0);
3581 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3582 push_to_sequence2 (all->first_conversion_insn,
3583 all->last_conversion_insn);
3584 emit_move_insn (rmem, real);
3585 emit_move_insn (imem, imag);
3586 all->first_conversion_insn = get_insns ();
3587 all->last_conversion_insn = get_last_insn ();
3588 end_sequence ();
3589 }
3590 else
3591 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3592 set_parm_rtl (parm, tmp);
3593
3594 real = DECL_INCOMING_RTL (fnargs[i]);
3595 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3596 if (inner != GET_MODE (real))
3597 {
3598 real = gen_lowpart_SUBREG (inner, real);
3599 imag = gen_lowpart_SUBREG (inner, imag);
3600 }
3601 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3602 set_decl_incoming_rtl (parm, tmp, false);
3603 i++;
3604 }
3605 }
3606 }
3607
3608 /* Assign RTL expressions to the function's parameters. This may involve
3609 copying them into registers and using those registers as the DECL_RTL. */
3610
3611 static void
3612 assign_parms (tree fndecl)
3613 {
3614 struct assign_parm_data_all all;
3615 tree parm;
3616 vec<tree> fnargs;
3617 unsigned i;
3618
3619 crtl->args.internal_arg_pointer
3620 = targetm.calls.internal_arg_pointer ();
3621
3622 assign_parms_initialize_all (&all);
3623 fnargs = assign_parms_augmented_arg_list (&all);
3624
3625 FOR_EACH_VEC_ELT (fnargs, i, parm)
3626 {
3627 struct assign_parm_data_one data;
3628
3629 /* Extract the type of PARM; adjust it according to ABI. */
3630 assign_parm_find_data_types (&all, parm, &data);
3631
3632 /* Early out for errors and void parameters. */
3633 if (data.passed_mode == VOIDmode)
3634 {
3635 SET_DECL_RTL (parm, const0_rtx);
3636 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3637 continue;
3638 }
3639
3640 /* Estimate stack alignment from parameter alignment. */
3641 if (SUPPORTS_STACK_ALIGNMENT)
3642 {
3643 unsigned int align
3644 = targetm.calls.function_arg_boundary (data.arg.mode,
3645 data.arg.type);
3646 align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3647 if (TYPE_ALIGN (data.nominal_type) > align)
3648 align = MINIMUM_ALIGNMENT (data.nominal_type,
3649 TYPE_MODE (data.nominal_type),
3650 TYPE_ALIGN (data.nominal_type));
3651 if (crtl->stack_alignment_estimated < align)
3652 {
3653 gcc_assert (!crtl->stack_realign_processed);
3654 crtl->stack_alignment_estimated = align;
3655 }
3656 }
3657
3658 /* Find out where the parameter arrives in this function. */
3659 assign_parm_find_entry_rtl (&all, &data);
3660
3661 /* Find out where stack space for this parameter might be. */
3662 if (assign_parm_is_stack_parm (&all, &data))
3663 {
3664 assign_parm_find_stack_rtl (parm, &data);
3665 assign_parm_adjust_entry_rtl (&data);
3666 /* For arguments that occupy no space in the parameter
3667 passing area, have non-zero size, and have their address taken,
3668 force creation of a stack slot so that they have a distinct
3669 address from other parameters. */
3670 if (TYPE_EMPTY_P (data.arg.type)
3671 && TREE_ADDRESSABLE (parm)
3672 && data.entry_parm == data.stack_parm
3673 && MEM_P (data.entry_parm)
3674 && int_size_in_bytes (data.arg.type))
3675 data.stack_parm = NULL_RTX;
3676 }
3677 /* Record permanently how this parm was passed. */
3678 if (data.arg.pass_by_reference)
3679 {
3680 rtx incoming_rtl
3681 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3682 data.entry_parm);
3683 set_decl_incoming_rtl (parm, incoming_rtl, true);
3684 }
3685 else
3686 set_decl_incoming_rtl (parm, data.entry_parm, false);
3687
3688 assign_parm_adjust_stack_rtl (&data);
3689
3690 if (assign_parm_setup_block_p (&data))
3691 assign_parm_setup_block (&all, parm, &data);
3692 else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3693 assign_parm_setup_reg (&all, parm, &data);
3694 else
3695 assign_parm_setup_stack (&all, parm, &data);
3696
3697 if (cfun->stdarg && !DECL_CHAIN (parm))
3698 assign_parms_setup_varargs (&all, &data, false);
3699
3700 /* Update info on where next arg arrives in registers. */
3701 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3702 }
3703
3704 if (targetm.calls.split_complex_arg)
3705 assign_parms_unsplit_complex (&all, fnargs);
3706
3707 fnargs.release ();
3708
3709 /* Output all parameter conversion instructions (possibly including calls)
3710 now that all parameters have been copied out of hard registers. */
3711 emit_insn (all.first_conversion_insn);
3712
3713 /* Estimate reload stack alignment from scalar return mode. */
3714 if (SUPPORTS_STACK_ALIGNMENT)
3715 {
3716 if (DECL_RESULT (fndecl))
3717 {
3718 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3719 machine_mode mode = TYPE_MODE (type);
3720
3721 if (mode != BLKmode
3722 && mode != VOIDmode
3723 && !AGGREGATE_TYPE_P (type))
3724 {
3725 unsigned int align = GET_MODE_ALIGNMENT (mode);
3726 if (crtl->stack_alignment_estimated < align)
3727 {
3728 gcc_assert (!crtl->stack_realign_processed);
3729 crtl->stack_alignment_estimated = align;
3730 }
3731 }
3732 }
3733 }
3734
3735 /* If we are receiving a struct value address as the first argument, set up
3736 the RTL for the function result. As this might require code to convert
3737 the transmitted address to Pmode, we do this here to ensure that possible
3738 preliminary conversions of the address have been emitted already. */
3739 if (all.function_result_decl)
3740 {
3741 tree result = DECL_RESULT (current_function_decl);
3742 rtx addr = DECL_RTL (all.function_result_decl);
3743 rtx x;
3744
3745 if (DECL_BY_REFERENCE (result))
3746 {
3747 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3748 x = addr;
3749 }
3750 else
3751 {
3752 SET_DECL_VALUE_EXPR (result,
3753 build1 (INDIRECT_REF, TREE_TYPE (result),
3754 all.function_result_decl));
3755 addr = convert_memory_address (Pmode, addr);
3756 x = gen_rtx_MEM (DECL_MODE (result), addr);
3757 set_mem_attributes (x, result, 1);
3758 }
3759
3760 DECL_HAS_VALUE_EXPR_P (result) = 1;
3761
3762 set_parm_rtl (result, x);
3763 }
3764
3765 /* We have aligned all the args, so add space for the pretend args. */
3766 crtl->args.pretend_args_size = all.pretend_args_size;
3767 all.stack_args_size.constant += all.extra_pretend_bytes;
3768 crtl->args.size = all.stack_args_size.constant;
3769
3770 /* Adjust function incoming argument size for alignment and
3771 minimum length. */
3772
3773 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3774 crtl->args.size = aligned_upper_bound (crtl->args.size,
3775 PARM_BOUNDARY / BITS_PER_UNIT);
3776
3777 if (ARGS_GROW_DOWNWARD)
3778 {
3779 crtl->args.arg_offset_rtx
3780 = (all.stack_args_size.var == 0
3781 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3782 : expand_expr (size_diffop (all.stack_args_size.var,
3783 size_int (-all.stack_args_size.constant)),
3784 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3785 }
3786 else
3787 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3788
3789 /* See how many bytes, if any, of its args a function should try to pop
3790 on return. */
3791
3792 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3793 TREE_TYPE (fndecl),
3794 crtl->args.size);
3795
3796 /* For a stdarg.h function, save info about the
3797 regs and stack space used by the named args. */
3798
3799 crtl->args.info = all.args_so_far_v;
3800
3801 /* Set the rtx used for the function return value. Put this in its
3802 own variable so any optimizers that need this information don't have
3803 to include tree.h. Do this here so it gets done when an inlined
3804 function gets output. */
3805
3806 crtl->return_rtx
3807 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3808 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3809
3810 /* If scalar return value was computed in a pseudo-reg, or was a named
3811 return value that got dumped to the stack, copy that to the hard
3812 return register. */
3813 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3814 {
3815 tree decl_result = DECL_RESULT (fndecl);
3816 rtx decl_rtl = DECL_RTL (decl_result);
3817
3818 if (REG_P (decl_rtl)
3819 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3820 : DECL_REGISTER (decl_result))
3821 {
3822 rtx real_decl_rtl;
3823
3824 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3825 fndecl, true);
3826 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3827 /* The delay slot scheduler assumes that crtl->return_rtx
3828 holds the hard register containing the return value, not a
3829 temporary pseudo. */
3830 crtl->return_rtx = real_decl_rtl;
3831 }
3832 }
3833 }
3834
3835 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3836 For all seen types, gimplify their sizes. */
3837
3838 static tree
3839 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3840 {
3841 tree t = *tp;
3842
3843 *walk_subtrees = 0;
3844 if (TYPE_P (t))
3845 {
3846 if (POINTER_TYPE_P (t))
3847 *walk_subtrees = 1;
3848 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3849 && !TYPE_SIZES_GIMPLIFIED (t))
3850 {
3851 gimplify_type_sizes (t, (gimple_seq *) data);
3852 *walk_subtrees = 1;
3853 }
3854 }
3855
3856 return NULL;
3857 }
3858
3859 /* Gimplify the parameter list for current_function_decl. This involves
3860 evaluating SAVE_EXPRs of variable sized parameters and generating code
3861 to implement callee-copies reference parameters. Returns a sequence of
3862 statements to add to the beginning of the function. */
3863
3864 gimple_seq
3865 gimplify_parameters (gimple_seq *cleanup)
3866 {
3867 struct assign_parm_data_all all;
3868 tree parm;
3869 gimple_seq stmts = NULL;
3870 vec<tree> fnargs;
3871 unsigned i;
3872
3873 assign_parms_initialize_all (&all);
3874 fnargs = assign_parms_augmented_arg_list (&all);
3875
3876 FOR_EACH_VEC_ELT (fnargs, i, parm)
3877 {
3878 struct assign_parm_data_one data;
3879
3880 /* Extract the type of PARM; adjust it according to ABI. */
3881 assign_parm_find_data_types (&all, parm, &data);
3882
3883 /* Early out for errors and void parameters. */
3884 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3885 continue;
3886
3887 /* Update info on where next arg arrives in registers. */
3888 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3889
3890 /* ??? Once upon a time variable_size stuffed parameter list
3891 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3892 turned out to be less than manageable in the gimple world.
3893 Now we have to hunt them down ourselves. */
3894 walk_tree_without_duplicates (&data.arg.type,
3895 gimplify_parm_type, &stmts);
3896
3897 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3898 {
3899 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3900 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3901 }
3902
3903 if (data.arg.pass_by_reference)
3904 {
3905 tree type = TREE_TYPE (data.arg.type);
3906 function_arg_info orig_arg (type, data.arg.named);
3907 if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3908 {
3909 tree local, t;
3910
3911 /* For constant-sized objects, this is trivial; for
3912 variable-sized objects, we have to play games. */
3913 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3914 && !(flag_stack_check == GENERIC_STACK_CHECK
3915 && compare_tree_int (DECL_SIZE_UNIT (parm),
3916 STACK_CHECK_MAX_VAR_SIZE) > 0))
3917 {
3918 local = create_tmp_var (type, get_name (parm));
3919 DECL_IGNORED_P (local) = 0;
3920 /* If PARM was addressable, move that flag over
3921 to the local copy, as its address will be taken,
3922 not the PARM's. Keep the parm marked address-taken,
3923 as we'll query that flag during gimplification. */
3924 if (TREE_ADDRESSABLE (parm))
3925 TREE_ADDRESSABLE (local) = 1;
3926 if (DECL_NOT_GIMPLE_REG_P (parm))
3927 DECL_NOT_GIMPLE_REG_P (local) = 1;
3928
3929 if (!is_gimple_reg (local)
3930 && flag_stack_reuse != SR_NONE)
3931 {
3932 tree clobber = build_clobber (type);
3933 gimple *clobber_stmt;
3934 clobber_stmt = gimple_build_assign (local, clobber);
3935 gimple_seq_add_stmt (cleanup, clobber_stmt);
3936 }
3937 }
3938 else
3939 {
3940 tree ptr_type, addr;
3941
3942 ptr_type = build_pointer_type (type);
3943 addr = create_tmp_reg (ptr_type, get_name (parm));
3944 DECL_IGNORED_P (addr) = 0;
3945 local = build_fold_indirect_ref (addr);
3946
3947 t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3948 DECL_ALIGN (parm),
3949 max_int_size_in_bytes (type));
3950 /* The call has been built for a variable-sized object. */
3951 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3952 t = fold_convert (ptr_type, t);
3953 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3954 gimplify_and_add (t, &stmts);
3955 }
3956
3957 gimplify_assign (local, parm, &stmts);
3958
3959 SET_DECL_VALUE_EXPR (parm, local);
3960 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3961 }
3962 }
3963 }
3964
3965 fnargs.release ();
3966
3967 return stmts;
3968 }
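/* Editorial sketch, not part of GCC: for a callee-copied reference
   parameter of constant size, the statements built above roughly amount
   to making a local copy and redirecting the parm to it through
   DECL_VALUE_EXPR.  With hypothetical names, the source-level effect is
   comparable to:  */
#if 0
void
f (struct S *s_ptr)             /* The ABI passes S by reference.  */
{
  struct S s_local = *s_ptr;    /* The callee makes its own copy.  */
  /* ... later uses of the parameter refer to s_local ...  */
}
#endif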
3969 \f
3970 /* Compute the size and offset from the start of the stacked arguments for a
3971 parm passed in mode PASSED_MODE and with type TYPE.
3972
3973 INITIAL_OFFSET_PTR points to the current offset into the stacked
3974 arguments.
3975
3976 The starting offset and size for this parm are returned in
3977 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3978 nonzero, the offset is that of the stack slot, which is returned in
3979 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3980 padding required from the initial offset ptr to the stack slot.
3981
3982 IN_REGS is nonzero if the argument will be passed in registers. It will
3983 never be set if REG_PARM_STACK_SPACE is not defined.
3984
3985 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3986 for arguments which are passed in registers.
3987
3988 FNDECL is the function in which the argument was defined.
3989
3990 There are two types of rounding that are done. The first, controlled by
3991 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3992 argument list to be aligned to the specific boundary (in bits). This
3993 rounding affects the initial and starting offsets, but not the argument
3994 size.
3995
3996 The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3997 optionally rounds the size of the parm to PARM_BOUNDARY. The
3998 initial offset is not affected by this rounding, while the size always
3999 is and the starting offset may be. */
4000
4001 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4002 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4003 callers pass in the total size of args so far as
4004 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
4005
4006 void
4007 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4008 int reg_parm_stack_space, int partial,
4009 tree fndecl ATTRIBUTE_UNUSED,
4010 struct args_size *initial_offset_ptr,
4011 struct locate_and_pad_arg_data *locate)
4012 {
4013 tree sizetree;
4014 pad_direction where_pad;
4015 unsigned int boundary, round_boundary;
4016 int part_size_in_regs;
4017
4018 /* If we have found a stack parm before we reach the end of the
4019 area reserved for registers, skip that area. */
4020 if (! in_regs)
4021 {
4022 if (reg_parm_stack_space > 0)
4023 {
4024 if (initial_offset_ptr->var
4025 || !ordered_p (initial_offset_ptr->constant,
4026 reg_parm_stack_space))
4027 {
4028 initial_offset_ptr->var
4029 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4030 ssize_int (reg_parm_stack_space));
4031 initial_offset_ptr->constant = 0;
4032 }
4033 else
4034 initial_offset_ptr->constant
4035 = ordered_max (initial_offset_ptr->constant,
4036 reg_parm_stack_space);
4037 }
4038 }
4039
4040 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4041
4042 sizetree = (type
4043 ? arg_size_in_bytes (type)
4044 : size_int (GET_MODE_SIZE (passed_mode)));
4045 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4046 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4047 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4048 type);
4049 locate->where_pad = where_pad;
4050
4051 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4052 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4053 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4054
4055 locate->boundary = boundary;
4056
4057 if (SUPPORTS_STACK_ALIGNMENT)
4058 {
4059 /* stack_alignment_estimated can't change after stack has been
4060 realigned. */
4061 if (crtl->stack_alignment_estimated < boundary)
4062 {
4063 if (!crtl->stack_realign_processed)
4064 crtl->stack_alignment_estimated = boundary;
4065 else
4066 {
4067 /* If stack is realigned and stack alignment value
4068 hasn't been finalized, it is OK not to increase
4069 stack_alignment_estimated. The bigger alignment
4070 requirement is recorded in stack_alignment_needed
4071 below. */
4072 gcc_assert (!crtl->stack_realign_finalized
4073 && crtl->stack_realign_needed);
4074 }
4075 }
4076 }
4077
4078 if (ARGS_GROW_DOWNWARD)
4079 {
4080 locate->slot_offset.constant = -initial_offset_ptr->constant;
4081 if (initial_offset_ptr->var)
4082 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4083 initial_offset_ptr->var);
4084
4085 {
4086 tree s2 = sizetree;
4087 if (where_pad != PAD_NONE
4088 && (!tree_fits_uhwi_p (sizetree)
4089 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4090 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4091 SUB_PARM_SIZE (locate->slot_offset, s2);
4092 }
4093
4094 locate->slot_offset.constant += part_size_in_regs;
4095
4096 if (!in_regs || reg_parm_stack_space > 0)
4097 pad_to_arg_alignment (&locate->slot_offset, boundary,
4098 &locate->alignment_pad);
4099
4100 locate->size.constant = (-initial_offset_ptr->constant
4101 - locate->slot_offset.constant);
4102 if (initial_offset_ptr->var)
4103 locate->size.var = size_binop (MINUS_EXPR,
4104 size_binop (MINUS_EXPR,
4105 ssize_int (0),
4106 initial_offset_ptr->var),
4107 locate->slot_offset.var);
4108
4109 /* Pad_below needs the pre-rounded size to know how much to pad
4110 below. */
4111 locate->offset = locate->slot_offset;
4112 if (where_pad == PAD_DOWNWARD)
4113 pad_below (&locate->offset, passed_mode, sizetree);
4114
4115 }
4116 else
4117 {
4118 if (!in_regs || reg_parm_stack_space > 0)
4119 pad_to_arg_alignment (initial_offset_ptr, boundary,
4120 &locate->alignment_pad);
4121 locate->slot_offset = *initial_offset_ptr;
4122
4123 #ifdef PUSH_ROUNDING
4124 if (passed_mode != BLKmode)
4125 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4126 #endif
4127
4128 /* Pad_below needs the pre-rounded size to know how much to pad below
4129 so this must be done before rounding up. */
4130 locate->offset = locate->slot_offset;
4131 if (where_pad == PAD_DOWNWARD)
4132 pad_below (&locate->offset, passed_mode, sizetree);
4133
4134 if (where_pad != PAD_NONE
4135 && (!tree_fits_uhwi_p (sizetree)
4136 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4137 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4138
4139 ADD_PARM_SIZE (locate->size, sizetree);
4140
4141 locate->size.constant -= part_size_in_regs;
4142 }
4143
4144 locate->offset.constant
4145 += targetm.calls.function_arg_offset (passed_mode, type);
4146 }
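/* Editorial sketch, not part of GCC: the constant-offset, args-grow-upward
   case of the two roundings documented above, using byte-sized boundaries
   that are assumed to be powers of two.  */
#if 0
struct toy_locate { long slot_offset, size; };

static struct toy_locate
toy_locate_and_pad (long initial_offset, long size,
                    long boundary, long round_boundary)
{
  struct toy_locate loc;

  /* First rounding: pad the starting offset up to BOUNDARY, as
     pad_to_arg_alignment does below.  */
  initial_offset = (initial_offset + boundary - 1) & -boundary;
  loc.slot_offset = initial_offset;

  /* Second rounding: pad the size up to ROUND_BOUNDARY; the starting
     offset itself is not changed by this.  */
  loc.size = (size + round_boundary - 1) & -round_boundary;
  return loc;
}

/* toy_locate_and_pad (6, 5, 4, 4) gives slot_offset == 8 and size == 8,
   so the next argument would start at byte offset 16.  */
#endif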
4147
4148 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4149 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4150
4151 static void
4152 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4153 struct args_size *alignment_pad)
4154 {
4155 tree save_var = NULL_TREE;
4156 poly_int64 save_constant = 0;
4157 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4158 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4159
4160 #ifdef SPARC_STACK_BOUNDARY_HACK
4161 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4162 the real alignment of %sp. However, when it does this, the
4163 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4164 if (SPARC_STACK_BOUNDARY_HACK)
4165 sp_offset = 0;
4166 #endif
4167
4168 if (boundary > PARM_BOUNDARY)
4169 {
4170 save_var = offset_ptr->var;
4171 save_constant = offset_ptr->constant;
4172 }
4173
4174 alignment_pad->var = NULL_TREE;
4175 alignment_pad->constant = 0;
4176
4177 if (boundary > BITS_PER_UNIT)
4178 {
4179 int misalign;
4180 if (offset_ptr->var
4181 || !known_misalignment (offset_ptr->constant + sp_offset,
4182 boundary_in_bytes, &misalign))
4183 {
4184 tree sp_offset_tree = ssize_int (sp_offset);
4185 tree offset = size_binop (PLUS_EXPR,
4186 ARGS_SIZE_TREE (*offset_ptr),
4187 sp_offset_tree);
4188 tree rounded;
4189 if (ARGS_GROW_DOWNWARD)
4190 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4191 else
4192 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4193
4194 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4195 /* ARGS_SIZE_TREE includes constant term. */
4196 offset_ptr->constant = 0;
4197 if (boundary > PARM_BOUNDARY)
4198 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4199 save_var);
4200 }
4201 else
4202 {
4203 if (ARGS_GROW_DOWNWARD)
4204 offset_ptr->constant -= misalign;
4205 else
4206 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4207
4208 if (boundary > PARM_BOUNDARY)
4209 alignment_pad->constant = offset_ptr->constant - save_constant;
4210 }
4211 }
4212 }
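/* Editorial sketch, not part of GCC: in the known-misalignment arm above,
   "-misalign & (boundary_in_bytes - 1)" is the number of bytes needed to
   reach the next multiple of the boundary, assuming a power-of-two
   boundary.  */
#if 0
static long
toy_pad_up (long constant, long boundary_in_bytes)
{
  long misalign = constant & (boundary_in_bytes - 1);
  constant += -misalign & (boundary_in_bytes - 1);
  return constant;   /* toy_pad_up (20, 8) == 24, toy_pad_up (24, 8) == 24.  */
}
#endif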
4213
4214 static void
4215 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4216 {
4217 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4218 int misalign;
4219 if (passed_mode != BLKmode
4220 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4221 offset_ptr->constant += -misalign & (align - 1);
4222 else
4223 {
4224 if (TREE_CODE (sizetree) != INTEGER_CST
4225 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4226 {
4227 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4228 tree s2 = round_up (sizetree, align);
4229 /* Add it in. */
4230 ADD_PARM_SIZE (*offset_ptr, s2);
4231 SUB_PARM_SIZE (*offset_ptr, sizetree);
4232 }
4233 }
4234 }
4235 \f
4236
4237 /* True if register REGNO was alive at a place where `setjmp' was
4238 called and was set more than once or is an argument. Such regs may
4239 be clobbered by `longjmp'. */
4240
4241 static bool
4242 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4243 {
4244 /* There appear to be cases where some local vars never reach the
4245 backend but have bogus regnos. */
4246 if (regno >= max_reg_num ())
4247 return false;
4248
4249 return ((REG_N_SETS (regno) > 1
4250 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4251 regno))
4252 && REGNO_REG_SET_P (setjmp_crosses, regno));
4253 }
4254
4255 /* Walk the tree of blocks describing the binding levels within a
4256 function and warn about variables that might be killed by setjmp or
4257 vfork. This is done after flow analysis and before register
4258 allocation, since register allocation will map the pseudo-regs to
4259 hard regs. */
4260
4261 static void
4262 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4263 {
4264 tree decl, sub;
4265
4266 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4267 {
4268 if (VAR_P (decl)
4269 && DECL_RTL_SET_P (decl)
4270 && REG_P (DECL_RTL (decl))
4271 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4272 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4273 " %<longjmp%> or %<vfork%>", decl);
4274 }
4275
4276 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4277 setjmp_vars_warning (setjmp_crosses, sub);
4278 }
4279
4280 /* Do the appropriate part of setjmp_vars_warning
4281 but for arguments instead of local variables. */
4282
4283 static void
4284 setjmp_args_warning (bitmap setjmp_crosses)
4285 {
4286 tree decl;
4287 for (decl = DECL_ARGUMENTS (current_function_decl);
4288 decl; decl = DECL_CHAIN (decl))
4289 if (DECL_RTL (decl) != 0
4290 && REG_P (DECL_RTL (decl))
4291 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4292 warning (OPT_Wclobbered,
4293 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4294 decl);
4295 }
4296
4297 /* Generate warning messages for variables live across setjmp. */
4298
4299 void
4300 generate_setjmp_warnings (void)
4301 {
4302 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4303
4304 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4305 || bitmap_empty_p (setjmp_crosses))
4306 return;
4307
4308 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4309 setjmp_args_warning (setjmp_crosses);
4310 }
4311
4312 \f
4313 /* Reverse the order of elements in the fragment chain T of blocks,
4314 and return the new head of the chain (old last element).
4315 In addition to that clear BLOCK_SAME_RANGE flags when needed
4316 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4317 its super fragment origin. */
4318
4319 static tree
4320 block_fragments_nreverse (tree t)
4321 {
4322 tree prev = 0, block, next, prev_super = 0;
4323 tree super = BLOCK_SUPERCONTEXT (t);
4324 if (BLOCK_FRAGMENT_ORIGIN (super))
4325 super = BLOCK_FRAGMENT_ORIGIN (super);
4326 for (block = t; block; block = next)
4327 {
4328 next = BLOCK_FRAGMENT_CHAIN (block);
4329 BLOCK_FRAGMENT_CHAIN (block) = prev;
4330 if ((prev && !BLOCK_SAME_RANGE (prev))
4331 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4332 != prev_super))
4333 BLOCK_SAME_RANGE (block) = 0;
4334 prev_super = BLOCK_SUPERCONTEXT (block);
4335 BLOCK_SUPERCONTEXT (block) = super;
4336 prev = block;
4337 }
4338 t = BLOCK_FRAGMENT_ORIGIN (t);
4339 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4340 != prev_super)
4341 BLOCK_SAME_RANGE (t) = 0;
4342 BLOCK_SUPERCONTEXT (t) = super;
4343 return prev;
4344 }
4345
4346 /* Reverse the order of elements in the chain T of blocks,
4347 and return the new head of the chain (old last element).
4348 Also do the same on subblocks and reverse the order of elements
4349 in BLOCK_FRAGMENT_CHAIN as well. */
4350
4351 static tree
4352 blocks_nreverse_all (tree t)
4353 {
4354 tree prev = 0, block, next;
4355 for (block = t; block; block = next)
4356 {
4357 next = BLOCK_CHAIN (block);
4358 BLOCK_CHAIN (block) = prev;
4359 if (BLOCK_FRAGMENT_CHAIN (block)
4360 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4361 {
4362 BLOCK_FRAGMENT_CHAIN (block)
4363 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4364 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4365 BLOCK_SAME_RANGE (block) = 0;
4366 }
4367 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4368 prev = block;
4369 }
4370 return prev;
4371 }
4372
4373
4374 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4375 and create duplicate blocks. */
4376 /* ??? Need an option to either create block fragments or to create
4377 abstract origin duplicates of a source block. It really depends
4378 on what optimization has been performed. */
4379
4380 void
4381 reorder_blocks (void)
4382 {
4383 tree block = DECL_INITIAL (current_function_decl);
4384
4385 if (block == NULL_TREE)
4386 return;
4387
4388 auto_vec<tree, 10> block_stack;
4389
4390 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4391 clear_block_marks (block);
4392
4393 /* Prune the old trees away, so that they don't get in the way. */
4394 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4395 BLOCK_CHAIN (block) = NULL_TREE;
4396
4397 /* Recreate the block tree from the note nesting. */
4398 reorder_blocks_1 (get_insns (), block, &block_stack);
4399 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4400 }
4401
4402 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4403
4404 void
4405 clear_block_marks (tree block)
4406 {
4407 while (block)
4408 {
4409 TREE_ASM_WRITTEN (block) = 0;
4410 clear_block_marks (BLOCK_SUBBLOCKS (block));
4411 block = BLOCK_CHAIN (block);
4412 }
4413 }
4414
4415 static void
4416 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4417 vec<tree> *p_block_stack)
4418 {
4419 rtx_insn *insn;
4420 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4421
4422 for (insn = insns; insn; insn = NEXT_INSN (insn))
4423 {
4424 if (NOTE_P (insn))
4425 {
4426 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4427 {
4428 tree block = NOTE_BLOCK (insn);
4429 tree origin;
4430
4431 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4432 origin = block;
4433
4434 if (prev_end)
4435 BLOCK_SAME_RANGE (prev_end) = 0;
4436 prev_end = NULL_TREE;
4437
4438 /* If we have seen this block before, that means it now
4439 spans multiple address regions. Create a new fragment. */
4440 if (TREE_ASM_WRITTEN (block))
4441 {
4442 tree new_block = copy_node (block);
4443
4444 BLOCK_SAME_RANGE (new_block) = 0;
4445 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4446 BLOCK_FRAGMENT_CHAIN (new_block)
4447 = BLOCK_FRAGMENT_CHAIN (origin);
4448 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4449
4450 NOTE_BLOCK (insn) = new_block;
4451 block = new_block;
4452 }
4453
4454 if (prev_beg == current_block && prev_beg)
4455 BLOCK_SAME_RANGE (block) = 1;
4456
4457 prev_beg = origin;
4458
4459 BLOCK_SUBBLOCKS (block) = 0;
4460 TREE_ASM_WRITTEN (block) = 1;
4461 /* When there's only one block for the entire function,
4462 current_block == block and we mustn't do this, as it
4463 would cause infinite recursion. */
4464 if (block != current_block)
4465 {
4466 tree super;
4467 if (block != origin)
4468 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4469 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4470 (origin))
4471 == current_block);
4472 if (p_block_stack->is_empty ())
4473 super = current_block;
4474 else
4475 {
4476 super = p_block_stack->last ();
4477 gcc_assert (super == current_block
4478 || BLOCK_FRAGMENT_ORIGIN (super)
4479 == current_block);
4480 }
4481 BLOCK_SUPERCONTEXT (block) = super;
4482 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4483 BLOCK_SUBBLOCKS (current_block) = block;
4484 current_block = origin;
4485 }
4486 p_block_stack->safe_push (block);
4487 }
4488 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4489 {
4490 NOTE_BLOCK (insn) = p_block_stack->pop ();
4491 current_block = BLOCK_SUPERCONTEXT (current_block);
4492 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4493 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4494 prev_beg = NULL_TREE;
4495 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4496 ? NOTE_BLOCK (insn) : NULL_TREE;
4497 }
4498 }
4499 else
4500 {
4501 prev_beg = NULL_TREE;
4502 if (prev_end)
4503 BLOCK_SAME_RANGE (prev_end) = 0;
4504 prev_end = NULL_TREE;
4505 }
4506 }
4507 }
4508
4509 /* Reverse the order of elements in the chain T of blocks,
4510 and return the new head of the chain (old last element). */
4511
4512 tree
4513 blocks_nreverse (tree t)
4514 {
4515 tree prev = 0, block, next;
4516 for (block = t; block; block = next)
4517 {
4518 next = BLOCK_CHAIN (block);
4519 BLOCK_CHAIN (block) = prev;
4520 prev = block;
4521 }
4522 return prev;
4523 }
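/* Editorial sketch, not part of GCC: blocks_nreverse above is the standard
   in-place reversal of a singly linked chain; on a toy node type the same
   loop looks like this.  */
#if 0
struct toy_node { struct toy_node *chain; };

static struct toy_node *
toy_nreverse (struct toy_node *t)
{
  struct toy_node *prev = 0, *node, *next;
  for (node = t; node; node = next)
    {
      next = node->chain;
      node->chain = prev;       /* Point each node back at its predecessor.  */
      prev = node;
    }
  return prev;                  /* The old last element is the new head.  */
}
#endif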
4524
4525 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4526 by modifying the last node in chain 1 to point to chain 2. */
4527
4528 tree
4529 block_chainon (tree op1, tree op2)
4530 {
4531 tree t1;
4532
4533 if (!op1)
4534 return op2;
4535 if (!op2)
4536 return op1;
4537
4538 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4539 continue;
4540 BLOCK_CHAIN (t1) = op2;
4541
4542 #ifdef ENABLE_TREE_CHECKING
4543 {
4544 tree t2;
4545 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4546 gcc_assert (t2 != t1);
4547 }
4548 #endif
4549
4550 return op1;
4551 }
4552
4553 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4554 non-NULL, list them all into VECTOR, in a depth-first preorder
4555 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4556 blocks. */
4557
4558 static int
4559 all_blocks (tree block, tree *vector)
4560 {
4561 int n_blocks = 0;
4562
4563 while (block)
4564 {
4565 TREE_ASM_WRITTEN (block) = 0;
4566
4567 /* Record this block. */
4568 if (vector)
4569 vector[n_blocks] = block;
4570
4571 ++n_blocks;
4572
4573 /* Record the subblocks, and their subblocks... */
4574 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4575 vector ? vector + n_blocks : 0);
4576 block = BLOCK_CHAIN (block);
4577 }
4578
4579 return n_blocks;
4580 }
4581
4582 /* Return a vector containing all the blocks rooted at BLOCK. The
4583 number of elements in the vector is stored in N_BLOCKS_P. The
4584 vector is dynamically allocated; it is the caller's responsibility
4585 to call `free' on the pointer returned. */
4586
4587 static tree *
4588 get_block_vector (tree block, int *n_blocks_p)
4589 {
4590 tree *block_vector;
4591
4592 *n_blocks_p = all_blocks (block, NULL);
4593 block_vector = XNEWVEC (tree, *n_blocks_p);
4594 all_blocks (block, block_vector);
4595
4596 return block_vector;
4597 }
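/* Editorial sketch, not part of GCC: a hypothetical caller of
   get_block_vector.  Per the comment above, the returned vector is
   heap-allocated and must be freed by the caller; process_block is a
   made-up helper.  */
#if 0
int n_blocks;
tree *blocks = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
for (int i = 0; i < n_blocks; i++)
  process_block (blocks[i]);
free (blocks);
#endif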
4598
4599 static GTY(()) int next_block_index = 2;
4600
4601 /* Set BLOCK_NUMBER for all the blocks in FN. */
4602
4603 void
4604 number_blocks (tree fn)
4605 {
4606 int i;
4607 int n_blocks;
4608 tree *block_vector;
4609
4610 /* For XCOFF debugging output, we start numbering the blocks
4611 from 1 within each function, rather than keeping a running
4612 count. */
4613 #if defined (XCOFF_DEBUGGING_INFO)
4614 if (write_symbols == XCOFF_DEBUG)
4615 next_block_index = 1;
4616 #endif
4617
4618 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4619
4620 /* The top-level BLOCK isn't numbered at all. */
4621 for (i = 1; i < n_blocks; ++i)
4622 /* We number the blocks from two. */
4623 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4624
4625 free (block_vector);
4626
4627 return;
4628 }
4629
4630 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4631
4632 DEBUG_FUNCTION tree
4633 debug_find_var_in_block_tree (tree var, tree block)
4634 {
4635 tree t;
4636
4637 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4638 if (t == var)
4639 return block;
4640
4641 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4642 {
4643 tree ret = debug_find_var_in_block_tree (var, t);
4644 if (ret)
4645 return ret;
4646 }
4647
4648 return NULL_TREE;
4649 }
4650 \f
4651 /* Keep track of whether we're in a dummy function context. If we are,
4652 we don't want to invoke the set_current_function hook, because we'll
4653 get into trouble if the hook calls target_reinit () recursively or
4654 when the initial initialization is not yet complete. */
4655
4656 static bool in_dummy_function;
4657
4658 /* Invoke the target hook when setting cfun. Update the optimization options
4659 if the function uses different options than the default. */
4660
4661 static void
4662 invoke_set_current_function_hook (tree fndecl)
4663 {
4664 if (!in_dummy_function)
4665 {
4666 tree opts = ((fndecl)
4667 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4668 : optimization_default_node);
4669
4670 if (!opts)
4671 opts = optimization_default_node;
4672
4673 /* Change optimization options if needed. */
4674 if (optimization_current_node != opts)
4675 {
4676 optimization_current_node = opts;
4677 cl_optimization_restore (&global_options, &global_options_set,
4678 TREE_OPTIMIZATION (opts));
4679 }
4680
4681 targetm.set_current_function (fndecl);
4682 this_fn_optabs = this_target_optabs;
4683
4684 /* Initialize global alignment variables after the options have been set. */
4685 parse_alignment_opts ();
4686
4687 if (opts != optimization_default_node)
4688 {
4689 init_tree_optimization_optabs (opts);
4690 if (TREE_OPTIMIZATION_OPTABS (opts))
4691 this_fn_optabs = (struct target_optabs *)
4692 TREE_OPTIMIZATION_OPTABS (opts);
4693 }
4694 }
4695 }
4696
4697 /* cfun should never be set directly; use this function. */
4698
4699 void
4700 set_cfun (struct function *new_cfun, bool force)
4701 {
4702 if (cfun != new_cfun || force)
4703 {
4704 cfun = new_cfun;
4705 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4706 redirect_edge_var_map_empty ();
4707 }
4708 }
4709
4710 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4711
4712 static vec<function *> cfun_stack;
4713
4714 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4715 current_function_decl accordingly. */
4716
4717 void
4718 push_cfun (struct function *new_cfun)
4719 {
4720 gcc_assert ((!cfun && !current_function_decl)
4721 || (cfun && current_function_decl == cfun->decl));
4722 cfun_stack.safe_push (cfun);
4723 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4724 set_cfun (new_cfun);
4725 }
4726
4727 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4728
4729 void
4730 pop_cfun (void)
4731 {
4732 struct function *new_cfun = cfun_stack.pop ();
4733 /* When in_dummy_function, we do have a cfun but current_function_decl is
4734 NULL. We also allow pushing NULL cfun and subsequently changing
4735 current_function_decl to something else and have both restored by
4736 pop_cfun. */
4737 gcc_checking_assert (in_dummy_function
4738 || !cfun
4739 || current_function_decl == cfun->decl);
4740 set_cfun (new_cfun);
4741 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4742 }
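/* Editorial sketch, not part of GCC proper: the usual pairing of push_cfun
   and pop_cfun when code needs to operate on another function temporarily.
   FN is a hypothetical FUNCTION_DECL with a struct function attached.  */
#if 0
push_cfun (DECL_STRUCT_FUNCTION (fn));
/* ... examine or modify the function through cfun ...  */
pop_cfun ();
#endif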
4743
4744 /* Return the current value of funcdef_no and increment it. */
4745 int
4746 get_next_funcdef_no (void)
4747 {
4748 return funcdef_no++;
4749 }
4750
4751 /* Return the current value of funcdef_no. */
4752 int
4753 get_last_funcdef_no (void)
4754 {
4755 return funcdef_no;
4756 }
4757
4758 /* Allocate and initialize the stack usage info data structure for the
4759 current function. */
4760 static void
4761 allocate_stack_usage_info (void)
4762 {
4763 gcc_assert (!cfun->su);
4764 cfun->su = ggc_cleared_alloc<stack_usage> ();
4765 cfun->su->static_stack_size = -1;
4766 }
4767
4768 /* Allocate a function structure for FNDECL and set its contents
4769 to the defaults. Set cfun to the newly-allocated object.
4770 Some of the helper functions invoked during initialization assume
4771 that cfun has already been set. Therefore, assign the new object
4772 directly into cfun and invoke the back end hook explicitly at the
4773 very end, rather than initializing a temporary and calling set_cfun
4774 on it.
4775
4776 ABSTRACT_P is true if this is a function that will never be seen by
4777 the middle-end. Such functions are front-end concepts (like C++
4778 function templates) that do not correspond directly to functions
4779 placed in object files. */
4780
4781 void
4782 allocate_struct_function (tree fndecl, bool abstract_p)
4783 {
4784 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4785
4786 cfun = ggc_cleared_alloc<function> ();
4787
4788 init_eh_for_function ();
4789
4790 if (init_machine_status)
4791 cfun->machine = (*init_machine_status) ();
4792
4793 #ifdef OVERRIDE_ABI_FORMAT
4794 OVERRIDE_ABI_FORMAT (fndecl);
4795 #endif
4796
4797 if (fndecl != NULL_TREE)
4798 {
4799 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4800 cfun->decl = fndecl;
4801 current_function_funcdef_no = get_next_funcdef_no ();
4802 }
4803
4804 invoke_set_current_function_hook (fndecl);
4805
4806 if (fndecl != NULL_TREE)
4807 {
4808 tree result = DECL_RESULT (fndecl);
4809
4810 if (!abstract_p)
4811 {
4812 /* Now that we have activated any function-specific attributes
4813 that might affect layout, particularly vector modes, relayout
4814 each of the parameters and the result. */
4815 relayout_decl (result);
4816 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4817 parm = DECL_CHAIN (parm))
4818 relayout_decl (parm);
4819
4820 /* Similarly relayout the function decl. */
4821 targetm.target_option.relayout_function (fndecl);
4822 }
4823
4824 if (!abstract_p && aggregate_value_p (result, fndecl))
4825 {
4826 #ifdef PCC_STATIC_STRUCT_RETURN
4827 cfun->returns_pcc_struct = 1;
4828 #endif
4829 cfun->returns_struct = 1;
4830 }
4831
4832 cfun->stdarg = stdarg_p (fntype);
4833
4834 /* Assume all registers in stdarg functions need to be saved. */
4835 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4836 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4837
4838 /* ??? This could be set on a per-function basis by the front-end
4839 but is this worth the hassle? */
4840 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4841 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4842
4843 if (!profile_flag && !flag_instrument_function_entry_exit)
4844 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4845
4846 if (flag_callgraph_info)
4847 allocate_stack_usage_info ();
4848 }
4849
4850 /* Don't enable begin stmt markers if var-tracking at assignments is
4851 disabled. The markers make little sense without the variable
4852 binding annotations among them. */
4853 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4854 && MAY_HAVE_DEBUG_MARKER_STMTS;
4855 }
4856
4857 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4858 instead of just setting it. */
4859
4860 void
4861 push_struct_function (tree fndecl)
4862 {
4863 /* When in_dummy_function we might be in the middle of a pop_cfun and
4864 current_function_decl and cfun may not match. */
4865 gcc_assert (in_dummy_function
4866 || (!cfun && !current_function_decl)
4867 || (cfun && current_function_decl == cfun->decl));
4868 cfun_stack.safe_push (cfun);
4869 current_function_decl = fndecl;
4870 allocate_struct_function (fndecl, false);
4871 }
4872
4873 /* Reset crtl and other non-struct-function variables to defaults as
4874 appropriate for emitting rtl at the start of a function. */
4875
4876 static void
4877 prepare_function_start (void)
4878 {
4879 gcc_assert (!get_last_insn ());
4880
4881 if (in_dummy_function)
4882 crtl->abi = &default_function_abi;
4883 else
4884 crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
4885
4886 init_temp_slots ();
4887 init_emit ();
4888 init_varasm_status ();
4889 init_expr ();
4890 default_rtl_profile ();
4891
4892 if (flag_stack_usage_info && !flag_callgraph_info)
4893 allocate_stack_usage_info ();
4894
4895 cse_not_expected = ! optimize;
4896
4897 /* Caller save not needed yet. */
4898 caller_save_needed = 0;
4899
4900 /* We haven't done register allocation yet. */
4901 reg_renumber = 0;
4902
4903 /* Indicate that we have not instantiated virtual registers yet. */
4904 virtuals_instantiated = 0;
4905
4906 /* Indicate that we want CONCATs now. */
4907 generating_concat_p = 1;
4908
4909 /* Indicate we have no need of a frame pointer yet. */
4910 frame_pointer_needed = 0;
4911 }
4912
4913 void
4914 push_dummy_function (bool with_decl)
4915 {
4916 tree fn_decl, fn_type, fn_result_decl;
4917
4918 gcc_assert (!in_dummy_function);
4919 in_dummy_function = true;
4920
4921 if (with_decl)
4922 {
4923 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4924 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4925 fn_type);
4926 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4927 NULL_TREE, void_type_node);
4928 DECL_RESULT (fn_decl) = fn_result_decl;
4929 }
4930 else
4931 fn_decl = NULL_TREE;
4932
4933 push_struct_function (fn_decl);
4934 }
4935
4936 /* Initialize the rtl expansion mechanism so that we can do simple things
4937 like generate sequences. This is used to provide a context during global
4938 initialization of some passes. You must call expand_dummy_function_end
4939 to exit this context. */
4940
4941 void
4942 init_dummy_function_start (void)
4943 {
4944 push_dummy_function (false);
4945 prepare_function_start ();
4946 }
4947
4948 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4949 and initialize static variables for generating RTL for the statements
4950 of the function. */
4951
4952 void
4953 init_function_start (tree subr)
4954 {
4955 /* Initialize backend, if needed. */
4956 initialize_rtl ();
4957
4958 prepare_function_start ();
4959 decide_function_section (subr);
4960
4961 /* Warn if the function's return value is an aggregate type,
4962 regardless of which calling convention we are using for it. */
4963 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4964 warning (OPT_Waggregate_return, "function returns an aggregate");
4965 }
4966
4967 /* Expand code to verify the stack_protect_guard. This is invoked at
4968 the end of a function to be protected. */
4969
4970 void
4971 stack_protect_epilogue (void)
4972 {
4973 tree guard_decl = crtl->stack_protect_guard_decl;
4974 rtx_code_label *label = gen_label_rtx ();
4975 rtx x, y;
4976 rtx_insn *seq = NULL;
4977
4978 x = expand_normal (crtl->stack_protect_guard);
4979
4980 if (targetm.have_stack_protect_combined_test () && guard_decl)
4981 {
4982 gcc_assert (DECL_P (guard_decl));
4983 y = DECL_RTL (guard_decl);
4984 /* Allow the target to compute the address of Y and compare it with X
4985 without leaking Y into a register. This combined address + compare
4986 pattern allows the target to prevent spilling of any intermediate
4987 results by splitting it after the register allocator has run. */
4988 seq = targetm.gen_stack_protect_combined_test (x, y, label);
4989 }
4990 else
4991 {
4992 if (guard_decl)
4993 y = expand_normal (guard_decl);
4994 else
4995 y = const0_rtx;
4996
4997 /* Allow the target to compare Y with X without leaking either into
4998 a register. */
4999 if (targetm.have_stack_protect_test ())
5000 seq = targetm.gen_stack_protect_test (x, y, label);
5001 }
5002
5003 if (seq)
5004 emit_insn (seq);
5005 else
5006 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5007
5008 /* The noreturn predictor has been moved to the tree level. The rtl-level
5009 predictors estimate this branch at about 20%, which isn't enough to get
5010 things moved out of line. Since this is the only extant case of adding
5011 a noreturn function at the rtl level, it doesn't seem worth doing anything
5012 except adding the prediction by hand. */
5013 rtx_insn *tmp = get_last_insn ();
5014 if (JUMP_P (tmp))
5015 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5016
5017 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5018 free_temp_slots ();
5019 emit_label (label);
5020 }
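/* Editorial sketch, not part of GCC: at the source level the check emitted
   above behaves roughly like the following, where frame_guard stands for
   the canary copy saved in this frame and __stack_chk_fail is the failure
   routine typically returned by targetm.stack_protect_fail.  */
#if 0
if (frame_guard == __stack_chk_guard)
  goto ok;                      /* Expected, predicted-taken path.  */
__stack_chk_fail ();            /* Does not return.  */
ok:;
#endif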
5021 \f
5022 /* Start the RTL for a new function, and set variables used for
5023 emitting RTL.
5024 SUBR is the FUNCTION_DECL node. */
5027
5028 void
5029 expand_function_start (tree subr)
5030 {
5031 /* Make sure volatile mem refs aren't considered
5032 valid operands of arithmetic insns. */
5033 init_recog_no_volatile ();
5034
5035 crtl->profile
5036 = (profile_flag
5037 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5038
5039 crtl->limit_stack
5040 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5041
5042 /* Make the label for return statements to jump to. Do not special
5043 case machines with special return instructions -- they will be
5044 handled later during jump, ifcvt, or epilogue creation. */
5045 return_label = gen_label_rtx ();
5046
5047 /* Initialize rtx used to return the value. */
5048 /* Do this before assign_parms so that we copy the struct value address
5049 before any library calls that assign parms might generate. */
5050
5051 /* Decide whether to return the value in memory or in a register. */
5052 tree res = DECL_RESULT (subr);
5053 if (aggregate_value_p (res, subr))
5054 {
5055 /* Returning something that won't go in a register. */
5056 rtx value_address = 0;
5057
5058 #ifdef PCC_STATIC_STRUCT_RETURN
5059 if (cfun->returns_pcc_struct)
5060 {
5061 int size = int_size_in_bytes (TREE_TYPE (res));
5062 value_address = assemble_static_space (size);
5063 }
5064 else
5065 #endif
5066 {
5067 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5068 /* Expect to be passed the address of a place to store the value.
5069 If it is passed as an argument, assign_parms will take care of
5070 it. */
5071 if (sv)
5072 {
5073 value_address = gen_reg_rtx (Pmode);
5074 emit_move_insn (value_address, sv);
5075 }
5076 }
5077 if (value_address)
5078 {
5079 rtx x = value_address;
5080 if (!DECL_BY_REFERENCE (res))
5081 {
5082 x = gen_rtx_MEM (DECL_MODE (res), x);
5083 set_mem_attributes (x, res, 1);
5084 }
5085 set_parm_rtl (res, x);
5086 }
5087 }
5088 else if (DECL_MODE (res) == VOIDmode)
5089 /* If return mode is void, this decl rtl should not be used. */
5090 set_parm_rtl (res, NULL_RTX);
5091 else
5092 {
5093 /* Compute the return values into a pseudo reg, which we will copy
5094 into the true return register after the cleanups are done. */
5095 tree return_type = TREE_TYPE (res);
5096
5097 /* If we may coalesce this result, make sure it has the expected mode
5098 in case it was promoted. But we need not bother about BLKmode. */
5099 machine_mode promoted_mode
5100 = flag_tree_coalesce_vars && is_gimple_reg (res)
5101 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5102 : BLKmode;
5103
5104 if (promoted_mode != BLKmode)
5105 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5106 else if (TYPE_MODE (return_type) != BLKmode
5107 && targetm.calls.return_in_msb (return_type))
5108 /* expand_function_end will insert the appropriate padding in
5109 this case. Use the return value's natural (unpadded) mode
5110 within the function proper. */
5111 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5112 else
5113 {
5114 /* In order to figure out what mode to use for the pseudo, we
5115 figure out what the mode of the eventual return register will
5116 actually be, and use that. */
5117 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5118
5119 /* Structures that are returned in registers are not
5120 aggregate_value_p, so we may see a PARALLEL or a REG. */
5121 if (REG_P (hard_reg))
5122 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5123 else
5124 {
5125 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5126 set_parm_rtl (res, gen_group_rtx (hard_reg));
5127 }
5128 }
5129
5130 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5131 result to the real return register(s). */
5132 DECL_REGISTER (res) = 1;
5133 }
5134
5135 /* Initialize rtx for parameters and local variables.
5136 In some cases this requires emitting insns. */
5137 assign_parms (subr);
5138
5139 /* If function gets a static chain arg, store it. */
5140 if (cfun->static_chain_decl)
5141 {
5142 tree parm = cfun->static_chain_decl;
5143 rtx local, chain;
5144 rtx_insn *insn;
5145 int unsignedp;
5146
5147 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5148 chain = targetm.calls.static_chain (current_function_decl, true);
5149
5150 set_decl_incoming_rtl (parm, chain, false);
5151 set_parm_rtl (parm, local);
5152 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5153
5154 if (GET_MODE (local) != GET_MODE (chain))
5155 {
5156 convert_move (local, chain, unsignedp);
5157 insn = get_last_insn ();
5158 }
5159 else
5160 insn = emit_move_insn (local, chain);
5161
5162 /* Mark the register as eliminable, similar to parameters. */
5163 if (MEM_P (chain)
5164 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5165 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5166
5167 /* If we aren't optimizing, save the static chain onto the stack. */
5168 if (!optimize)
5169 {
5170 tree saved_static_chain_decl
5171 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5172 DECL_NAME (parm), TREE_TYPE (parm));
5173 rtx saved_static_chain_rtx
5174 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5175 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5176 emit_move_insn (saved_static_chain_rtx, chain);
5177 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5178 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5179 }
5180 }
5181
5182 /* The following was moved from init_function_start.
5183 The move was supposed to make sdb output more accurate. */
5184 /* Indicate the beginning of the function body,
5185 as opposed to parm setup. */
5186 emit_note (NOTE_INSN_FUNCTION_BEG);
5187
5188 gcc_assert (NOTE_P (get_last_insn ()));
5189
5190 parm_birth_insn = get_last_insn ();
5191
5192 /* If the function receives a non-local goto, then store the
5193 bits we need to restore the frame pointer. */
5194 if (cfun->nonlocal_goto_save_area)
5195 {
5196 tree t_save;
5197 rtx r_save;
5198
5199 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5200 gcc_assert (DECL_RTL_SET_P (var));
5201
5202 t_save = build4 (ARRAY_REF,
5203 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5204 cfun->nonlocal_goto_save_area,
5205 integer_zero_node, NULL_TREE, NULL_TREE);
5206 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5207 gcc_assert (GET_MODE (r_save) == Pmode);
5208
5209 emit_move_insn (r_save, hard_frame_pointer_rtx);
5210 update_nonlocal_goto_save_area ();
5211 }
5212
5213 if (crtl->profile)
5214 {
5215 #ifdef PROFILE_HOOK
5216 PROFILE_HOOK (current_function_funcdef_no);
5217 #endif
5218 }
5219
5220 /* If we are doing generic stack checking, the probe should go here. */
5221 if (flag_stack_check == GENERIC_STACK_CHECK)
5222 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5223 }
5224 \f
5225 void
5226 pop_dummy_function (void)
5227 {
5228 pop_cfun ();
5229 in_dummy_function = false;
5230 }
5231
5232 /* Undo the effects of init_dummy_function_start. */
5233 void
5234 expand_dummy_function_end (void)
5235 {
5236 gcc_assert (in_dummy_function);
5237
5238 /* End any sequences that failed to be closed due to syntax errors. */
5239 while (in_sequence_p ())
5240 end_sequence ();
5241
5242 /* Outside function body, can't compute type's actual size
5243 until next function's body starts. */
5244
5245 free_after_parsing (cfun);
5246 free_after_compilation (cfun);
5247 pop_dummy_function ();
5248 }
5249
5250 /* Helper for diddle_return_value. */
5251
5252 void
5253 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5254 {
5255 if (! outgoing)
5256 return;
5257
5258 if (REG_P (outgoing))
5259 (*doit) (outgoing, arg);
5260 else if (GET_CODE (outgoing) == PARALLEL)
5261 {
5262 int i;
5263
5264 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5265 {
5266 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5267
5268 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5269 (*doit) (x, arg);
5270 }
5271 }
5272 }
5273
5274 /* Call DOIT for each hard register used as a return value from
5275 the current function. */
5276
5277 void
5278 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5279 {
5280 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5281 }
5282
5283 static void
5284 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5285 {
5286 emit_clobber (reg);
5287 }
5288
5289 void
5290 clobber_return_register (void)
5291 {
5292 diddle_return_value (do_clobber_return_reg, NULL);
5293
5294 /* In case we do use a pseudo to return the value, clobber it too. */
5295 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5296 {
5297 tree decl_result = DECL_RESULT (current_function_decl);
5298 rtx decl_rtl = DECL_RTL (decl_result);
5299 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5300 {
5301 do_clobber_return_reg (decl_rtl, NULL);
5302 }
5303 }
5304 }
5305
5306 static void
5307 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5308 {
5309 emit_use (reg);
5310 }
5311
5312 static void
5313 use_return_register (void)
5314 {
5315 diddle_return_value (do_use_return_reg, NULL);
5316 }
5317
5318 /* Generate RTL for the end of the current function. */
5319
5320 void
5321 expand_function_end (void)
5322 {
5323 /* If arg_pointer_save_area was referenced only from a nested
5324 function, we will not have initialized it yet. Do that now. */
5325 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5326 get_arg_pointer_save_area ();
5327
5328 /* If we are doing generic stack checking and this function makes calls,
5329 do a stack probe at the start of the function to ensure we have enough
5330 space for another stack frame. */
5331 if (flag_stack_check == GENERIC_STACK_CHECK)
5332 {
5333 rtx_insn *insn, *seq;
5334
5335 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5336 if (CALL_P (insn))
5337 {
5338 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5339 start_sequence ();
5340 if (STACK_CHECK_MOVING_SP)
5341 anti_adjust_stack_and_probe (max_frame_size, true);
5342 else
5343 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5344 seq = get_insns ();
5345 end_sequence ();
5346 set_insn_locations (seq, prologue_location);
5347 emit_insn_before (seq, stack_check_probe_note);
5348 break;
5349 }
5350 }
5351
5352 /* End any sequences that failed to be closed due to syntax errors. */
5353 while (in_sequence_p ())
5354 end_sequence ();
5355
5356 clear_pending_stack_adjust ();
5357 do_pending_stack_adjust ();
5358
5359 /* Output a line number for the end of the function.
5360 SDB depended on this. */
5361 set_curr_insn_location (input_location);
5362
5363 /* Before the return label (if any), clobber the return
5364 registers so that they are not propagated live to the rest of
5365 the function. This can only happen with functions that drop
5366 through; if there had been a return statement, there would
5367 have either been a return rtx, or a jump to the return label.
5368
5369 We delay the actual code generation until after current_function_value_rtx
5370 is computed. */
5371 rtx_insn *clobber_after = get_last_insn ();
5372
5373 /* Output the label for the actual return from the function. */
5374 emit_label (return_label);
5375
5376 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5377 {
5378 /* Let except.c know where it should emit the call to unregister
5379 the function context for sjlj exceptions. */
5380 if (flag_exceptions)
5381 sjlj_emit_function_exit_after (get_last_insn ());
5382 }
5383
5384 /* If this is an implementation of throw, do what's necessary to
5385 communicate between __builtin_eh_return and the epilogue. */
5386 expand_eh_return ();
5387
5388 /* If stack protection is enabled for this function, check the guard. */
5389 if (crtl->stack_protect_guard
5390 && targetm.stack_protect_runtime_enabled_p ()
5391 && naked_return_label == NULL_RTX)
5392 stack_protect_epilogue ();
5393
5394 /* If scalar return value was computed in a pseudo-reg, or was a named
5395 return value that got dumped to the stack, copy that to the hard
5396 return register. */
5397 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5398 {
5399 tree decl_result = DECL_RESULT (current_function_decl);
5400 rtx decl_rtl = DECL_RTL (decl_result);
5401
5402 if (REG_P (decl_rtl)
5403 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5404 : DECL_REGISTER (decl_result))
5405 {
5406 rtx real_decl_rtl = crtl->return_rtx;
5407 complex_mode cmode;
5408
5409 /* This should be set in assign_parms. */
5410 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5411
5412 /* If this is a BLKmode structure being returned in registers,
5413 then use the mode computed in expand_return. Note that if
5414 decl_rtl is memory, then its mode may have been changed,
5415 but that crtl->return_rtx has not. */
5416 if (GET_MODE (real_decl_rtl) == BLKmode)
5417 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5418
5419 /* If a non-BLKmode return value should be padded at the least
5420 significant end of the register, shift it left by the appropriate
5421 amount. BLKmode results are handled using the group load/store
5422 machinery. */
5423 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5424 && REG_P (real_decl_rtl)
5425 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5426 {
5427 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5428 REGNO (real_decl_rtl)),
5429 decl_rtl);
5430 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5431 }
5432 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5433 {
5434 /* If expand_function_start has created a PARALLEL for decl_rtl,
5435 move the result to the real return registers. Otherwise, do
5436 a group load from decl_rtl for a named return. */
5437 if (GET_CODE (decl_rtl) == PARALLEL)
5438 emit_group_move (real_decl_rtl, decl_rtl);
5439 else
5440 emit_group_load (real_decl_rtl, decl_rtl,
5441 TREE_TYPE (decl_result),
5442 int_size_in_bytes (TREE_TYPE (decl_result)));
5443 }
5444 /* In the case of complex integer modes smaller than a word, we'll
5445 need to generate some non-trivial bitfield insertions. Do that
5446 on a pseudo and not the hard register. */
5447 else if (GET_CODE (decl_rtl) == CONCAT
5448 && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5449 && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5450 {
5451 int old_generating_concat_p;
5452 rtx tmp;
5453
5454 old_generating_concat_p = generating_concat_p;
5455 generating_concat_p = 0;
5456 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5457 generating_concat_p = old_generating_concat_p;
5458
5459 emit_move_insn (tmp, decl_rtl);
5460 emit_move_insn (real_decl_rtl, tmp);
5461 }
5462 /* If a named return value was dumped to memory, then we may
5463 need to re-do the PROMOTE_MODE signed/unsigned
5464 extension. */
5465 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5466 {
5467 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5468 promote_function_mode (TREE_TYPE (decl_result),
5469 GET_MODE (decl_rtl), &unsignedp,
5470 TREE_TYPE (current_function_decl), 1);
5471
5472 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5473 }
5474 else
5475 emit_move_insn (real_decl_rtl, decl_rtl);
5476 }
5477 }
5478
5479 /* If returning a structure, arrange to return the address of the value
5480 in a place where debuggers expect to find it.
5481
5482 If returning a structure PCC style,
5483 the caller also depends on this value.
5484 And cfun->returns_pcc_struct is not necessarily set. */
5485 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5486 && !targetm.calls.omit_struct_return_reg)
5487 {
5488 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5489 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5490 rtx outgoing;
5491
5492 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5493 type = TREE_TYPE (type);
5494 else
5495 value_address = XEXP (value_address, 0);
5496
5497 outgoing = targetm.calls.function_value (build_pointer_type (type),
5498 current_function_decl, true);
5499
5500 /* Mark this as a function return value so integrate will delete the
5501 assignment and USE below when inlining this function. */
5502 REG_FUNCTION_VALUE_P (outgoing) = 1;
5503
5504 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5505 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5506 value_address = convert_memory_address (mode, value_address);
5507
5508 emit_move_insn (outgoing, value_address);
5509
5510 /* Show the return register used to hold the result (in this case the
5511 address of the result). */
5512 crtl->return_rtx = outgoing;
5513 }
5514
5515 /* Emit the actual code to clobber return register. Don't emit
5516 it if clobber_after is a barrier, then the previous basic block
5517 certainly doesn't fall thru into the exit block. */
5518 if (!BARRIER_P (clobber_after))
5519 {
5520 start_sequence ();
5521 clobber_return_register ();
5522 rtx_insn *seq = get_insns ();
5523 end_sequence ();
5524
5525 emit_insn_after (seq, clobber_after);
5526 }
5527
5528 /* Output the label for the naked return from the function. */
5529 if (naked_return_label)
5530 emit_label (naked_return_label);
5531
5532 /* @@@ This is a kludge. We want to ensure that instructions that
5533 may trap are not moved into the epilogue by scheduling, because
5534 we don't always emit unwind information for the epilogue. */
5535 if (cfun->can_throw_non_call_exceptions
5536 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5537 emit_insn (gen_blockage ());
5538
5539 /* If stack protection is enabled for this function, check the guard. */
5540 if (crtl->stack_protect_guard
5541 && targetm.stack_protect_runtime_enabled_p ()
5542 && naked_return_label)
5543 stack_protect_epilogue ();
5544
5545 /* If we had calls to alloca, and this machine needs
5546 an accurate stack pointer to exit the function,
5547 insert some code to save and restore the stack pointer. */
5548 if (! EXIT_IGNORE_STACK
5549 && cfun->calls_alloca)
5550 {
5551 rtx tem = 0;
5552
5553 start_sequence ();
5554 emit_stack_save (SAVE_FUNCTION, &tem);
5555 rtx_insn *seq = get_insns ();
5556 end_sequence ();
5557 emit_insn_before (seq, parm_birth_insn);
5558
5559 emit_stack_restore (SAVE_FUNCTION, tem);
5560 }
5561
5562 /* ??? This should no longer be necessary since stupid is no longer with
5563 us, but there are some parts of the compiler (e.g. reload_combine and
5564 sh mach_dep_reorg) that still try to compute their own lifetime info
5565 instead of using the general framework. */
5566 use_return_register ();
5567 }
5568
5569 rtx
5570 get_arg_pointer_save_area (void)
5571 {
5572 rtx ret = arg_pointer_save_area;
5573
5574 if (! ret)
5575 {
5576 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5577 arg_pointer_save_area = ret;
5578 }
5579
5580 if (! crtl->arg_pointer_save_area_init)
5581 {
5582 /* Save the arg pointer at the beginning of the function. The
5583 generated stack slot may not be a valid memory address, so we
5584 have to check it and fix it if necessary. */
5585 start_sequence ();
5586 emit_move_insn (validize_mem (copy_rtx (ret)),
5587 crtl->args.internal_arg_pointer);
5588 rtx_insn *seq = get_insns ();
5589 end_sequence ();
5590
5591 push_topmost_sequence ();
5592 emit_insn_after (seq, entry_of_function ());
5593 pop_topmost_sequence ();
5594
5595 crtl->arg_pointer_save_area_init = true;
5596 }
5597
5598 return ret;
5599 }
5600 \f
5601
5602 /* If debugging dumps are requested, dump information about how the
5603 target handled -fstack-check=clash for the prologue.
5604
5605 PROBES describes what if any probes were emitted.
5606
5607 RESIDUALS indicates if the prologue had any residual allocation
5608 (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
5609
5610 void
5611 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5612 {
5613 if (!dump_file)
5614 return;
5615
5616 switch (probes)
5617 {
5618 case NO_PROBE_NO_FRAME:
5619 fprintf (dump_file,
5620 "Stack clash no probe no stack adjustment in prologue.\n");
5621 break;
5622 case NO_PROBE_SMALL_FRAME:
5623 fprintf (dump_file,
5624 "Stack clash no probe small stack adjustment in prologue.\n");
5625 break;
5626 case PROBE_INLINE:
5627 fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5628 break;
5629 case PROBE_LOOP:
5630 fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5631 break;
5632 }
5633
5634 if (residuals)
5635 fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5636 else
5637 fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5638
5639 if (frame_pointer_needed)
5640 fprintf (dump_file, "Stack clash frame pointer needed.\n");
5641 else
5642 fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5643
5644 if (TREE_THIS_VOLATILE (cfun->decl))
5645 fprintf (dump_file,
5646 "Stack clash noreturn prologue, assuming no implicit"
5647 " probes in caller.\n");
5648 else
5649 fprintf (dump_file,
5650 "Stack clash not noreturn prologue.\n");
5651 }
5652
5653 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5654 for the first time. */
5655
5656 static void
5657 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5658 {
5659 rtx_insn *tmp;
5660 hash_table<insn_cache_hasher> *hash = *hashp;
5661
5662 if (hash == NULL)
5663 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5664
5665 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5666 {
5667 rtx *slot = hash->find_slot (tmp, INSERT);
5668 gcc_assert (*slot == NULL);
5669 *slot = tmp;
5670 }
5671 }
5672
5673 /* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
5674 basic block, by splitting, or by peepholes. If INSN is a prologue or
5675 epilogue insn, then record COPY as well. */
5676
5677 void
5678 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5679 {
5680 hash_table<insn_cache_hasher> *hash;
5681 rtx *slot;
5682
5683 hash = epilogue_insn_hash;
5684 if (!hash || !hash->find (insn))
5685 {
5686 hash = prologue_insn_hash;
5687 if (!hash || !hash->find (insn))
5688 return;
5689 }
5690
5691 slot = hash->find_slot (copy, INSERT);
5692 gcc_assert (*slot == NULL);
5693 *slot = copy;
5694 }
5695
5696 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5697 we can be running after reorg, SEQUENCE rtl is possible. */
5698
5699 static bool
5700 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5701 {
5702 if (hash == NULL)
5703 return false;
5704
5705 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5706 {
5707 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5708 int i;
5709 for (i = seq->len () - 1; i >= 0; i--)
5710 if (hash->find (seq->element (i)))
5711 return true;
5712 return false;
5713 }
5714
5715 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5716 }
5717
5718 int
5719 prologue_contains (const rtx_insn *insn)
5720 {
5721 return contains (insn, prologue_insn_hash);
5722 }
5723
5724 int
5725 epilogue_contains (const rtx_insn *insn)
5726 {
5727 return contains (insn, epilogue_insn_hash);
5728 }
5729
5730 int
5731 prologue_epilogue_contains (const rtx_insn *insn)
5732 {
5733 if (contains (insn, prologue_insn_hash))
5734 return 1;
5735 if (contains (insn, epilogue_insn_hash))
5736 return 1;
5737 return 0;
5738 }
5739
5740 void
5741 record_prologue_seq (rtx_insn *seq)
5742 {
5743 record_insns (seq, NULL, &prologue_insn_hash);
5744 }
5745
5746 void
5747 record_epilogue_seq (rtx_insn *seq)
5748 {
5749 record_insns (seq, NULL, &epilogue_insn_hash);
5750 }
5751
5752 /* Set JUMP_LABEL for a return insn. */
5753
5754 void
5755 set_return_jump_label (rtx_insn *returnjump)
5756 {
5757 rtx pat = PATTERN (returnjump);
5758 if (GET_CODE (pat) == PARALLEL)
5759 pat = XVECEXP (pat, 0, 0);
5760 if (ANY_RETURN_P (pat))
5761 JUMP_LABEL (returnjump) = pat;
5762 else
5763 JUMP_LABEL (returnjump) = ret_rtx;
5764 }
5765
5766 /* Return a sequence to be used as the split prologue for the current
5767 function, or NULL. */
5768
5769 static rtx_insn *
5770 make_split_prologue_seq (void)
5771 {
5772 if (!flag_split_stack
5773 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5774 return NULL;
5775
5776 start_sequence ();
5777 emit_insn (targetm.gen_split_stack_prologue ());
5778 rtx_insn *seq = get_insns ();
5779 end_sequence ();
5780
5781 record_insns (seq, NULL, &prologue_insn_hash);
5782 set_insn_locations (seq, prologue_location);
5783
5784 return seq;
5785 }
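/* Illustrative usage (a sketch; IO_LOOP is hypothetical): the split-stack
   prologue above is only generated under -fsplit-stack, and an individual
   function can opt out via the attribute that make_split_prologue_seq
   checks first:

     void io_loop (void) __attribute__ ((no_split_stack));  */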
5786
5787 /* Return a sequence to be used as the prologue for the current function,
5788 or NULL. */
5789
5790 static rtx_insn *
5791 make_prologue_seq (void)
5792 {
5793 if (!targetm.have_prologue ())
5794 return NULL;
5795
5796 start_sequence ();
5797 rtx_insn *seq = targetm.gen_prologue ();
5798 emit_insn (seq);
5799
5800 /* Insert an explicit USE for the frame pointer
5801 if the profiling is on and the frame pointer is required. */
5802 if (crtl->profile && frame_pointer_needed)
5803 emit_use (hard_frame_pointer_rtx);
5804
5805 /* Retain a map of the prologue insns. */
5806 record_insns (seq, NULL, &prologue_insn_hash);
5807 emit_note (NOTE_INSN_PROLOGUE_END);
5808
5809 /* Ensure that instructions are not moved into the prologue when
5810 profiling is on. The call to the profiling routine can be
5811 emitted within the live range of a call-clobbered register. */
5812 if (!targetm.profile_before_prologue () && crtl->profile)
5813 emit_insn (gen_blockage ());
5814
5815 seq = get_insns ();
5816 end_sequence ();
5817 set_insn_locations (seq, prologue_location);
5818
5819 return seq;
5820 }
5821
5822 /* Emit a sequence of insns to zero the call-used registers before RET
5823 according to ZERO_REGS_TYPE. */
5824
5825 static void
5826 gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
5827 {
5828 bool only_gpr = true;
5829 bool only_used = true;
5830 bool only_arg = true;
5831
5832 /* No need to zero call-used-regs in main (). */
5833 if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
5834 return;
5835
5836 /* No need to zero call-used-regs if __builtin_eh_return is called
5837 since it isn't a normal function return. */
5838 if (crtl->calls_eh_return)
5839 return;
5840
5841 /* If only_gpr is true, only zero call-used registers that are
5842 general-purpose registers; if only_used is true, only zero
5843 call-used registers that are used in the current function;
5844 if only_arg is true, only zero call-used registers that pass
5845 parameters defined by the platform's calling convention. */
5846
5847 using namespace zero_regs_flags;
5848
5849 only_gpr = zero_regs_type & ONLY_GPR;
5850 only_used = zero_regs_type & ONLY_USED;
5851 only_arg = zero_regs_type & ONLY_ARG;
5852
5853 /* For each of the hard registers, we should zero it if:
5854 1. it is a call-used register;
5855 and 2. it is not a fixed register;
5856 and 3. it is not live at the return of the routine;
5857 and 4. it is a general register if only_gpr is true;
5858 and 5. it is used in the routine if only_used is true;
5859 and 6. it is a register that passes a parameter if only_arg is true. */
5860
5861 /* First, prepare the data flow information. */
5862 basic_block bb = BLOCK_FOR_INSN (ret);
5863 auto_bitmap live_out;
5864 bitmap_copy (live_out, df_get_live_out (bb));
5865 df_simulate_initialize_backwards (bb, live_out);
5866 df_simulate_one_insn_backwards (bb, ret, live_out);
5867
5868 HARD_REG_SET selected_hardregs;
5869 CLEAR_HARD_REG_SET (selected_hardregs);
5870 for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5871 {
5872 if (!crtl->abi->clobbers_full_reg_p (regno))
5873 continue;
5874 if (fixed_regs[regno])
5875 continue;
5876 if (REGNO_REG_SET_P (live_out, regno))
5877 continue;
5878 if (only_gpr
5879 && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
5880 continue;
5881 if (only_used && !df_regs_ever_live_p (regno))
5882 continue;
5883 if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
5884 continue;
5885 #ifdef LEAF_REG_REMAP
5886 if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
5887 continue;
5888 #endif
5889
5890 /* Now this is a register that we might want to zero. */
5891 SET_HARD_REG_BIT (selected_hardregs, regno);
5892 }
5893
5894 if (hard_reg_set_empty_p (selected_hardregs))
5895 return;
5896
5897 /* Now that we have a hard register set that needs to be zeroed, pass it to
5898 the target to generate the zeroing sequence. */
5899 HARD_REG_SET zeroed_hardregs;
5900 start_sequence ();
5901 zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
5902 rtx_insn *seq = get_insns ();
5903 end_sequence ();
5904 if (seq)
5905 {
5906 /* Emit the memory blockage and register clobber asm volatile before
5907 the whole sequence. */
5908 start_sequence ();
5909 expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
5910 rtx_insn *seq_barrier = get_insns ();
5911 end_sequence ();
5912
5913 emit_insn_before (seq_barrier, ret);
5914 emit_insn_before (seq, ret);
5915
5916 /* Update the data flow information. */
5917 crtl->must_be_zero_on_return |= zeroed_hardregs;
5918 df_set_bb_dirty (EXIT_BLOCK_PTR_FOR_FN (cfun));
5919 }
5920 }
5921
5922
5923 /* Return a sequence to be used as the epilogue for the current function,
5924 or NULL. */
5925
5926 static rtx_insn *
5927 make_epilogue_seq (void)
5928 {
5929 if (!targetm.have_epilogue ())
5930 return NULL;
5931
5932 start_sequence ();
5933 emit_note (NOTE_INSN_EPILOGUE_BEG);
5934 rtx_insn *seq = targetm.gen_epilogue ();
5935 if (seq)
5936 emit_jump_insn (seq);
5937
5938 /* Retain a map of the epilogue insns. */
5939 record_insns (seq, NULL, &epilogue_insn_hash);
5940 set_insn_locations (seq, epilogue_location);
5941
5942 seq = get_insns ();
5943 rtx_insn *returnjump = get_last_insn ();
5944 end_sequence ();
5945
5946 if (JUMP_P (returnjump))
5947 set_return_jump_label (returnjump);
5948
5949 return seq;
5950 }
5951
5952
5953 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5954 this into place with notes indicating where the prologue ends and where
5955 the epilogue begins. Update the basic block information when possible.
5956
5957 Notes on epilogue placement:
5958 There are several kinds of edges to the exit block:
5959 * a single fallthru edge from LAST_BB
5960 * possibly, edges from blocks containing sibcalls
5961 * possibly, fake edges from infinite loops
5962
5963 The epilogue is always emitted on the fallthru edge from the last basic
5964 block in the function, LAST_BB, into the exit block.
5965
5966 If LAST_BB is empty except for a label, it is the target of every
5967 other basic block in the function that ends in a return. If a
5968 target has a return or simple_return pattern (possibly with
5969 conditional variants), these basic blocks can be changed so that a
5970 return insn is emitted into them, and their target is adjusted to
5971 the real exit block.
5972
5973 Notes on shrink wrapping: We implement a fairly conservative
5974 version of shrink-wrapping rather than the textbook one. We only
5975 generate a single prologue and a single epilogue. This is
5976 sufficient to catch a number of interesting cases involving early
5977 exits.
5978
5979 First, we identify the blocks that require the prologue to occur before
5980 them. These are the ones that modify a call-saved register, or reference
5981 any of the stack or frame pointer registers. To simplify things, we then
5982 mark everything reachable from these blocks as also requiring a prologue.
5983 This takes care of loops automatically, and avoids the need to examine
5984 whether MEMs reference the frame, since it is sufficient to check for
5985 occurrences of the stack or frame pointer.
5986
5987 We then compute the set of blocks for which the need for a prologue
5988 is anticipatable (borrowing terminology from the shrink-wrapping
5989 description in Muchnick's book). These are the blocks which either
5990 require a prologue themselves, or those that have only successors
5991 where the prologue is anticipatable. The prologue needs to be
5992 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5993 is not. For the moment, we ensure that only one such edge exists.
5994
5995 The epilogue is placed as described above, but we make a
5996 distinction between inserting return and simple_return patterns
5997 when modifying other blocks that end in a return. Blocks that end
5998 in a sibcall omit the sibcall_epilogue if the block is not in
5999 ANTIC. */
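/* Illustrative example (a sketch; HEAVY_WORK is hypothetical): the kind of
   early exit this conservative scheme catches is

     int f (struct s *p)
     {
       if (!p)
         return -1;            -- touches no call-saved registers
       return heavy_work (p);
     }

   Only the block making the call needs the prologue, so the prologue is
   emitted on the single edge entering that block and the early-return
   path skips it entirely.  */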
6000
6001 void
6002 thread_prologue_and_epilogue_insns (void)
6003 {
6004 df_analyze ();
6005
6006 /* Can't deal with multiple successors of the entry block at the
6007 moment. Function should always have at least one entry
6008 point. */
6009 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6010
6011 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6012 edge orig_entry_edge = entry_edge;
6013
6014 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
6015 rtx_insn *prologue_seq = make_prologue_seq ();
6016 rtx_insn *epilogue_seq = make_epilogue_seq ();
6017
6018 /* Try to perform a kind of shrink-wrapping, making sure the
6019 prologue/epilogue is emitted only around those parts of the
6020 function that require it. */
6021 try_shrink_wrapping (&entry_edge, prologue_seq);
6022
6023 /* If the target can handle splitting the prologue/epilogue into separate
6024 components, try to shrink-wrap these components separately. */
6025 try_shrink_wrapping_separate (entry_edge->dest);
6026
6027 /* If that did anything for any component, we now need to generate the
6028 "main" prologue again. Because some targets require some of these
6029 to be called in a specific order (i386 requires the split prologue
6030 to be first, for example), we create all three sequences again here.
6031 If this does not work for some target, that target should not enable
6032 separate shrink-wrapping. */
6033 if (crtl->shrink_wrapped_separate)
6034 {
6035 split_prologue_seq = make_split_prologue_seq ();
6036 prologue_seq = make_prologue_seq ();
6037 epilogue_seq = make_epilogue_seq ();
6038 }
6039
6040 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6041
6042 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6043 this marker for the splits of EH_RETURN patterns, and nothing else
6044 uses the flag in the meantime. */
6045 epilogue_completed = 1;
6046
6047 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6048 some targets, these get split to a special version of the epilogue
6049 code. In order to be able to properly annotate these with unwind
6050 info, try to split them now. If we get a valid split, drop an
6051 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6052 edge e;
6053 edge_iterator ei;
6054 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6055 {
6056 rtx_insn *prev, *last, *trial;
6057
6058 if (e->flags & EDGE_FALLTHRU)
6059 continue;
6060 last = BB_END (e->src);
6061 if (!eh_returnjump_p (last))
6062 continue;
6063
6064 prev = PREV_INSN (last);
6065 trial = try_split (PATTERN (last), last, 1);
6066 if (trial == last)
6067 continue;
6068
6069 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6070 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6071 }
6072
6073 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6074
6075 if (exit_fallthru_edge)
6076 {
6077 if (epilogue_seq)
6078 {
6079 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6080 commit_edge_insertions ();
6081
6082 /* The epilogue insns we inserted may cause the exit edge to no longer
6083 be fallthru. */
6084 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6085 {
6086 if (((e->flags & EDGE_FALLTHRU) != 0)
6087 && returnjump_p (BB_END (e->src)))
6088 e->flags &= ~EDGE_FALLTHRU;
6089 }
6090 }
6091 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6092 {
6093 /* We have a fall-through edge to the exit block, the source is not
6094 at the end of the function, and there will be an assembler epilogue
6095 at the end of the function.
6096 We can't use force_nonfallthru here, because that would try to
6097 use return. Inserting a jump 'by hand' is extremely messy, so
6098 we take advantage of cfg_layout_finalize using
6099 fixup_fallthru_exit_predecessor. */
6100 cfg_layout_initialize (0);
6101 basic_block cur_bb;
6102 FOR_EACH_BB_FN (cur_bb, cfun)
6103 if (cur_bb->index >= NUM_FIXED_BLOCKS
6104 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6105 cur_bb->aux = cur_bb->next_bb;
6106 cfg_layout_finalize ();
6107 }
6108 }
6109
6110 /* Insert the prologue. */
6111
6112 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6113
6114 if (split_prologue_seq || prologue_seq)
6115 {
6116 rtx_insn *split_prologue_insn = split_prologue_seq;
6117 if (split_prologue_seq)
6118 {
6119 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6120 split_prologue_insn = NEXT_INSN (split_prologue_insn);
6121 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6122 }
6123
6124 rtx_insn *prologue_insn = prologue_seq;
6125 if (prologue_seq)
6126 {
6127 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6128 prologue_insn = NEXT_INSN (prologue_insn);
6129 insert_insn_on_edge (prologue_seq, entry_edge);
6130 }
6131
6132 commit_edge_insertions ();
6133
6134 /* Look for basic blocks within the prologue insns. */
6135 if (split_prologue_insn
6136 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6137 split_prologue_insn = NULL;
6138 if (prologue_insn
6139 && BLOCK_FOR_INSN (prologue_insn) == NULL)
6140 prologue_insn = NULL;
6141 if (split_prologue_insn || prologue_insn)
6142 {
6143 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6144 bitmap_clear (blocks);
6145 if (split_prologue_insn)
6146 bitmap_set_bit (blocks,
6147 BLOCK_FOR_INSN (split_prologue_insn)->index);
6148 if (prologue_insn)
6149 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6150 find_many_sub_basic_blocks (blocks);
6151 }
6152 }
6153
6154 default_rtl_profile ();
6155
6156 /* Emit sibling epilogues before any sibling call sites. */
6157 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6158 (e = ei_safe_edge (ei));
6159 ei_next (&ei))
6160 {
6161 /* Skip those already handled, the ones that run without a prologue. */
6162 if (e->flags & EDGE_IGNORE)
6163 {
6164 e->flags &= ~EDGE_IGNORE;
6165 continue;
6166 }
6167
6168 rtx_insn *insn = BB_END (e->src);
6169
6170 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6171 continue;
6172
6173 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6174 {
6175 start_sequence ();
6176 emit_note (NOTE_INSN_EPILOGUE_BEG);
6177 emit_insn (ep_seq);
6178 rtx_insn *seq = get_insns ();
6179 end_sequence ();
6180
6181 /* Retain a map of the epilogue insns. Used in life analysis to
6182 avoid getting rid of sibcall epilogue insns. Do this before we
6183 actually emit the sequence. */
6184 record_insns (seq, NULL, &epilogue_insn_hash);
6185 set_insn_locations (seq, epilogue_location);
6186
6187 emit_insn_before (seq, insn);
6188 }
6189 }
6190
6191 if (epilogue_seq)
6192 {
6193 rtx_insn *insn, *next;
6194
6195 /* Similarly, move any line notes that appear after the epilogue.
6196 There is no need, however, to be quite so anal about the existence
6197 of such a note. Also possibly move
6198 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6199 info generation. */
6200 for (insn = epilogue_seq; insn; insn = next)
6201 {
6202 next = NEXT_INSN (insn);
6203 if (NOTE_P (insn)
6204 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6205 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6206 }
6207 }
6208
6209 /* Threading the prologue and epilogue changes the artificial refs
6210 in the entry and exit blocks. */
6211 epilogue_completed = 1;
6212 df_update_entry_exit_and_calls ();
6213 }
6214
6215 /* Reposition the prologue-end and epilogue-begin notes after
6216 instruction scheduling. */
6217
6218 void
6219 reposition_prologue_and_epilogue_notes (void)
6220 {
6221 if (!targetm.have_prologue ()
6222 && !targetm.have_epilogue ()
6223 && !targetm.have_sibcall_epilogue ())
6224 return;
6225
6226 /* Since the hash table is created on demand, the fact that it is
6227 non-null is a signal that it is non-empty. */
6228 if (prologue_insn_hash != NULL)
6229 {
6230 size_t len = prologue_insn_hash->elements ();
6231 rtx_insn *insn, *last = NULL, *note = NULL;
6232
6233 /* Scan from the beginning until we reach the last prologue insn. */
6234 /* ??? While we do have the CFG intact, there are two problems:
6235 (1) The prologue can contain loops (typically probing the stack),
6236 which means that the end of the prologue isn't in the first bb.
6237 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6238 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6239 {
6240 if (NOTE_P (insn))
6241 {
6242 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6243 note = insn;
6244 }
6245 else if (contains (insn, prologue_insn_hash))
6246 {
6247 last = insn;
6248 if (--len == 0)
6249 break;
6250 }
6251 }
6252
6253 if (last)
6254 {
6255 if (note == NULL)
6256 {
6257 /* Scan forward looking for the PROLOGUE_END note. It should
6258 be right at the beginning of the block, possibly with other
6259 insn notes that got moved there. */
6260 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6261 {
6262 if (NOTE_P (note)
6263 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6264 break;
6265 }
6266 }
6267
6268 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6269 if (LABEL_P (last))
6270 last = NEXT_INSN (last);
6271 reorder_insns (note, note, last);
6272 }
6273 }
6274
6275 if (epilogue_insn_hash != NULL)
6276 {
6277 edge_iterator ei;
6278 edge e;
6279
6280 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6281 {
6282 rtx_insn *insn, *first = NULL, *note = NULL;
6283 basic_block bb = e->src;
6284
6285 /* Scan from the beginning until we reach the first epilogue insn. */
6286 FOR_BB_INSNS (bb, insn)
6287 {
6288 if (NOTE_P (insn))
6289 {
6290 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6291 {
6292 note = insn;
6293 if (first != NULL)
6294 break;
6295 }
6296 }
6297 else if (first == NULL && contains (insn, epilogue_insn_hash))
6298 {
6299 first = insn;
6300 if (note != NULL)
6301 break;
6302 }
6303 }
6304
6305 if (note)
6306 {
6307 /* If the function has a single basic block, and no real
6308 epilogue insns (e.g. sibcall with no cleanup), the
6309 epilogue note can get scheduled before the prologue
6310 note. If we have frame related prologue insns, having
6311 them scanned during the epilogue will result in a crash.
6312 In this case re-order the epilogue note to just before
6313 the last insn in the block. */
6314 if (first == NULL)
6315 first = BB_END (bb);
6316
6317 if (PREV_INSN (first) != note)
6318 reorder_insns (note, note, PREV_INSN (first));
6319 }
6320 }
6321 }
6322 }
6323
6324 /* Returns the name of function declared by FNDECL. */
6325 const char *
6326 fndecl_name (tree fndecl)
6327 {
6328 if (fndecl == NULL)
6329 return "(nofn)";
6330 return lang_hooks.decl_printable_name (fndecl, 1);
6331 }
6332
6333 /* Returns the name of function FN. */
6334 const char *
6335 function_name (struct function *fn)
6336 {
6337 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6338 return fndecl_name (fndecl);
6339 }
6340
6341 /* Returns the name of the current function. */
6342 const char *
6343 current_function_name (void)
6344 {
6345 return function_name (cfun);
6346 }
6347 \f
6348
6349 static unsigned int
6350 rest_of_handle_check_leaf_regs (void)
6351 {
6352 #ifdef LEAF_REGISTERS
6353 crtl->uses_only_leaf_regs
6354 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6355 #endif
6356 return 0;
6357 }
6358
6359 /* Insert a TYPE into the used types hash table of CFUN. */
6360
6361 static void
6362 used_types_insert_helper (tree type, struct function *func)
6363 {
6364 if (type != NULL && func != NULL)
6365 {
6366 if (func->used_types_hash == NULL)
6367 func->used_types_hash = hash_set<tree>::create_ggc (37);
6368
6369 func->used_types_hash->add (type);
6370 }
6371 }
6372
6373 /* Given a type, insert it into the used hash table in cfun. */
6374 void
6375 used_types_insert (tree t)
6376 {
6377 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6378 if (TYPE_NAME (t))
6379 break;
6380 else
6381 t = TREE_TYPE (t);
6382 if (TREE_CODE (t) == ERROR_MARK)
6383 return;
6384 if (TYPE_NAME (t) == NULL_TREE
6385 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6386 t = TYPE_MAIN_VARIANT (t);
6387 if (debug_info_level > DINFO_LEVEL_NONE)
6388 {
6389 if (cfun)
6390 used_types_insert_helper (t, cfun);
6391 else
6392 {
6393 /* So this might be a type referenced by a global variable.
6394 Record that type so that we can later decide to emit its
6395 debug information. */
6396 vec_safe_push (types_used_by_cur_var_decl, t);
6397 }
6398 }
6399 }
6400
6401 /* Helper to Hash a struct types_used_by_vars_entry. */
6402
6403 static hashval_t
6404 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6405 {
6406 gcc_assert (entry && entry->var_decl && entry->type);
6407
6408 return iterative_hash_object (entry->type,
6409 iterative_hash_object (entry->var_decl, 0));
6410 }
6411
6412 /* Hash function of the types_used_by_vars_entry hash table. */
6413
6414 hashval_t
6415 used_type_hasher::hash (types_used_by_vars_entry *entry)
6416 {
6417 return hash_types_used_by_vars_entry (entry);
6418 }
6419
6420 /* Equality function of the types_used_by_vars_entry hash table. */
6421
6422 bool
6423 used_type_hasher::equal (types_used_by_vars_entry *e1,
6424 types_used_by_vars_entry *e2)
6425 {
6426 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6427 }
6428
6429 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6430
6431 void
6432 types_used_by_var_decl_insert (tree type, tree var_decl)
6433 {
6434 if (type != NULL && var_decl != NULL)
6435 {
6436 types_used_by_vars_entry **slot;
6437 struct types_used_by_vars_entry e;
6438 e.var_decl = var_decl;
6439 e.type = type;
6440 if (types_used_by_vars_hash == NULL)
6441 types_used_by_vars_hash
6442 = hash_table<used_type_hasher>::create_ggc (37);
6443
6444 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6445 if (*slot == NULL)
6446 {
6447 struct types_used_by_vars_entry *entry;
6448 entry = ggc_alloc<types_used_by_vars_entry> ();
6449 entry->type = type;
6450 entry->var_decl = var_decl;
6451 *slot = entry;
6452 }
6453 }
6454 }
6455
6456 namespace {
6457
6458 const pass_data pass_data_leaf_regs =
6459 {
6460 RTL_PASS, /* type */
6461 "*leaf_regs", /* name */
6462 OPTGROUP_NONE, /* optinfo_flags */
6463 TV_NONE, /* tv_id */
6464 0, /* properties_required */
6465 0, /* properties_provided */
6466 0, /* properties_destroyed */
6467 0, /* todo_flags_start */
6468 0, /* todo_flags_finish */
6469 };
6470
6471 class pass_leaf_regs : public rtl_opt_pass
6472 {
6473 public:
6474 pass_leaf_regs (gcc::context *ctxt)
6475 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6476 {}
6477
6478 /* opt_pass methods: */
6479 virtual unsigned int execute (function *)
6480 {
6481 return rest_of_handle_check_leaf_regs ();
6482 }
6483
6484 }; // class pass_leaf_regs
6485
6486 } // anon namespace
6487
6488 rtl_opt_pass *
6489 make_pass_leaf_regs (gcc::context *ctxt)
6490 {
6491 return new pass_leaf_regs (ctxt);
6492 }
6493
6494 static unsigned int
6495 rest_of_handle_thread_prologue_and_epilogue (void)
6496 {
6497 /* prepare_shrink_wrap is sensitive to the block structure of the control
6498 flow graph, so clean it up first. */
6499 if (optimize)
6500 cleanup_cfg (0);
6501
6502 /* On some machines, the prologue and epilogue code, or parts thereof,
6503 can be represented as RTL. Doing so lets us schedule insns between
6504 it and the rest of the code and also allows delayed branch
6505 scheduling to operate in the epilogue. */
6506 thread_prologue_and_epilogue_insns ();
6507
6508 /* Some non-cold blocks may now be only reachable from cold blocks.
6509 Fix that up. */
6510 fixup_partitions ();
6511
6512 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6513 see PR57320. */
6514 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6515
6516 /* The stack usage info is finalized during prologue expansion. */
6517 if (flag_stack_usage_info || flag_callgraph_info)
6518 output_stack_usage ();
6519
6520 return 0;
6521 }
6522
6523 /* Record a final call to CALLEE at LOCATION. */
6524
6525 void
6526 record_final_call (tree callee, location_t location)
6527 {
6528 struct callinfo_callee datum = { location, callee };
6529 vec_safe_push (cfun->su->callees, datum);
6530 }
6531
6532 /* Record a dynamic allocation made for DECL_OR_EXP. */
6533
6534 void
6535 record_dynamic_alloc (tree decl_or_exp)
6536 {
6537 struct callinfo_dalloc datum;
6538
6539 if (DECL_P (decl_or_exp))
6540 {
6541 datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
6542 const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
6543 const char *dot = strrchr (name, '.');
6544 if (dot)
6545 name = dot + 1;
6546 datum.name = ggc_strdup (name);
6547 }
6548 else
6549 {
6550 datum.location = EXPR_LOCATION (decl_or_exp);
6551 datum.name = NULL;
6552 }
6553
6554 vec_safe_push (cfun->su->dallocs, datum);
6555 }
6556
6557 namespace {
6558
6559 const pass_data pass_data_thread_prologue_and_epilogue =
6560 {
6561 RTL_PASS, /* type */
6562 "pro_and_epilogue", /* name */
6563 OPTGROUP_NONE, /* optinfo_flags */
6564 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6565 0, /* properties_required */
6566 0, /* properties_provided */
6567 0, /* properties_destroyed */
6568 0, /* todo_flags_start */
6569 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6570 };
6571
6572 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6573 {
6574 public:
6575 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6576 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6577 {}
6578
6579 /* opt_pass methods: */
6580 virtual unsigned int execute (function *)
6581 {
6582 return rest_of_handle_thread_prologue_and_epilogue ();
6583 }
6584
6585 }; // class pass_thread_prologue_and_epilogue
6586
6587 } // anon namespace
6588
6589 rtl_opt_pass *
6590 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6591 {
6592 return new pass_thread_prologue_and_epilogue (ctxt);
6593 }
6594
6595 namespace {
6596
6597 const pass_data pass_data_zero_call_used_regs =
6598 {
6599 RTL_PASS, /* type */
6600 "zero_call_used_regs", /* name */
6601 OPTGROUP_NONE, /* optinfo_flags */
6602 TV_NONE, /* tv_id */
6603 0, /* properties_required */
6604 0, /* properties_provided */
6605 0, /* properties_destroyed */
6606 0, /* todo_flags_start */
6607 0, /* todo_flags_finish */
6608 };
6609
6610 class pass_zero_call_used_regs: public rtl_opt_pass
6611 {
6612 public:
6613 pass_zero_call_used_regs (gcc::context *ctxt)
6614 : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
6615 {}
6616
6617 /* opt_pass methods: */
6618 virtual unsigned int execute (function *);
6619
6620 }; // class pass_zero_call_used_regs
6621
6622 unsigned int
6623 pass_zero_call_used_regs::execute (function *fun)
6624 {
6625 using namespace zero_regs_flags;
6626 unsigned int zero_regs_type = UNSET;
6627
6628 tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
6629 DECL_ATTRIBUTES (fun->decl));
6630
6631 /* Get the type of zero_call_used_regs from the function attribute.
6632 We have filtered out invalid attribute values already at this point. */
6633 if (attr_zero_regs)
6634 {
6635 /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
6636 is the attribute argument's value. */
6637 attr_zero_regs = TREE_VALUE (attr_zero_regs);
6638 gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
6639 attr_zero_regs = TREE_VALUE (attr_zero_regs);
6640 gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
6641
6642 for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
6643 if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
6644 zero_call_used_regs_opts[i].name) == 0)
6645 {
6646 zero_regs_type = zero_call_used_regs_opts[i].flag;
6647 break;
6648 }
6649 }
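/* Illustrative usage (a sketch; HANDLE_SECRET is hypothetical): the
   attribute parsed above is written as, e.g.,

     void handle_secret (void)
       __attribute__ ((zero_call_used_regs ("used-gpr")));

   and the same choices ("skip", "used-gpr", "all", ...) are accepted by
   the -fzero-call-used-regs= option consulted just below when no
   attribute is present.  */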
6650
6651 if (!zero_regs_type)
6652 zero_regs_type = flag_zero_call_used_regs;
6653
6654 /* No need to zero call-used-regs when no user request is present. */
6655 if (!(zero_regs_type & ENABLED))
6656 return 0;
6657
6658 edge_iterator ei;
6659 edge e;
6660
6661 /* This pass needs data flow information. */
6662 df_analyze ();
6663
6664 /* Iterate over the function's return instructions and insert any
6665 register zeroing required by the -fzero-call-used-regs command-line
6666 option or the "zero_call_used_regs" function attribute. */
6667 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6668 {
6669 rtx_insn *insn = BB_END (e->src);
6670 if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
6671 gen_call_used_regs_seq (insn, zero_regs_type);
6672 }
6673
6674 return 0;
6675 }
6676
6677 } // anon namespace
6678
6679 rtl_opt_pass *
6680 make_pass_zero_call_used_regs (gcc::context *ctxt)
6681 {
6682 return new pass_zero_call_used_regs (ctxt);
6683 }
6684
6685 /* If CONSTRAINT is a matching constraint, then return its number.
6686 Otherwise, return -1. */
6687
6688 static int
6689 matching_constraint_num (const char *constraint)
6690 {
6691 if (*constraint == '%')
6692 constraint++;
6693
6694 if (IN_RANGE (*constraint, '0', '9'))
6695 return strtoul (constraint, NULL, 10);
6696
6697 return -1;
6698 }
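/* Illustrative behavior of matching_constraint_num on constraint strings
   of the kind this pass sees (a sketch, not exhaustive):

     "0"    ->  0     input must match output operand 0
     "%2"   ->  2     the commutative marker is skipped first
     "mr"   -> -1     not a matching constraint

   Multi-digit operand numbers such as "10" work because the whole digit
   prefix is parsed with strtoul.  */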
6699
6700 /* This mini-pass fixes fall-out from SSA in asm statements that have
6701 in-out constraints. Say you start with
6702
6703 orig = inout;
6704 asm ("": "+mr" (inout));
6705 use (orig);
6706
6707 which is transformed very early to use explicit output and match operands:
6708
6709 orig = inout;
6710 asm ("": "=mr" (inout) : "0" (inout));
6711 use (orig);
6712
6713 Or, after SSA and copyprop,
6714
6715 asm ("": "=mr" (inout_2) : "0" (inout_1));
6716 use (inout_1);
6717
6718 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6719 they represent two separate values, so they will get different pseudo
6720 registers during expansion. Then, since the two operands need to match
6721 per the constraints, but use different pseudo registers, reload can
6722 only register a reload for these operands. But reloads can only be
6723 satisfied by hardregs, not by memory, so we need a register for this
6724 reload, just because we are presented with non-matching operands.
6725 So, even though we allow memory for this operand, no memory can be
6726 used for it, just because the two operands don't match. This can
6727 cause reload failures on register-starved targets.
6728
6729 So it's a symptom of reload not being able to use memory for reloads
6730 or, alternatively it's also a symptom of both operands not coming into
6731 reload as matching (in which case the pseudo could go to memory just
6732 fine, as the alternative allows it, and no reload would be necessary).
6733 We fix the latter problem here, by transforming
6734
6735 asm ("": "=mr" (inout_2) : "0" (inout_1));
6736
6737 back to
6738
6739 inout_2 = inout_1;
6740 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6741
6742 static void
6743 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6744 {
6745 int i;
6746 bool changed = false;
6747 rtx op = SET_SRC (p_sets[0]);
6748 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6749 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6750 bool *output_matched = XALLOCAVEC (bool, noutputs);
6751
6752 memset (output_matched, 0, noutputs * sizeof (bool));
6753 for (i = 0; i < ninputs; i++)
6754 {
6755 rtx input, output;
6756 rtx_insn *insns;
6757 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6758 int match, j;
6759
6760 match = matching_constraint_num (constraint);
6761 if (match < 0)
6762 continue;
6763
6764 gcc_assert (match < noutputs);
6765 output = SET_DEST (p_sets[match]);
6766 input = RTVEC_ELT (inputs, i);
6767 /* Only do the transformation for pseudos. */
6768 if (! REG_P (output)
6769 || rtx_equal_p (output, input)
6770 || !(REG_P (input) || SUBREG_P (input)
6771 || MEM_P (input) || CONSTANT_P (input))
6772 || !general_operand (input, GET_MODE (output)))
6773 continue;
6774
6775 /* We can't do anything if the output is also used as input,
6776 as we're going to overwrite it. */
6777 for (j = 0; j < ninputs; j++)
6778 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6779 break;
6780 if (j != ninputs)
6781 continue;
6782
6783 /* Avoid changing the same input several times. For
6784 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6785 only change it once (to out1), rather than changing it
6786 first to out1 and afterwards to out2. */
6787 if (i > 0)
6788 {
6789 for (j = 0; j < noutputs; j++)
6790 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6791 break;
6792 if (j != noutputs)
6793 continue;
6794 }
6795 output_matched[match] = true;
6796
6797 start_sequence ();
6798 emit_move_insn (output, copy_rtx (input));
6799 insns = get_insns ();
6800 end_sequence ();
6801 emit_insn_before (insns, insn);
6802
6803 constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
6804 bool early_clobber_p = strchr (constraint, '&') != NULL;
6805
6806 /* Now replace all mentions of the input with output. We can't
6807 just replace the occurrence in inputs[i], as the register might
6808 also be used in some other input (or even in an address of an
6809 output), which would mean possibly increasing the number of
6810 inputs by one (namely 'output' in addition), which might pose
6811 a too complicated problem for reload to solve. E.g. this situation:
6812
6813 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6814
6815 Here 'input' is used in two occurrences as input (once for the
6816 input operand, once for the address in the second output operand).
6817 If we would replace only the occurrence of the input operand (to
6818 make the matching) we would be left with this:
6819
6820 output = input
6821 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6822
6823 Now we suddenly have two different input values (containing the same
6824 value, but different pseudos) where we formerly had only one.
6825 With more complicated asms this might lead to reload failures
6826 which wouldn't have happened without this pass. So, iterate over
6827 all operands and replace all occurrences of the register used.
6828
6829 However, if one or more of the 'input' uses have a non-matching
6830 constraint and the matched output operand is an early clobber
6831 operand, then do not replace the input operand, since by definition
6832 it conflicts with the output operand and cannot share the same
6833 register. See PR89313 for details. */
6834
6835 for (j = 0; j < noutputs; j++)
6836 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6837 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6838 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6839 input, output);
6840 for (j = 0; j < ninputs; j++)
6841 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6842 {
6843 if (!early_clobber_p
6844 || match == matching_constraint_num
6845 (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6846 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6847 input, output);
6848 }
6849
6850 changed = true;
6851 }
6852
6853 if (changed)
6854 df_insn_rescan (insn);
6855 }
6856
6857 /* Add the decl D to the local_decls list of FUN. */
6858
6859 void
6860 add_local_decl (struct function *fun, tree d)
6861 {
6862 gcc_assert (VAR_P (d));
6863 vec_safe_push (fun->local_decls, d);
6864 }
6865
6866 namespace {
6867
6868 const pass_data pass_data_match_asm_constraints =
6869 {
6870 RTL_PASS, /* type */
6871 "asmcons", /* name */
6872 OPTGROUP_NONE, /* optinfo_flags */
6873 TV_NONE, /* tv_id */
6874 0, /* properties_required */
6875 0, /* properties_provided */
6876 0, /* properties_destroyed */
6877 0, /* todo_flags_start */
6878 0, /* todo_flags_finish */
6879 };
6880
6881 class pass_match_asm_constraints : public rtl_opt_pass
6882 {
6883 public:
6884 pass_match_asm_constraints (gcc::context *ctxt)
6885 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6886 {}
6887
6888 /* opt_pass methods: */
6889 virtual unsigned int execute (function *);
6890
6891 }; // class pass_match_asm_constraints
6892
6893 unsigned
6894 pass_match_asm_constraints::execute (function *fun)
6895 {
6896 basic_block bb;
6897 rtx_insn *insn;
6898 rtx pat, *p_sets;
6899 int noutputs;
6900
6901 if (!crtl->has_asm_statement)
6902 return 0;
6903
6904 df_set_flags (DF_DEFER_INSN_RESCAN);
6905 FOR_EACH_BB_FN (bb, fun)
6906 {
6907 FOR_BB_INSNS (bb, insn)
6908 {
6909 if (!INSN_P (insn))
6910 continue;
6911
6912 pat = PATTERN (insn);
6913 if (GET_CODE (pat) == PARALLEL)
6914 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6915 else if (GET_CODE (pat) == SET)
6916 p_sets = &PATTERN (insn), noutputs = 1;
6917 else
6918 continue;
6919
6920 if (GET_CODE (*p_sets) == SET
6921 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6922 match_asm_constraints_1 (insn, p_sets, noutputs);
6923 }
6924 }
6925
6926 return TODO_df_finish;
6927 }
6928
6929 } // anon namespace
6930
6931 rtl_opt_pass *
6932 make_pass_match_asm_constraints (gcc::context *ctxt)
6933 {
6934 return new pass_match_asm_constraints (ctxt);
6935 }
6936
6937
6938 #include "gt-function.h"