1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
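/* Illustrative sketch, not part of the original file: a typical use of
   assign_stack_local from expansion code allocates a frame slot for a
   DImode value with that mode's natural alignment:

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   Passing 0 for ALIGN means "align according to the mode"; the other
   accepted ALIGN values are documented above assign_stack_local_1 below.  */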
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "regs.h"
50 #include "emit-rtl.h"
51 #include "recog.h"
52 #include "rtl-error.h"
53 #include "alias.h"
54 #include "fold-const.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "except.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "expr.h"
62 #include "optabs-tree.h"
63 #include "output.h"
64 #include "langhooks.h"
65 #include "common/common-target.h"
66 #include "gimplify.h"
67 #include "tree-pass.h"
68 #include "cfgrtl.h"
69 #include "cfganal.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "cfgexpand.h"
73 #include "shrink-wrap.h"
74 #include "toplev.h"
75 #include "rtl-iter.h"
76 #include "tree-dfa.h"
77 #include "tree-ssa.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "gimple.h"
81 #include "options.h"
82
83 /* So we can assign to cfun in this file. */
84 #undef cfun
85
86 #ifndef STACK_ALIGNMENT_NEEDED
87 #define STACK_ALIGNMENT_NEEDED 1
88 #endif
89
90 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
91
92 /* Round a value down to the largest multiple of the required alignment
93    that does not exceed it.  Avoid using division in case the value is
94    negative.  Assume the alignment is a power of two.  */
95 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
96
97 /* Similar, but round to the next highest integer that meets the
98 alignment. */
99 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
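/* Worked example (illustrative): with ALIGN == 16, FLOOR_ROUND (37, 16)
   yields 32 and CEIL_ROUND (37, 16) yields 48.  Both reduce to a mask
   operation (plus one addition for CEIL_ROUND), which is why division
   is avoided.  */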
100
101 /* Nonzero once virtual register instantiation has been done.
102 assign_stack_local uses frame_pointer_rtx when this is nonzero.
103 calls.c:emit_library_call_value_1 uses it to set up
104 post-instantiation libcalls. */
105 int virtuals_instantiated;
106
107 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
108 static GTY(()) int funcdef_no;
109
110 /* These variables hold pointers to functions to create and destroy
111 target specific, per-function data structures. */
112 struct machine_function * (*init_machine_status) (void);
113
114 /* The currently compiled function. */
115 struct function *cfun = 0;
116
117 /* These hashes record the prologue and epilogue insns. */
118
119 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
120 {
121 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
122 static bool equal (rtx a, rtx b) { return a == b; }
123 };
124
125 static GTY((cache))
126 hash_table<insn_cache_hasher> *prologue_insn_hash;
127 static GTY((cache))
128 hash_table<insn_cache_hasher> *epilogue_insn_hash;
129 \f
130
131 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
132 vec<tree, va_gc> *types_used_by_cur_var_decl;
133
134 /* Forward declarations. */
135
136 static class temp_slot *find_temp_slot_from_address (rtx);
137 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
138 static void pad_below (struct args_size *, machine_mode, tree);
139 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
140 static int all_blocks (tree, tree *);
141 static tree *get_block_vector (tree, int *);
142 extern tree debug_find_var_in_block_tree (tree, tree);
143 /* We always define `record_insns' even if it's not used so that we
144 can always export `prologue_epilogue_contains'. */
145 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
146 ATTRIBUTE_UNUSED;
147 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
148 static void prepare_function_start (void);
149 static void do_clobber_return_reg (rtx, void *);
150 static void do_use_return_reg (rtx, void *);
151
152 \f
153 /* Stack of nested functions.  Used to keep track of the cfun stack
154    while nested functions are being compiled.  */
155
156 static vec<function *> function_context_stack;
157
158 /* Save the current context for compilation of a nested function.
159 This is called from language-specific code. */
160
161 void
162 push_function_context (void)
163 {
164 if (cfun == 0)
165 allocate_struct_function (NULL, false);
166
167 function_context_stack.safe_push (cfun);
168 set_cfun (NULL);
169 }
170
171 /* Restore the last saved context, at the end of a nested function.
172 This function is called from language-specific code. */
173
174 void
175 pop_function_context (void)
176 {
177 struct function *p = function_context_stack.pop ();
178 set_cfun (p);
179 current_function_decl = p->decl;
180
181 /* Reset variables that have known state during rtx generation. */
182 virtuals_instantiated = 0;
183 generating_concat_p = 1;
184 }
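/* Illustrative pairing, not from the original source: a front end
   typically brackets compilation of a nested function with

     push_function_context ();
     ... expand or queue the nested function ...
     pop_function_context ();

   so that cfun and current_function_decl are restored afterwards.  */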
185
186 /* Clear out all parts of the state in F that can safely be discarded
187 after the function has been parsed, but not compiled, to let
188 garbage collection reclaim the memory. */
189
190 void
191 free_after_parsing (struct function *f)
192 {
193 f->language = 0;
194 }
195
196 /* Clear out all parts of the state in F that can safely be discarded
197 after the function has been compiled, to let garbage collection
198 reclaim the memory. */
199
200 void
201 free_after_compilation (struct function *f)
202 {
203 prologue_insn_hash = NULL;
204 epilogue_insn_hash = NULL;
205
206 free (crtl->emit.regno_pointer_align);
207
208 memset (crtl, 0, sizeof (struct rtl_data));
209 f->eh = NULL;
210 f->machine = NULL;
211 f->cfg = NULL;
212 f->curr_properties &= ~PROP_cfg;
213
214 regno_reg_rtx = NULL;
215 }
216 \f
217 /* Return size needed for stack frame based on slots so far allocated.
218 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
219 the caller may have to do that. */
220
221 poly_int64
222 get_frame_size (void)
223 {
224 if (FRAME_GROWS_DOWNWARD)
225 return -frame_offset;
226 else
227 return frame_offset;
228 }
229
230 /* Issue an error message and return TRUE if frame OFFSET overflows in
231 the signed target pointer arithmetics for function FUNC. Otherwise
232 return FALSE. */
233
234 bool
235 frame_offset_overflow (poly_int64 offset, tree func)
236 {
237 poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
238 unsigned HOST_WIDE_INT limit
239 = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
240 /* Leave room for the fixed part of the frame. */
241 - 64 * UNITS_PER_WORD);
242
243 if (!coeffs_in_range_p (size, 0U, limit))
244 {
245 unsigned HOST_WIDE_INT hwisize;
246 if (size.is_constant (&hwisize))
247 error_at (DECL_SOURCE_LOCATION (func),
248 "total size of local objects %wu exceeds maximum %wu",
249 hwisize, limit);
250 else
251 error_at (DECL_SOURCE_LOCATION (func),
252 "total size of local objects exceeds maximum %wu",
253 limit);
254 return true;
255 }
256
257 return false;
258 }
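/* For instance, assuming a 64-bit Pmode and 8-byte UNITS_PER_WORD, the
   limit computed above is 2**63 - 512 bytes; a frame larger than that
   triggers the "total size of local objects" error.  */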
259
260 /* Return the minimum spill slot alignment for a register of mode MODE. */
261
262 unsigned int
263 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
264 {
265 return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
266 }
267
268 /* Return stack slot alignment in bits for TYPE and MODE. */
269
270 static unsigned int
271 get_stack_local_alignment (tree type, machine_mode mode)
272 {
273 unsigned int alignment;
274
275 if (mode == BLKmode)
276 alignment = BIGGEST_ALIGNMENT;
277 else
278 alignment = GET_MODE_ALIGNMENT (mode);
279
280 /* Allow the front end to (possibly) increase the alignment of this
281    stack slot.  */
282 if (! type)
283 type = lang_hooks.types.type_for_mode (mode, 0);
284
285 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
286 }
287
288 /* Determine whether it is possible to fit a stack slot of size SIZE and
289 alignment ALIGNMENT into an area in the stack frame that starts at
290 frame offset START and has a length of LENGTH. If so, store the frame
291 offset to be used for the stack slot in *POFFSET and return true;
292 return false otherwise. This function will extend the frame size when
293 given a start/length pair that lies at the end of the frame. */
294
295 static bool
296 try_fit_stack_local (poly_int64 start, poly_int64 length,
297 poly_int64 size, unsigned int alignment,
298 poly_int64_pod *poffset)
299 {
300 poly_int64 this_frame_offset;
301 int frame_off, frame_alignment, frame_phase;
302
303 /* Calculate how many bytes the start of local variables is off from
304 stack alignment. */
305 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
306 frame_off = targetm.starting_frame_offset () % frame_alignment;
307 frame_phase = frame_off ? frame_alignment - frame_off : 0;
308
309 /* Round the frame offset to the specified alignment. */
310
311 if (FRAME_GROWS_DOWNWARD)
312 this_frame_offset
313 = (aligned_lower_bound (start + length - size - frame_phase, alignment)
314 + frame_phase);
315 else
316 this_frame_offset
317 = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
318
319 /* See if it fits. If this space is at the edge of the frame,
320 consider extending the frame to make it fit. Our caller relies on
321 this when allocating a new slot. */
322 if (maybe_lt (this_frame_offset, start))
323 {
324 if (known_eq (frame_offset, start))
325 frame_offset = this_frame_offset;
326 else
327 return false;
328 }
329 else if (maybe_gt (this_frame_offset + size, start + length))
330 {
331 if (known_eq (frame_offset, start + length))
332 frame_offset = this_frame_offset + size;
333 else
334 return false;
335 }
336
337 *poffset = this_frame_offset;
338 return true;
339 }
340
341 /* Create a new frame_space structure describing free space in the stack
342 frame beginning at START and ending at END, and chain it into the
343 function's frame_space_list. */
344
345 static void
346 add_frame_space (poly_int64 start, poly_int64 end)
347 {
348 class frame_space *space = ggc_alloc<frame_space> ();
349 space->next = crtl->frame_space_list;
350 crtl->frame_space_list = space;
351 space->start = start;
352 space->length = end - start;
353 }
354
355 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
356 with machine mode MODE.
357
358 ALIGN controls the amount of alignment for the address of the slot:
359 0 means according to MODE,
360 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
361 -2 means use BITS_PER_UNIT,
362 positive specifies alignment boundary in bits.
363
364 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
365 alignment and ASLK_RECORD_PAD bit set if we should remember
366 extra space we allocated for alignment purposes. When we are
367 called from assign_stack_temp_for_type, it is not set so we don't
368 track the same stack slot in two independent lists.
369
370 We do not round to stack_boundary here. */
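/* Illustrative calls, not from the original source:

     assign_stack_local_1 (SImode, 4, 0, ASLK_RECORD_PAD)
       allocates a slot aligned according to SImode;

     assign_stack_local_1 (BLKmode, size, -1, ASLK_RECORD_PAD)
       allocates a slot aligned to BIGGEST_ALIGNMENT, with SIZE rounded
       up to a multiple of that alignment.  */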
371
372 rtx
373 assign_stack_local_1 (machine_mode mode, poly_int64 size,
374 int align, int kind)
375 {
376 rtx x, addr;
377 poly_int64 bigend_correction = 0;
378 poly_int64 slot_offset = 0, old_frame_offset;
379 unsigned int alignment, alignment_in_bits;
380
381 if (align == 0)
382 {
383 alignment = get_stack_local_alignment (NULL, mode);
384 alignment /= BITS_PER_UNIT;
385 }
386 else if (align == -1)
387 {
388 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
389 size = aligned_upper_bound (size, alignment);
390 }
391 else if (align == -2)
392 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
393 else
394 alignment = align / BITS_PER_UNIT;
395
396 alignment_in_bits = alignment * BITS_PER_UNIT;
397
398 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
399 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
400 {
401 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
402 alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
403 }
404
405 if (SUPPORTS_STACK_ALIGNMENT)
406 {
407 if (crtl->stack_alignment_estimated < alignment_in_bits)
408 {
409 if (!crtl->stack_realign_processed)
410 crtl->stack_alignment_estimated = alignment_in_bits;
411 else
412 {
413 /* If stack is realigned and stack alignment value
414 hasn't been finalized, it is OK not to increase
415 stack_alignment_estimated. The bigger alignment
416 requirement is recorded in stack_alignment_needed
417 below. */
418 gcc_assert (!crtl->stack_realign_finalized);
419 if (!crtl->stack_realign_needed)
420 {
421 /* It is OK to reduce the alignment as long as the
422 requested size is 0 or the estimated stack
423 alignment >= mode alignment. */
424 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
425 || known_eq (size, 0)
426 || (crtl->stack_alignment_estimated
427 >= GET_MODE_ALIGNMENT (mode)));
428 alignment_in_bits = crtl->stack_alignment_estimated;
429 alignment = alignment_in_bits / BITS_PER_UNIT;
430 }
431 }
432 }
433 }
434
435 if (crtl->stack_alignment_needed < alignment_in_bits)
436 crtl->stack_alignment_needed = alignment_in_bits;
437 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
438 crtl->max_used_stack_slot_alignment = alignment_in_bits;
439
440 if (mode != BLKmode || maybe_ne (size, 0))
441 {
442 if (kind & ASLK_RECORD_PAD)
443 {
444 class frame_space **psp;
445
446 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
447 {
448 class frame_space *space = *psp;
449 if (!try_fit_stack_local (space->start, space->length, size,
450 alignment, &slot_offset))
451 continue;
452 *psp = space->next;
453 if (known_gt (slot_offset, space->start))
454 add_frame_space (space->start, slot_offset);
455 if (known_lt (slot_offset + size, space->start + space->length))
456 add_frame_space (slot_offset + size,
457 space->start + space->length);
458 goto found_space;
459 }
460 }
461 }
462 else if (!STACK_ALIGNMENT_NEEDED)
463 {
464 slot_offset = frame_offset;
465 goto found_space;
466 }
467
468 old_frame_offset = frame_offset;
469
470 if (FRAME_GROWS_DOWNWARD)
471 {
472 frame_offset -= size;
473 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
474
475 if (kind & ASLK_RECORD_PAD)
476 {
477 if (known_gt (slot_offset, frame_offset))
478 add_frame_space (frame_offset, slot_offset);
479 if (known_lt (slot_offset + size, old_frame_offset))
480 add_frame_space (slot_offset + size, old_frame_offset);
481 }
482 }
483 else
484 {
485 frame_offset += size;
486 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
487
488 if (kind & ASLK_RECORD_PAD)
489 {
490 if (known_gt (slot_offset, old_frame_offset))
491 add_frame_space (old_frame_offset, slot_offset);
492 if (known_lt (slot_offset + size, frame_offset))
493 add_frame_space (slot_offset + size, frame_offset);
494 }
495 }
496
497 found_space:
498 /* On a big-endian machine, if we are allocating more space than we will use,
499 use the least significant bytes of those that are allocated. */
500 if (mode != BLKmode)
501 {
502 /* The slot size can sometimes be smaller than the mode size;
503 e.g. the rs6000 port allocates slots with a vector mode
504 that have the size of only one element. However, the slot
505 size must always be ordered wrt to the mode size, in the
506 same way as for a subreg. */
507 gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
508 if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
509 bigend_correction = size - GET_MODE_SIZE (mode);
510 }
511
512 /* If we have already instantiated virtual registers, return the actual
513 address relative to the frame pointer. */
514 if (virtuals_instantiated)
515 addr = plus_constant (Pmode, frame_pointer_rtx,
516 trunc_int_for_mode
517 (slot_offset + bigend_correction
518 + targetm.starting_frame_offset (), Pmode));
519 else
520 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
521 trunc_int_for_mode
522 (slot_offset + bigend_correction,
523 Pmode));
524
525 x = gen_rtx_MEM (mode, addr);
526 set_mem_align (x, alignment_in_bits);
527 MEM_NOTRAP_P (x) = 1;
528
529 vec_safe_push (stack_slot_list, x);
530
531 if (frame_offset_overflow (frame_offset, current_function_decl))
532 frame_offset = 0;
533
534 return x;
535 }
536
537 /* Wrapper around assign_stack_local_1, passing ASLK_RECORD_PAD as the KIND argument.  */
538
539 rtx
540 assign_stack_local (machine_mode mode, poly_int64 size, int align)
541 {
542 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
543 }
544 \f
545 /* In order to evaluate some expressions, such as function calls returning
546 structures in memory, we need to temporarily allocate stack locations.
547 We record each allocated temporary in the following structure.
548
549 Associated with each temporary slot is a nesting level. When we pop up
550 one level, all temporaries associated with the previous level are freed.
551 Normally, all temporaries are freed after the execution of the statement
552 in which they were created. However, if we are inside a ({...}) grouping,
553 the result may be in a temporary and hence must be preserved. If the
554 result could be in a temporary, we preserve it if we can determine which
555 one it is in. If we cannot determine which temporary may contain the
556 result, all temporaries are preserved. A temporary is preserved by
557 pretending it was allocated at the previous nesting level. */
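/* Illustrative nesting, not part of the original file: callers typically
   bracket temporary usage like

     push_temp_slots ();
     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));
     ... use tmp ...
     pop_temp_slots ();

   pop_temp_slots frees every slot allocated at the current level unless a
   preceding preserve_temp_slots call moved it to an outer level.  */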
558
559 class GTY(()) temp_slot {
560 public:
561 /* Points to next temporary slot. */
562 class temp_slot *next;
563 /* Points to previous temporary slot. */
564 class temp_slot *prev;
565 /* The rtx used to reference the slot.  */
566 rtx slot;
567 /* The size, in units, of the slot. */
568 poly_int64 size;
569 /* The type of the object in the slot, or zero if it doesn't correspond
570 to a type. We use this to determine whether a slot can be reused.
571 It can be reused if objects of the type of the new slot will always
572 conflict with objects of the type of the old slot. */
573 tree type;
574 /* The alignment (in bits) of the slot. */
575 unsigned int align;
576 /* Nonzero if this temporary is currently in use. */
577 char in_use;
578 /* Nesting level at which this slot is being used. */
579 int level;
580 /* The offset of the slot from the frame_pointer, including extra space
581 for alignment. This info is for combine_temp_slots. */
582 poly_int64 base_offset;
583 /* The size of the slot, including extra space for alignment. This
584 info is for combine_temp_slots. */
585 poly_int64 full_size;
586 };
587
588 /* Entry for the below hash table. */
589 struct GTY((for_user)) temp_slot_address_entry {
590 hashval_t hash;
591 rtx address;
592 class temp_slot *temp_slot;
593 };
594
595 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
596 {
597 static hashval_t hash (temp_slot_address_entry *);
598 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
599 };
600
601 /* A table of addresses that represent a stack slot. The table is a mapping
602 from address RTXen to a temp slot. */
603 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
604 static size_t n_temp_slots_in_use;
605
606 /* Removes temporary slot TEMP from LIST. */
607
608 static void
609 cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
610 {
611 if (temp->next)
612 temp->next->prev = temp->prev;
613 if (temp->prev)
614 temp->prev->next = temp->next;
615 else
616 *list = temp->next;
617
618 temp->prev = temp->next = NULL;
619 }
620
621 /* Inserts temporary slot TEMP to LIST. */
622
623 static void
624 insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
625 {
626 temp->next = *list;
627 if (*list)
628 (*list)->prev = temp;
629 temp->prev = NULL;
630 *list = temp;
631 }
632
633 /* Returns the list of used temp slots at LEVEL. */
634
635 static class temp_slot **
636 temp_slots_at_level (int level)
637 {
638 if (level >= (int) vec_safe_length (used_temp_slots))
639 vec_safe_grow_cleared (used_temp_slots, level + 1);
640
641 return &(*used_temp_slots)[level];
642 }
643
644 /* Returns the maximal temporary slot level. */
645
646 static int
647 max_slot_level (void)
648 {
649 if (!used_temp_slots)
650 return -1;
651
652 return used_temp_slots->length () - 1;
653 }
654
655 /* Moves temporary slot TEMP to LEVEL. */
656
657 static void
658 move_slot_to_level (class temp_slot *temp, int level)
659 {
660 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
661 insert_slot_to_list (temp, temp_slots_at_level (level));
662 temp->level = level;
663 }
664
665 /* Make temporary slot TEMP available. */
666
667 static void
668 make_slot_available (class temp_slot *temp)
669 {
670 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
671 insert_slot_to_list (temp, &avail_temp_slots);
672 temp->in_use = 0;
673 temp->level = -1;
674 n_temp_slots_in_use--;
675 }
676
677 /* Compute the hash value for an address -> temp slot mapping.
678 The value is cached on the mapping entry. */
679 static hashval_t
680 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
681 {
682 int do_not_record = 0;
683 return hash_rtx (t->address, GET_MODE (t->address),
684 &do_not_record, NULL, false);
685 }
686
687 /* Return the hash value for an address -> temp slot mapping. */
688 hashval_t
689 temp_address_hasher::hash (temp_slot_address_entry *t)
690 {
691 return t->hash;
692 }
693
694 /* Compare two address -> temp slot mapping entries. */
695 bool
696 temp_address_hasher::equal (temp_slot_address_entry *t1,
697 temp_slot_address_entry *t2)
698 {
699 return exp_equiv_p (t1->address, t2->address, 0, true);
700 }
701
702 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
703 static void
704 insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
705 {
706 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
707 t->address = copy_rtx (address);
708 t->temp_slot = temp_slot;
709 t->hash = temp_slot_address_compute_hash (t);
710 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
711 }
712
713 /* Remove an address -> temp slot mapping entry if the temp slot is
714 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
715 int
716 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
717 {
718 const struct temp_slot_address_entry *t = *slot;
719 if (! t->temp_slot->in_use)
720 temp_slot_address_table->clear_slot (slot);
721 return 1;
722 }
723
724 /* Remove all mappings of addresses to unused temp slots. */
725 static void
726 remove_unused_temp_slot_addresses (void)
727 {
728 /* Use quicker clearing if there aren't any active temp slots. */
729 if (n_temp_slots_in_use)
730 temp_slot_address_table->traverse
731 <void *, remove_unused_temp_slot_addresses_1> (NULL);
732 else
733 temp_slot_address_table->empty ();
734 }
735
736 /* Find the temp slot corresponding to the object at address X. */
737
738 static class temp_slot *
739 find_temp_slot_from_address (rtx x)
740 {
741 class temp_slot *p;
742 struct temp_slot_address_entry tmp, *t;
743
744 /* First try the easy way:
745 See if X exists in the address -> temp slot mapping. */
746 tmp.address = x;
747 tmp.temp_slot = NULL;
748 tmp.hash = temp_slot_address_compute_hash (&tmp);
749 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
750 if (t)
751 return t->temp_slot;
752
753 /* If we have a sum involving a register, see if it points to a temp
754 slot. */
755 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
756 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
757 return p;
758 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
759 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
760 return p;
761
762 /* Last resort: Address is a virtual stack var address. */
763 poly_int64 offset;
764 if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
765 {
766 int i;
767 for (i = max_slot_level (); i >= 0; i--)
768 for (p = *temp_slots_at_level (i); p; p = p->next)
769 if (known_in_range_p (offset, p->base_offset, p->full_size))
770 return p;
771 }
772
773 return NULL;
774 }
775 \f
776 /* Allocate a temporary stack slot and record it for possible later
777 reuse.
778
779 MODE is the machine mode to be given to the returned rtx.
780
781 SIZE is the size in units of the space required. We do no rounding here
782 since assign_stack_local will do any required rounding.
783
784 TYPE is the type that will be used for the stack slot. */
785
786 rtx
787 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
788 {
789 unsigned int align;
790 class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
791 rtx slot;
792
793 gcc_assert (known_size_p (size));
794
795 align = get_stack_local_alignment (type, mode);
796
797 /* Try to find an available, already-allocated temporary of the proper
798 mode which meets the size and alignment requirements. Choose the
799 smallest one with the closest alignment.
800
801 If assign_stack_temp is called outside of the tree->rtl expansion,
802 we cannot reuse the stack slots (that may still refer to
803 VIRTUAL_STACK_VARS_REGNUM). */
804 if (!virtuals_instantiated)
805 {
806 for (p = avail_temp_slots; p; p = p->next)
807 {
808 if (p->align >= align
809 && known_ge (p->size, size)
810 && GET_MODE (p->slot) == mode
811 && objects_must_conflict_p (p->type, type)
812 && (best_p == 0
813 || (known_eq (best_p->size, p->size)
814 ? best_p->align > p->align
815 : known_ge (best_p->size, p->size))))
816 {
817 if (p->align == align && known_eq (p->size, size))
818 {
819 selected = p;
820 cut_slot_from_list (selected, &avail_temp_slots);
821 best_p = 0;
822 break;
823 }
824 best_p = p;
825 }
826 }
827 }
828
829 /* Make our best, if any, the one to use. */
830 if (best_p)
831 {
832 selected = best_p;
833 cut_slot_from_list (selected, &avail_temp_slots);
834
835 /* If there are enough aligned bytes left over, make them into a new
836 temp_slot so that the extra bytes don't get wasted. Do this only
837 for BLKmode slots, so that we can be sure of the alignment. */
838 if (GET_MODE (best_p->slot) == BLKmode)
839 {
840 int alignment = best_p->align / BITS_PER_UNIT;
841 poly_int64 rounded_size = aligned_upper_bound (size, alignment);
842
843 if (known_ge (best_p->size - rounded_size, alignment))
844 {
845 p = ggc_alloc<temp_slot> ();
846 p->in_use = 0;
847 p->size = best_p->size - rounded_size;
848 p->base_offset = best_p->base_offset + rounded_size;
849 p->full_size = best_p->full_size - rounded_size;
850 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
851 p->align = best_p->align;
852 p->type = best_p->type;
853 insert_slot_to_list (p, &avail_temp_slots);
854
855 vec_safe_push (stack_slot_list, p->slot);
856
857 best_p->size = rounded_size;
858 best_p->full_size = rounded_size;
859 }
860 }
861 }
862
863 /* If we still didn't find one, make a new temporary. */
864 if (selected == 0)
865 {
866 poly_int64 frame_offset_old = frame_offset;
867
868 p = ggc_alloc<temp_slot> ();
869
870 /* We are passing an explicit alignment request to assign_stack_local.
871 One side effect of that is assign_stack_local will not round SIZE
872 to ensure the frame offset remains suitably aligned.
873
874 So for requests which depended on the rounding of SIZE, we go ahead
875 and round it now. We also make sure ALIGNMENT is at least
876 BIGGEST_ALIGNMENT. */
877 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
878 p->slot = assign_stack_local_1 (mode,
879 (mode == BLKmode
880 ? aligned_upper_bound (size,
881 (int) align
882 / BITS_PER_UNIT)
883 : size),
884 align, 0);
885
886 p->align = align;
887
888 /* The following slot size computation is necessary because we don't
889 know the actual size of the temporary slot until assign_stack_local
890 has performed all the frame alignment and size rounding for the
891 requested temporary. Note that extra space added for alignment
892 can be either above or below this stack slot depending on which
893 way the frame grows. We include the extra space if and only if it
894 is above this slot. */
895 if (FRAME_GROWS_DOWNWARD)
896 p->size = frame_offset_old - frame_offset;
897 else
898 p->size = size;
899
900 /* Now define the fields used by combine_temp_slots. */
901 if (FRAME_GROWS_DOWNWARD)
902 {
903 p->base_offset = frame_offset;
904 p->full_size = frame_offset_old - frame_offset;
905 }
906 else
907 {
908 p->base_offset = frame_offset_old;
909 p->full_size = frame_offset - frame_offset_old;
910 }
911
912 selected = p;
913 }
914
915 p = selected;
916 p->in_use = 1;
917 p->type = type;
918 p->level = temp_slot_level;
919 n_temp_slots_in_use++;
920
921 pp = temp_slots_at_level (p->level);
922 insert_slot_to_list (p, pp);
923 insert_temp_slot_address (XEXP (p->slot, 0), p);
924
925 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
926 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
927 vec_safe_push (stack_slot_list, slot);
928
929 /* If we know the alias set for the memory that will be used, use
930 it. If there's no TYPE, then we don't know anything about the
931 alias set for the memory. */
932 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
933 set_mem_align (slot, align);
934
935 /* If a type is specified, set the relevant flags. */
936 if (type != 0)
937 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
938 MEM_NOTRAP_P (slot) = 1;
939
940 return slot;
941 }
942
943 /* Allocate a temporary stack slot and record it for possible later
944    reuse.  The two arguments are the same as the first two of the preceding function.  */
945
946 rtx
947 assign_stack_temp (machine_mode mode, poly_int64 size)
948 {
949 return assign_stack_temp_for_type (mode, size, NULL_TREE);
950 }
951 \f
952 /* Assign a temporary.
953    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
954    which should then be used in any error messages.  In either case, we
955    allocate a temporary of the given type.
956 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
957 it is 0 if a register is OK.
958 DONT_PROMOTE is 1 if we should not promote values in register
959 to wider modes. */
960
961 rtx
962 assign_temp (tree type_or_decl, int memory_required,
963 int dont_promote ATTRIBUTE_UNUSED)
964 {
965 tree type, decl;
966 machine_mode mode;
967 #ifdef PROMOTE_MODE
968 int unsignedp;
969 #endif
970
971 if (DECL_P (type_or_decl))
972 decl = type_or_decl, type = TREE_TYPE (decl);
973 else
974 decl = NULL, type = type_or_decl;
975
976 mode = TYPE_MODE (type);
977 #ifdef PROMOTE_MODE
978 unsignedp = TYPE_UNSIGNED (type);
979 #endif
980
981 /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
982 end. See also create_tmp_var for the gimplification-time check. */
983 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
984
985 if (mode == BLKmode || memory_required)
986 {
987 poly_int64 size;
988 rtx tmp;
989
990 /* Unfortunately, we don't yet know how to allocate variable-sized
991 temporaries. However, sometimes we can find a fixed upper limit on
992 the size, so try that instead. */
993 if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
994 size = max_int_size_in_bytes (type);
995
996 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
997 problems with allocating the stack space. */
998 if (known_eq (size, 0))
999 size = 1;
1000
1001 /* The size of the temporary may be too large to fit into an integer. */
1002 /* ??? Not sure this should happen except for user silliness, so limit
1003 this to things that aren't compiler-generated temporaries. The
1004 rest of the time we'll die in assign_stack_temp_for_type. */
1005 if (decl
1006 && !known_size_p (size)
1007 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1008 {
1009 error ("size of variable %q+D is too large", decl);
1010 size = 1;
1011 }
1012
1013 tmp = assign_stack_temp_for_type (mode, size, type);
1014 return tmp;
1015 }
1016
1017 #ifdef PROMOTE_MODE
1018 if (! dont_promote)
1019 mode = promote_mode (type, mode, &unsignedp);
1020 #endif
1021
1022 return gen_reg_rtx (mode);
1023 }
1024 \f
1025 /* Combine temporary stack slots which are adjacent on the stack.
1026
1027 This allows for better use of already allocated stack space. This is only
1028 done for BLKmode slots because we can be sure that we won't have alignment
1029 problems in this case. */
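/* Example (illustrative): two freed BLKmode slots whose base_offset and
   full_size describe the byte ranges 0..16 and 16..32 are adjacent, so
   the loop below merges them into a single 32-byte slot that later
   requests can reuse.  */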
1030
1031 static void
1032 combine_temp_slots (void)
1033 {
1034 class temp_slot *p, *q, *next, *next_q;
1035 int num_slots;
1036
1037 /* We can't combine slots, because the information about which slot
1038 is in which alias set will be lost. */
1039 if (flag_strict_aliasing)
1040 return;
1041
1042 /* If there are a lot of temp slots, don't do anything unless
1043      high levels of optimization are enabled.  */
1044 if (! flag_expensive_optimizations)
1045 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1046 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1047 return;
1048
1049 for (p = avail_temp_slots; p; p = next)
1050 {
1051 int delete_p = 0;
1052
1053 next = p->next;
1054
1055 if (GET_MODE (p->slot) != BLKmode)
1056 continue;
1057
1058 for (q = p->next; q; q = next_q)
1059 {
1060 int delete_q = 0;
1061
1062 next_q = q->next;
1063
1064 if (GET_MODE (q->slot) != BLKmode)
1065 continue;
1066
1067 if (known_eq (p->base_offset + p->full_size, q->base_offset))
1068 {
1069 /* Q comes after P; combine Q into P. */
1070 p->size += q->size;
1071 p->full_size += q->full_size;
1072 delete_q = 1;
1073 }
1074 else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1075 {
1076 /* P comes after Q; combine P into Q. */
1077 q->size += p->size;
1078 q->full_size += p->full_size;
1079 delete_p = 1;
1080 break;
1081 }
1082 if (delete_q)
1083 cut_slot_from_list (q, &avail_temp_slots);
1084 }
1085
1086 /* Either delete P or advance past it. */
1087 if (delete_p)
1088 cut_slot_from_list (p, &avail_temp_slots);
1089 }
1090 }
1091 \f
1092 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1093 slot that previously was known by OLD_RTX. */
1094
1095 void
1096 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1097 {
1098 class temp_slot *p;
1099
1100 if (rtx_equal_p (old_rtx, new_rtx))
1101 return;
1102
1103 p = find_temp_slot_from_address (old_rtx);
1104
1105   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1106      NEW_RTX is a register, see if one operand of the PLUS is a
1107      temporary location.  If so, NEW_RTX points into it.  Otherwise,
1108      if both OLD_RTX and NEW_RTX are a PLUS and there is a register
1109      in common between them, try a recursive call on the remaining
1110      operands.  */
1111 if (p == 0)
1112 {
1113 if (GET_CODE (old_rtx) != PLUS)
1114 return;
1115
1116 if (REG_P (new_rtx))
1117 {
1118 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1119 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1120 return;
1121 }
1122 else if (GET_CODE (new_rtx) != PLUS)
1123 return;
1124
1125 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1126 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1127 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1128 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1129 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1130 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1131 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1132 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1133
1134 return;
1135 }
1136
1137 /* Otherwise add an alias for the temp's address. */
1138 insert_temp_slot_address (new_rtx, p);
1139 }
1140
1141 /* If X could be a reference to a temporary slot, mark that slot as
1142    belonging to one level higher than the current level.  If X
1143 matched one of our slots, just mark that one. Otherwise, we can't
1144 easily predict which it is, so upgrade all of them.
1145
1146 This is called when an ({...}) construct occurs and a statement
1147 returns a value in memory. */
1148
1149 void
1150 preserve_temp_slots (rtx x)
1151 {
1152 class temp_slot *p = 0, *next;
1153
1154 if (x == 0)
1155 return;
1156
1157 /* If X is a register that is being used as a pointer, see if we have
1158 a temporary slot we know it points to. */
1159 if (REG_P (x) && REG_POINTER (x))
1160 p = find_temp_slot_from_address (x);
1161
1162 /* If X is not in memory or is at a constant address, it cannot be in
1163 a temporary slot. */
1164 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1165 return;
1166
1167 /* First see if we can find a match. */
1168 if (p == 0)
1169 p = find_temp_slot_from_address (XEXP (x, 0));
1170
1171 if (p != 0)
1172 {
1173 if (p->level == temp_slot_level)
1174 move_slot_to_level (p, temp_slot_level - 1);
1175 return;
1176 }
1177
1178 /* Otherwise, preserve all non-kept slots at this level. */
1179 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1180 {
1181 next = p->next;
1182 move_slot_to_level (p, temp_slot_level - 1);
1183 }
1184 }
1185
1186 /* Free all temporaries used so far. This is normally called at the
1187 end of generating code for a statement. */
1188
1189 void
1190 free_temp_slots (void)
1191 {
1192 class temp_slot *p, *next;
1193 bool some_available = false;
1194
1195 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1196 {
1197 next = p->next;
1198 make_slot_available (p);
1199 some_available = true;
1200 }
1201
1202 if (some_available)
1203 {
1204 remove_unused_temp_slot_addresses ();
1205 combine_temp_slots ();
1206 }
1207 }
1208
1209 /* Push deeper into the nesting level for stack temporaries. */
1210
1211 void
1212 push_temp_slots (void)
1213 {
1214 temp_slot_level++;
1215 }
1216
1217 /* Pop a temporary nesting level. All slots in use in the current level
1218 are freed. */
1219
1220 void
1221 pop_temp_slots (void)
1222 {
1223 free_temp_slots ();
1224 temp_slot_level--;
1225 }
1226
1227 /* Initialize temporary slots. */
1228
1229 void
1230 init_temp_slots (void)
1231 {
1232 /* We have not allocated any temporaries yet. */
1233 avail_temp_slots = 0;
1234 vec_alloc (used_temp_slots, 0);
1235 temp_slot_level = 0;
1236 n_temp_slots_in_use = 0;
1237
1238 /* Set up the table to map addresses to temp slots. */
1239 if (! temp_slot_address_table)
1240 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1241 else
1242 temp_slot_address_table->empty ();
1243 }
1244 \f
1245 /* Functions and data structures to keep track of the values hard regs
1246 had at the start of the function. */
1247
1248 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1249    and has_hard_reg_initial_val.  */
1250 struct GTY(()) initial_value_pair {
1251 rtx hard_reg;
1252 rtx pseudo;
1253 };
1254 /* ??? This could be a VEC but there is currently no way to define an
1255 opaque VEC type. This could be worked around by defining struct
1256 initial_value_pair in function.h. */
1257 struct GTY(()) initial_value_struct {
1258 int num_entries;
1259 int max_entries;
1260 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1261 };
1262
1263 /* If a pseudo represents an initial hard reg (or expression), return
1264 it, else return NULL_RTX. */
1265
1266 rtx
1267 get_hard_reg_initial_reg (rtx reg)
1268 {
1269 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1270 int i;
1271
1272 if (ivs == 0)
1273 return NULL_RTX;
1274
1275 for (i = 0; i < ivs->num_entries; i++)
1276 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1277 return ivs->entries[i].hard_reg;
1278
1279 return NULL_RTX;
1280 }
1281
1282 /* Make sure that there's a pseudo register of mode MODE that stores the
1283 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1284
1285 rtx
1286 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1287 {
1288 struct initial_value_struct *ivs;
1289 rtx rv;
1290
1291 rv = has_hard_reg_initial_val (mode, regno);
1292 if (rv)
1293 return rv;
1294
1295 ivs = crtl->hard_reg_initial_vals;
1296 if (ivs == 0)
1297 {
1298 ivs = ggc_alloc<initial_value_struct> ();
1299 ivs->num_entries = 0;
1300 ivs->max_entries = 5;
1301 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1302 crtl->hard_reg_initial_vals = ivs;
1303 }
1304
1305 if (ivs->num_entries >= ivs->max_entries)
1306 {
1307 ivs->max_entries += 5;
1308 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1309 ivs->max_entries);
1310 }
1311
1312 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1313 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1314
1315 return ivs->entries[ivs->num_entries++].pseudo;
1316 }
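/* Illustrative use (LR_REGNUM is a hypothetical target register number):
   a back end that needs the incoming value of its link register can do

     rtx lr_on_entry = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   and emit_initial_value_sets below materializes the copy into the
   returned pseudo at function entry.  */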
1317
1318 /* See if get_hard_reg_initial_val has been used to create a pseudo
1319 for the initial value of hard register REGNO in mode MODE. Return
1320 the associated pseudo if so, otherwise return NULL. */
1321
1322 rtx
1323 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1324 {
1325 struct initial_value_struct *ivs;
1326 int i;
1327
1328 ivs = crtl->hard_reg_initial_vals;
1329 if (ivs != 0)
1330 for (i = 0; i < ivs->num_entries; i++)
1331 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1332 && REGNO (ivs->entries[i].hard_reg) == regno)
1333 return ivs->entries[i].pseudo;
1334
1335 return NULL_RTX;
1336 }
1337
1338 unsigned int
1339 emit_initial_value_sets (void)
1340 {
1341 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1342 int i;
1343 rtx_insn *seq;
1344
1345 if (ivs == 0)
1346 return 0;
1347
1348 start_sequence ();
1349 for (i = 0; i < ivs->num_entries; i++)
1350 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1351 seq = get_insns ();
1352 end_sequence ();
1353
1354 emit_insn_at_entry (seq);
1355 return 0;
1356 }
1357
1358 /* Return the hardreg-pseudoreg initial values pair entry I and
1359 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1360 bool
1361 initial_value_entry (int i, rtx *hreg, rtx *preg)
1362 {
1363 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1364 if (!ivs || i >= ivs->num_entries)
1365 return false;
1366
1367 *hreg = ivs->entries[i].hard_reg;
1368 *preg = ivs->entries[i].pseudo;
1369 return true;
1370 }
1371 \f
1372 /* These routines are responsible for converting virtual register references
1373 to the actual hard register references once RTL generation is complete.
1374
1375    The following five variables are used for communication between the
1376 routines. They contain the offsets of the virtual registers from their
1377 respective hard registers. */
1378
1379 static poly_int64 in_arg_offset;
1380 static poly_int64 var_offset;
1381 static poly_int64 dynamic_offset;
1382 static poly_int64 out_arg_offset;
1383 static poly_int64 cfa_offset;
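/* Roughly, per instantiate_new_reg below:
     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
                                   (or the DRAP internal arg pointer)
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx            -> frame or arg pointer + cfa_offset.  */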
1384
1385 /* In most machines, the stack pointer register is equivalent to the bottom
1386 of the stack. */
1387
1388 #ifndef STACK_POINTER_OFFSET
1389 #define STACK_POINTER_OFFSET 0
1390 #endif
1391
1392 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1393 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1394 #endif
1395
1396 /* If not defined, pick an appropriate default for the offset of dynamically
1397 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1398 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1399
1400 #ifndef STACK_DYNAMIC_OFFSET
1401
1402 /* The bottom of the stack points to the actual arguments. If
1403 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1404    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1405 stack space for register parameters is not pushed by the caller, but
1406 rather part of the fixed stack areas and hence not included in
1407 `crtl->outgoing_args_size'. Nevertheless, we must allow
1408 for it when allocating stack dynamic objects. */
1409
1410 #ifdef INCOMING_REG_PARM_STACK_SPACE
1411 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1412 ((ACCUMULATE_OUTGOING_ARGS \
1413 ? (crtl->outgoing_args_size \
1414 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1415 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1416 : 0) + (STACK_POINTER_OFFSET))
1417 #else
1418 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1419 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1420 + (STACK_POINTER_OFFSET))
1421 #endif
1422 #endif
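/* For example (illustrative): on a target that accumulates outgoing args
   and defines no REG_PARM_STACK_SPACE, dynamic allocations start at an
   offset of crtl->outgoing_args_size + STACK_POINTER_OFFSET from the
   stack pointer.  */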
1423
1424 \f
1425 /* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1426 is a virtual register, return the equivalent hard register and set the
1427 offset indirectly through the pointer. Otherwise, return 0. */
1428
1429 static rtx
1430 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1431 {
1432 rtx new_rtx;
1433 poly_int64 offset;
1434
1435 if (x == virtual_incoming_args_rtx)
1436 {
1437 if (stack_realign_drap)
1438 {
1439 /* Replace virtual_incoming_args_rtx with internal arg
1440 pointer if DRAP is used to realign stack. */
1441 new_rtx = crtl->args.internal_arg_pointer;
1442 offset = 0;
1443 }
1444 else
1445 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1446 }
1447 else if (x == virtual_stack_vars_rtx)
1448 new_rtx = frame_pointer_rtx, offset = var_offset;
1449 else if (x == virtual_stack_dynamic_rtx)
1450 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1451 else if (x == virtual_outgoing_args_rtx)
1452 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1453 else if (x == virtual_cfa_rtx)
1454 {
1455 #ifdef FRAME_POINTER_CFA_OFFSET
1456 new_rtx = frame_pointer_rtx;
1457 #else
1458 new_rtx = arg_pointer_rtx;
1459 #endif
1460 offset = cfa_offset;
1461 }
1462 else if (x == virtual_preferred_stack_boundary_rtx)
1463 {
1464 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1465 offset = 0;
1466 }
1467 else
1468 return NULL_RTX;
1469
1470 *poffset = offset;
1471 return new_rtx;
1472 }
1473
1474 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1475 registers present inside of *LOC. The expression is simplified,
1476 as much as possible, but is not to be considered "valid" in any sense
1477 implied by the target. Return true if any change is made. */
1478
1479 static bool
1480 instantiate_virtual_regs_in_rtx (rtx *loc)
1481 {
1482 if (!*loc)
1483 return false;
1484 bool changed = false;
1485 subrtx_ptr_iterator::array_type array;
1486 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1487 {
1488 rtx *loc = *iter;
1489 if (rtx x = *loc)
1490 {
1491 rtx new_rtx;
1492 poly_int64 offset;
1493 switch (GET_CODE (x))
1494 {
1495 case REG:
1496 new_rtx = instantiate_new_reg (x, &offset);
1497 if (new_rtx)
1498 {
1499 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1500 changed = true;
1501 }
1502 iter.skip_subrtxes ();
1503 break;
1504
1505 case PLUS:
1506 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1507 if (new_rtx)
1508 {
1509 XEXP (x, 0) = new_rtx;
1510 *loc = plus_constant (GET_MODE (x), x, offset, true);
1511 changed = true;
1512 iter.skip_subrtxes ();
1513 break;
1514 }
1515
1516 /* FIXME -- from old code */
1517 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1518 we can commute the PLUS and SUBREG because pointers into the
1519 frame are well-behaved. */
1520 break;
1521
1522 default:
1523 break;
1524 }
1525 }
1526 }
1527 return changed;
1528 }
1529
1530 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1531 matches the predicate for insn CODE operand OPERAND. */
1532
1533 static int
1534 safe_insn_predicate (int code, int operand, rtx x)
1535 {
1536 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1537 }
1538
1539 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1540 registers present inside of insn. The result will be a valid insn. */
1541
1542 static void
1543 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1544 {
1545 poly_int64 offset;
1546 int insn_code, i;
1547 bool any_change = false;
1548 rtx set, new_rtx, x;
1549 rtx_insn *seq;
1550
1551 /* There are some special cases to be handled first. */
1552 set = single_set (insn);
1553 if (set)
1554 {
1555 /* We're allowed to assign to a virtual register. This is interpreted
1556 to mean that the underlying register gets assigned the inverse
1557 transformation. This is used, for example, in the handling of
1558 non-local gotos. */
1559 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1560 if (new_rtx)
1561 {
1562 start_sequence ();
1563
1564 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1565 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1566 gen_int_mode (-offset, GET_MODE (new_rtx)));
1567 x = force_operand (x, new_rtx);
1568 if (x != new_rtx)
1569 emit_move_insn (new_rtx, x);
1570
1571 seq = get_insns ();
1572 end_sequence ();
1573
1574 emit_insn_before (seq, insn);
1575 delete_insn (insn);
1576 return;
1577 }
1578
1579 /* Handle a straight copy from a virtual register by generating a
1580 new add insn. The difference between this and falling through
1581 to the generic case is avoiding a new pseudo and eliminating a
1582 move insn in the initial rtl stream. */
1583 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1584 if (new_rtx
1585 && maybe_ne (offset, 0)
1586 && REG_P (SET_DEST (set))
1587 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1588 {
1589 start_sequence ();
1590
1591 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1592 gen_int_mode (offset,
1593 GET_MODE (SET_DEST (set))),
1594 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1595 if (x != SET_DEST (set))
1596 emit_move_insn (SET_DEST (set), x);
1597
1598 seq = get_insns ();
1599 end_sequence ();
1600
1601 emit_insn_before (seq, insn);
1602 delete_insn (insn);
1603 return;
1604 }
1605
1606 extract_insn (insn);
1607 insn_code = INSN_CODE (insn);
1608
1609 /* Handle a plus involving a virtual register by determining if the
1610 operands remain valid if they're modified in place. */
1611 poly_int64 delta;
1612 if (GET_CODE (SET_SRC (set)) == PLUS
1613 && recog_data.n_operands >= 3
1614 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1615 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1616 && poly_int_rtx_p (recog_data.operand[2], &delta)
1617 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1618 {
1619 offset += delta;
1620
1621 /* If the sum is zero, then replace with a plain move. */
1622 if (known_eq (offset, 0)
1623 && REG_P (SET_DEST (set))
1624 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1625 {
1626 start_sequence ();
1627 emit_move_insn (SET_DEST (set), new_rtx);
1628 seq = get_insns ();
1629 end_sequence ();
1630
1631 emit_insn_before (seq, insn);
1632 delete_insn (insn);
1633 return;
1634 }
1635
1636 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1637
1638 /* Using validate_change and apply_change_group here leaves
1639 recog_data in an invalid state. Since we know exactly what
1640 we want to check, do those two by hand. */
1641 if (safe_insn_predicate (insn_code, 1, new_rtx)
1642 && safe_insn_predicate (insn_code, 2, x))
1643 {
1644 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1645 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1646 any_change = true;
1647
1648 /* Fall through into the regular operand fixup loop in
1649 order to take care of operands other than 1 and 2. */
1650 }
1651 }
1652 }
1653 else
1654 {
1655 extract_insn (insn);
1656 insn_code = INSN_CODE (insn);
1657 }
1658
1659 /* In the general case, we expect virtual registers to appear only in
1660 operands, and then only as either bare registers or inside memories. */
1661 for (i = 0; i < recog_data.n_operands; ++i)
1662 {
1663 x = recog_data.operand[i];
1664 switch (GET_CODE (x))
1665 {
1666 case MEM:
1667 {
1668 rtx addr = XEXP (x, 0);
1669
1670 if (!instantiate_virtual_regs_in_rtx (&addr))
1671 continue;
1672
1673 start_sequence ();
1674 x = replace_equiv_address (x, addr, true);
1675 /* It may happen that the address with the virtual reg
1676 was valid (e.g. based on the virtual stack reg, which might
1677 be acceptable to the predicates with all offsets), whereas
1678 the address now isn't anymore, for instance when the address
1679 is still offsetted, but the base reg isn't virtual-stack-reg
1680 anymore. Below we would do a force_reg on the whole operand,
1681 but this insn might actually only accept memory. Hence,
1682 before doing that last resort, try to reload the address into
1683 a register, so this operand stays a MEM. */
1684 if (!safe_insn_predicate (insn_code, i, x))
1685 {
1686 addr = force_reg (GET_MODE (addr), addr);
1687 x = replace_equiv_address (x, addr, true);
1688 }
1689 seq = get_insns ();
1690 end_sequence ();
1691 if (seq)
1692 emit_insn_before (seq, insn);
1693 }
1694 break;
1695
1696 case REG:
1697 new_rtx = instantiate_new_reg (x, &offset);
1698 if (new_rtx == NULL)
1699 continue;
1700 if (known_eq (offset, 0))
1701 x = new_rtx;
1702 else
1703 {
1704 start_sequence ();
1705
1706 /* Careful, special mode predicates may have stuff in
1707 insn_data[insn_code].operand[i].mode that isn't useful
1708 to us for computing a new value. */
1709 /* ??? Recognize address_operand and/or "p" constraints
1710                to see if (plus new offset) is valid before we put
1711 this through expand_simple_binop. */
1712 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1713 gen_int_mode (offset, GET_MODE (x)),
1714 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1715 seq = get_insns ();
1716 end_sequence ();
1717 emit_insn_before (seq, insn);
1718 }
1719 break;
1720
1721 case SUBREG:
1722 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1723 if (new_rtx == NULL)
1724 continue;
1725 if (maybe_ne (offset, 0))
1726 {
1727 start_sequence ();
1728 new_rtx = expand_simple_binop
1729 (GET_MODE (new_rtx), PLUS, new_rtx,
1730 gen_int_mode (offset, GET_MODE (new_rtx)),
1731 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1732 seq = get_insns ();
1733 end_sequence ();
1734 emit_insn_before (seq, insn);
1735 }
1736 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1737 GET_MODE (new_rtx), SUBREG_BYTE (x));
1738 gcc_assert (x);
1739 break;
1740
1741 default:
1742 continue;
1743 }
1744
1745 /* At this point, X contains the new value for the operand.
1746 Validate the new value vs the insn predicate. Note that
1747 asm insns will have insn_code -1 here. */
1748 if (!safe_insn_predicate (insn_code, i, x))
1749 {
1750 start_sequence ();
1751 if (REG_P (x))
1752 {
1753 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1754 x = copy_to_reg (x);
1755 }
1756 else
1757 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1758 seq = get_insns ();
1759 end_sequence ();
1760 if (seq)
1761 emit_insn_before (seq, insn);
1762 }
1763
1764 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1765 any_change = true;
1766 }
1767
1768 if (any_change)
1769 {
1770 /* Propagate operand changes into the duplicates. */
1771 for (i = 0; i < recog_data.n_dups; ++i)
1772 *recog_data.dup_loc[i]
1773 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1774
1775 /* Force re-recognition of the instruction for validation. */
1776 INSN_CODE (insn) = -1;
1777 }
1778
1779 if (asm_noperands (PATTERN (insn)) >= 0)
1780 {
1781 if (!check_asm_operands (PATTERN (insn)))
1782 {
1783 error_for_asm (insn, "impossible constraint in %<asm%>");
1784 /* For asm goto, instead of fixing up all the edges
1785 just clear the template and clear input operands
1786 (asm goto doesn't have any output operands). */
1787 if (JUMP_P (insn))
1788 {
1789 rtx asm_op = extract_asm_operands (PATTERN (insn));
1790 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1791 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1792 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1793 }
1794 else
1795 delete_insn (insn);
1796 }
1797 }
1798 else
1799 {
1800 if (recog_memoized (insn) < 0)
1801 fatal_insn_not_found (insn);
1802 }
1803 }
1804
1805 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1806 do any instantiation required. */
1807
1808 void
1809 instantiate_decl_rtl (rtx x)
1810 {
1811 rtx addr;
1812
1813 if (x == 0)
1814 return;
1815
1816 /* If this is a CONCAT, recurse for the pieces. */
1817 if (GET_CODE (x) == CONCAT)
1818 {
1819 instantiate_decl_rtl (XEXP (x, 0));
1820 instantiate_decl_rtl (XEXP (x, 1));
1821 return;
1822 }
1823
1824 /* If this is not a MEM, no need to do anything. Similarly if the
1825 address is a constant or a register that is not a virtual register. */
1826 if (!MEM_P (x))
1827 return;
1828
1829 addr = XEXP (x, 0);
1830 if (CONSTANT_P (addr)
1831 || (REG_P (addr)
1832 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1833 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1834 return;
1835
1836 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1837 }
1838
1839 /* Helper for instantiate_decls called via walk_tree: Process all decls
1840 in the given DECL_VALUE_EXPR. */
1841
1842 static tree
1843 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1844 {
1845 tree t = *tp;
1846 if (! EXPR_P (t))
1847 {
1848 *walk_subtrees = 0;
1849 if (DECL_P (t))
1850 {
1851 if (DECL_RTL_SET_P (t))
1852 instantiate_decl_rtl (DECL_RTL (t));
1853 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1854 && DECL_INCOMING_RTL (t))
1855 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1856 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1857 && DECL_HAS_VALUE_EXPR_P (t))
1858 {
1859 tree v = DECL_VALUE_EXPR (t);
1860 walk_tree (&v, instantiate_expr, NULL, NULL);
1861 }
1862 }
1863 }
1864 return NULL;
1865 }
1866
1867 /* Subroutine of instantiate_decls: Process all decls in the given
1868 BLOCK node and all its subblocks. */
1869
1870 static void
1871 instantiate_decls_1 (tree let)
1872 {
1873 tree t;
1874
1875 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1876 {
1877 if (DECL_RTL_SET_P (t))
1878 instantiate_decl_rtl (DECL_RTL (t));
1879 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1880 {
1881 tree v = DECL_VALUE_EXPR (t);
1882 walk_tree (&v, instantiate_expr, NULL, NULL);
1883 }
1884 }
1885
1886 /* Process all subblocks. */
1887 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1888 instantiate_decls_1 (t);
1889 }
1890
1891 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1892 all virtual registers in their DECL_RTL's. */
1893
1894 static void
1895 instantiate_decls (tree fndecl)
1896 {
1897 tree decl;
1898 unsigned ix;
1899
1900 /* Process all parameters of the function. */
1901 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1902 {
1903 instantiate_decl_rtl (DECL_RTL (decl));
1904 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1905 if (DECL_HAS_VALUE_EXPR_P (decl))
1906 {
1907 tree v = DECL_VALUE_EXPR (decl);
1908 walk_tree (&v, instantiate_expr, NULL, NULL);
1909 }
1910 }
1911
1912 if ((decl = DECL_RESULT (fndecl))
1913 && TREE_CODE (decl) == RESULT_DECL)
1914 {
1915 if (DECL_RTL_SET_P (decl))
1916 instantiate_decl_rtl (DECL_RTL (decl));
1917 if (DECL_HAS_VALUE_EXPR_P (decl))
1918 {
1919 tree v = DECL_VALUE_EXPR (decl);
1920 walk_tree (&v, instantiate_expr, NULL, NULL);
1921 }
1922 }
1923
1924 /* Process the saved static chain if it exists. */
1925 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1926 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1927 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1928
1929 /* Now process all variables defined in the function or its subblocks. */
1930 if (DECL_INITIAL (fndecl))
1931 instantiate_decls_1 (DECL_INITIAL (fndecl));
1932
1933 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1934 if (DECL_RTL_SET_P (decl))
1935 instantiate_decl_rtl (DECL_RTL (decl));
1936 vec_free (cfun->local_decls);
1937 }
1938
1939 /* Pass through the insns of the current function and convert virtual
1940 register references to hard register references. */
1941
1942 static unsigned int
1943 instantiate_virtual_regs (void)
1944 {
1945 rtx_insn *insn;
1946
1947 /* Compute the offsets to use for this function. */
1948 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1949 var_offset = targetm.starting_frame_offset ();
1950 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1951 out_arg_offset = STACK_POINTER_OFFSET;
1952 #ifdef FRAME_POINTER_CFA_OFFSET
1953 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1954 #else
1955 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1956 #endif
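   /* A minimal sketch of what these offsets feed (not target-specific):
      each virtual register is later rewritten as a hard register plus the
      matching offset, e.g. virtual-stack-vars becomes frame-pointer +
      var_offset and virtual-outgoing-args becomes stack-pointer +
      out_arg_offset.  */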
1957
1958 /* Initialize recognition, indicating that volatile is OK. */
1959 init_recog ();
1960
1961 /* Scan through all the insns, instantiating every virtual register still
1962 present. */
1963 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1964 if (INSN_P (insn))
1965 {
1966 /* These patterns in the instruction stream can never be recognized.
1967 Fortunately, they shouldn't contain virtual registers either. */
1968 if (GET_CODE (PATTERN (insn)) == USE
1969 || GET_CODE (PATTERN (insn)) == CLOBBER
1970 || GET_CODE (PATTERN (insn)) == ASM_INPUT
1971 || DEBUG_MARKER_INSN_P (insn))
1972 continue;
1973 else if (DEBUG_BIND_INSN_P (insn))
1974 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1975 else
1976 instantiate_virtual_regs_in_insn (insn);
1977
1978 if (insn->deleted ())
1979 continue;
1980
1981 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1982
1983 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1984 if (CALL_P (insn))
1985 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1986 }
1987
1988 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1989 instantiate_decls (current_function_decl);
1990
1991 targetm.instantiate_decls ();
1992
1993 /* Indicate that, from now on, assign_stack_local should use
1994 frame_pointer_rtx. */
1995 virtuals_instantiated = 1;
1996
1997 return 0;
1998 }
1999
2000 namespace {
2001
2002 const pass_data pass_data_instantiate_virtual_regs =
2003 {
2004 RTL_PASS, /* type */
2005 "vregs", /* name */
2006 OPTGROUP_NONE, /* optinfo_flags */
2007 TV_NONE, /* tv_id */
2008 0, /* properties_required */
2009 0, /* properties_provided */
2010 0, /* properties_destroyed */
2011 0, /* todo_flags_start */
2012 0, /* todo_flags_finish */
2013 };
2014
2015 class pass_instantiate_virtual_regs : public rtl_opt_pass
2016 {
2017 public:
2018 pass_instantiate_virtual_regs (gcc::context *ctxt)
2019 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2020 {}
2021
2022 /* opt_pass methods: */
2023 virtual unsigned int execute (function *)
2024 {
2025 return instantiate_virtual_regs ();
2026 }
2027
2028 }; // class pass_instantiate_virtual_regs
2029
2030 } // anon namespace
2031
2032 rtl_opt_pass *
2033 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2034 {
2035 return new pass_instantiate_virtual_regs (ctxt);
2036 }
2037
2038 \f
2039 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2040 This means a type for which function calls must pass an address to the
2041 function or get an address back from the function.
2042 EXP may be a type node or an expression (whose type is tested). */
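/* An illustrative C-level example (assumed, not taken from this file):
   for
       struct big { char buf[64]; };
       struct big f (void);
   most ABIs cannot return the value in registers, so aggregate_value_p
   returns nonzero and the caller passes the address of a temporary for
   f to fill in.  */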
2043
2044 int
2045 aggregate_value_p (const_tree exp, const_tree fntype)
2046 {
2047 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2048 int i, regno, nregs;
2049 rtx reg;
2050
2051 if (fntype)
2052 switch (TREE_CODE (fntype))
2053 {
2054 case CALL_EXPR:
2055 {
2056 tree fndecl = get_callee_fndecl (fntype);
2057 if (fndecl)
2058 fntype = TREE_TYPE (fndecl);
2059 else if (CALL_EXPR_FN (fntype))
2060 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2061 else
2062 /* For internal functions, assume nothing needs to be
2063 returned in memory. */
2064 return 0;
2065 }
2066 break;
2067 case FUNCTION_DECL:
2068 fntype = TREE_TYPE (fntype);
2069 break;
2070 case FUNCTION_TYPE:
2071 case METHOD_TYPE:
2072 break;
2073 case IDENTIFIER_NODE:
2074 fntype = NULL_TREE;
2075 break;
2076 default:
2077 /* We don't expect other tree types here. */
2078 gcc_unreachable ();
2079 }
2080
2081 if (VOID_TYPE_P (type))
2082 return 0;
2083
2084 /* If a record should be passed the same as its first (and only) member,
2085 don't pass it as an aggregate. */
2086 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2087 return aggregate_value_p (first_field (type), fntype);
2088
2089 /* If the front end has decided that this needs to be passed by
2090 reference, do so. */
2091 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2092 && DECL_BY_REFERENCE (exp))
2093 return 1;
2094
2095 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2096 if (fntype && TREE_ADDRESSABLE (fntype))
2097 return 1;
2098
2099 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2100 and thus can't be returned in registers. */
2101 if (TREE_ADDRESSABLE (type))
2102 return 1;
2103
2104 if (TYPE_EMPTY_P (type))
2105 return 0;
2106
2107 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2108 return 1;
2109
2110 if (targetm.calls.return_in_memory (type, fntype))
2111 return 1;
2112
2113 /* Make sure we have suitable call-clobbered regs to return
2114 the value in; if not, we must return it in memory. */
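  /* If any register needed for the return value is call-saved rather than
     call-used or fixed, returning the value there would clobber a
     callee-saved register, so the value must come back in memory.  */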
2115 reg = hard_function_value (type, 0, fntype, 0);
2116
2117 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2118 it is OK. */
2119 if (!REG_P (reg))
2120 return 0;
2121
2122 regno = REGNO (reg);
2123 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2124 for (i = 0; i < nregs; i++)
2125 if (! call_used_or_fixed_reg_p (regno + i))
2126 return 1;
2127
2128 return 0;
2129 }
2130 \f
2131 /* Return true if we should assign DECL a pseudo register; false if it
2132 should live on the local stack. */
2133
2134 bool
2135 use_register_for_decl (const_tree decl)
2136 {
2137 if (TREE_CODE (decl) == SSA_NAME)
2138 {
2139 /* We often try to use the SSA_NAME, instead of its underlying
2140 decl, to get type information and guide decisions, to avoid
2141 differences of behavior between anonymous and named
2142 variables, but in this one case we have to go for the actual
2143 variable if there is one. The main reason is that, at least
2144 at -O0, we want to place user variables on the stack, but we
2145 don't mind using pseudos for anonymous or ignored temps.
2146 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2147 should go in pseudos, whereas their corresponding variables
2148 might have to go on the stack. So, disregarding the decl
2149 here would negatively impact debug info at -O0, enable
2150 coalescing between SSA_NAMEs that ought to get different
2151 stack/pseudo assignments, and get the incoming argument
2152 processing thoroughly confused by PARM_DECLs expected to live
2153 in stack slots but assigned to pseudos. */
2154 if (!SSA_NAME_VAR (decl))
2155 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2156 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2157
2158 decl = SSA_NAME_VAR (decl);
2159 }
2160
2161 /* Honor volatile. */
2162 if (TREE_SIDE_EFFECTS (decl))
2163 return false;
2164
2165 /* Honor addressability. */
2166 if (TREE_ADDRESSABLE (decl))
2167 return false;
2168
2169 /* RESULT_DECLs are a bit special in that they're assigned without
2170 regard to use_register_for_decl, but we generally only store in
2171 them. If we coalesce their SSA NAMEs, we'd better return a
2172 result that matches the assignment in expand_function_start. */
2173 if (TREE_CODE (decl) == RESULT_DECL)
2174 {
2175 /* If it's not an aggregate, we're going to use a REG or a
2176 PARALLEL containing a REG. */
2177 if (!aggregate_value_p (decl, current_function_decl))
2178 return true;
2179
2180 /* If expand_function_start determines the return value, we'll
2181 use MEM if it's not by reference. */
2182 if (cfun->returns_pcc_struct
2183 || (targetm.calls.struct_value_rtx
2184 (TREE_TYPE (current_function_decl), 1)))
2185 return DECL_BY_REFERENCE (decl);
2186
2187 /* Otherwise, we're taking an extra all.function_result_decl
2188 argument. It's set up in assign_parms_augmented_arg_list,
2189 under the (negated) conditions above, and then it's used to
2190 set up the RESULT_DECL rtl in assign_parms, after looping
2191 over all parameters. Now, if the RESULT_DECL is not by
2192 reference, we'll use a MEM either way. */
2193 if (!DECL_BY_REFERENCE (decl))
2194 return false;
2195
2196 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2197 the function_result_decl's assignment. Since it's a pointer,
2198 we can short-circuit a number of the tests below, and we must
2199 duplicate them because we don't have the
2200 function_result_decl to test. */
2201 if (!targetm.calls.allocate_stack_slots_for_args ())
2202 return true;
2203 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2204 if (optimize)
2205 return true;
2206 /* We don't set DECL_REGISTER for the function_result_decl. */
2207 return false;
2208 }
2209
2210 /* Only register-like things go in registers. */
2211 if (DECL_MODE (decl) == BLKmode)
2212 return false;
2213
2214 /* If -ffloat-store specified, don't put explicit float variables
2215 into registers. */
2216 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2217 propagates values across these stores, and it probably shouldn't. */
2218 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2219 return false;
2220
2221 if (!targetm.calls.allocate_stack_slots_for_args ())
2222 return true;
2223
2224 /* If we're not interested in tracking debugging information for
2225 this decl, then we can certainly put it in a register. */
2226 if (DECL_IGNORED_P (decl))
2227 return true;
2228
2229 if (optimize)
2230 return true;
2231
2232 if (!DECL_REGISTER (decl))
2233 return false;
2234
2235 /* When not optimizing, disregard register keyword for types that
2236 could have methods, otherwise the methods won't be callable from
2237 the debugger. */
2238 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2239 return false;
2240
2241 return true;
2242 }
2243
2244 /* Structures to communicate between the subroutines of assign_parms.
2245 The first holds data persistent across all parameters, the second
2246 is cleared out for each parameter. */
2247
2248 struct assign_parm_data_all
2249 {
2250 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2251 should become a job of the target or otherwise encapsulated. */
2252 CUMULATIVE_ARGS args_so_far_v;
2253 cumulative_args_t args_so_far;
2254 struct args_size stack_args_size;
2255 tree function_result_decl;
2256 tree orig_fnargs;
2257 rtx_insn *first_conversion_insn;
2258 rtx_insn *last_conversion_insn;
2259 HOST_WIDE_INT pretend_args_size;
2260 HOST_WIDE_INT extra_pretend_bytes;
2261 int reg_parm_stack_space;
2262 };
2263
2264 struct assign_parm_data_one
2265 {
2266 tree nominal_type;
2267 function_arg_info arg;
2268 rtx entry_parm;
2269 rtx stack_parm;
2270 machine_mode nominal_mode;
2271 machine_mode passed_mode;
2272 struct locate_and_pad_arg_data locate;
2273 int partial;
2274 };
2275
2276 /* A subroutine of assign_parms. Initialize ALL. */
2277
2278 static void
2279 assign_parms_initialize_all (struct assign_parm_data_all *all)
2280 {
2281 tree fntype ATTRIBUTE_UNUSED;
2282
2283 memset (all, 0, sizeof (*all));
2284
2285 fntype = TREE_TYPE (current_function_decl);
2286
2287 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2288 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2289 #else
2290 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2291 current_function_decl, -1);
2292 #endif
2293 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2294
2295 #ifdef INCOMING_REG_PARM_STACK_SPACE
2296 all->reg_parm_stack_space
2297 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2298 #endif
2299 }
2300
2301 /* If ARGS contains entries with complex types, split the entry into two
2302 entries of the component type, modifying ARGS in place when
2303 substitutions are needed. */
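/* For example (assuming the target hook accepts the type), a PARM_DECL of
   type _Complex double is replaced by two double PARM_DECLs, the real
   part followed by the imaginary part.  */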
2304
2305 static void
2306 split_complex_args (vec<tree> *args)
2307 {
2308 unsigned i;
2309 tree p;
2310
2311 FOR_EACH_VEC_ELT (*args, i, p)
2312 {
2313 tree type = TREE_TYPE (p);
2314 if (TREE_CODE (type) == COMPLEX_TYPE
2315 && targetm.calls.split_complex_arg (type))
2316 {
2317 tree decl;
2318 tree subtype = TREE_TYPE (type);
2319 bool addressable = TREE_ADDRESSABLE (p);
2320
2321 /* Rewrite the PARM_DECL's type with its component. */
2322 p = copy_node (p);
2323 TREE_TYPE (p) = subtype;
2324 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2325 SET_DECL_MODE (p, VOIDmode);
2326 DECL_SIZE (p) = NULL;
2327 DECL_SIZE_UNIT (p) = NULL;
2328 /* If this arg must go in memory, put it in a pseudo here.
2329 We can't allow it to go in memory as per normal parms,
2330 because the usual place might not have the imag part
2331 adjacent to the real part. */
2332 DECL_ARTIFICIAL (p) = addressable;
2333 DECL_IGNORED_P (p) = addressable;
2334 TREE_ADDRESSABLE (p) = 0;
2335 layout_decl (p, 0);
2336 (*args)[i] = p;
2337
2338 /* Build a second synthetic decl. */
2339 decl = build_decl (EXPR_LOCATION (p),
2340 PARM_DECL, NULL_TREE, subtype);
2341 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2342 DECL_ARTIFICIAL (decl) = addressable;
2343 DECL_IGNORED_P (decl) = addressable;
2344 layout_decl (decl, 0);
2345 args->safe_insert (++i, decl);
2346 }
2347 }
2348 }
2349
2350 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2351 the hidden struct return argument, and (abi willing) complex args.
2352 Return the new parameter list. */
2353
2354 static vec<tree>
2355 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2356 {
2357 tree fndecl = current_function_decl;
2358 tree fntype = TREE_TYPE (fndecl);
2359 vec<tree> fnargs = vNULL;
2360 tree arg;
2361
2362 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2363 fnargs.safe_push (arg);
2364
2365 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2366
2367 /* If struct value address is treated as the first argument, make it so. */
2368 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2369 && ! cfun->returns_pcc_struct
2370 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2371 {
2372 tree type = build_pointer_type (TREE_TYPE (fntype));
2373 tree decl;
2374
2375 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2376 PARM_DECL, get_identifier (".result_ptr"), type);
2377 DECL_ARG_TYPE (decl) = type;
2378 DECL_ARTIFICIAL (decl) = 1;
2379 DECL_NAMELESS (decl) = 1;
2380 TREE_CONSTANT (decl) = 1;
2381 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2382 changes, the end of the RESULT_DECL handling block in
2383 use_register_for_decl must be adjusted to match. */
2384
2385 DECL_CHAIN (decl) = all->orig_fnargs;
2386 all->orig_fnargs = decl;
2387 fnargs.safe_insert (0, decl);
2388
2389 all->function_result_decl = decl;
2390 }
2391
2392 /* If the target wants to split complex arguments into scalars, do so. */
2393 if (targetm.calls.split_complex_arg)
2394 split_complex_args (&fnargs);
2395
2396 return fnargs;
2397 }
2398
2399 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2400 data for the parameter. Incorporate ABI specifics such as pass-by-
2401 reference and type promotion. */
2402
2403 static void
2404 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2405 struct assign_parm_data_one *data)
2406 {
2407 int unsignedp;
2408
2409 *data = assign_parm_data_one ();
2410
2411 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2412 if (!cfun->stdarg)
2413 data->arg.named = 1; /* No variadic parms. */
2414 else if (DECL_CHAIN (parm))
2415 data->arg.named = 1; /* Not the last non-variadic parm. */
2416 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2417 data->arg.named = 1; /* Only variadic ones are unnamed. */
2418 else
2419 data->arg.named = 0; /* Treat as variadic. */
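  /* E.g. for "int f (int a, ...)", A is the last parameter before the
     ellipsis and has no DECL_CHAIN successor, so whether it counts as
     named is left to the target's strict_argument_naming hook (a sketch
     of the cases above).  */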
2420
2421 data->nominal_type = TREE_TYPE (parm);
2422 data->arg.type = DECL_ARG_TYPE (parm);
2423
2424 /* Look out for errors propagating this far. Also, if the parameter's
2425 type is void then its value doesn't matter. */
2426 if (TREE_TYPE (parm) == error_mark_node
2427 /* This can happen after weird syntax errors
2428 or if an enum type is defined among the parms. */
2429 || TREE_CODE (parm) != PARM_DECL
2430 || data->arg.type == NULL
2431 || VOID_TYPE_P (data->nominal_type))
2432 {
2433 data->nominal_type = data->arg.type = void_type_node;
2434 data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2435 return;
2436 }
2437
2438 /* Find mode of arg as it is passed, and mode of arg as it should be
2439 during execution of this function. */
2440 data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2441 data->nominal_mode = TYPE_MODE (data->nominal_type);
2442
2443 /* If the parm is to be passed as a transparent union or record, use the
2444 type of the first field for the tests below. We have already verified
2445 that the modes are the same. */
2446 if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2447 && TYPE_TRANSPARENT_AGGR (data->arg.type))
2448 data->arg.type = TREE_TYPE (first_field (data->arg.type));
2449
2450 /* See if this arg was passed by invisible reference. */
2451 if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2452 {
2453 data->nominal_type = data->arg.type;
2454 data->passed_mode = data->nominal_mode = data->arg.mode;
2455 }
2456
2457 /* Find mode as it is passed by the ABI. */
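  /* E.g. on many targets promote_function_mode widens a "short" argument
     so it is passed in SImode even though its nominal mode is HImode
     (illustrative; the promotion is entirely the target's choice).  */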
2458 unsignedp = TYPE_UNSIGNED (data->arg.type);
2459 data->arg.mode
2460 = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2461 TREE_TYPE (current_function_decl), 0);
2462 }
2463
2464 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2465
2466 static void
2467 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2468 struct assign_parm_data_one *data, bool no_rtl)
2469 {
2470 int varargs_pretend_bytes = 0;
2471
2472 function_arg_info last_named_arg = data->arg;
2473 last_named_arg.named = true;
2474 targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2475 &varargs_pretend_bytes, no_rtl);
2476
2477 /* If the back-end has requested extra stack space, record how much is
2478 needed. Do not change pretend_args_size otherwise since it may be
2479 nonzero from an earlier partial argument. */
2480 if (varargs_pretend_bytes > 0)
2481 all->pretend_args_size = varargs_pretend_bytes;
2482 }
2483
2484 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2485 the incoming location of the current parameter. */
2486
2487 static void
2488 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2489 struct assign_parm_data_one *data)
2490 {
2491 HOST_WIDE_INT pretend_bytes = 0;
2492 rtx entry_parm;
2493 bool in_regs;
2494
2495 if (data->arg.mode == VOIDmode)
2496 {
2497 data->entry_parm = data->stack_parm = const0_rtx;
2498 return;
2499 }
2500
2501 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2502 data->arg.type);
2503
2504 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2505 data->arg);
2506 if (entry_parm == 0)
2507 data->arg.mode = data->passed_mode;
2508
2509 /* Determine parm's home in the stack, in case it arrives in the stack
2510 or we should pretend it did. Compute the stack position and rtx where
2511 the argument arrives and its size.
2512
2513 There is one complexity here: If this was a parameter that would
2514 have been passed in registers, but wasn't only because it is
2515 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2516 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2517 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2518 as it was the previous time. */
2519 in_regs = (entry_parm != 0);
2520 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2521 in_regs = true;
2522 #endif
2523 if (!in_regs && !data->arg.named)
2524 {
2525 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2526 {
2527 rtx tem;
2528 function_arg_info named_arg = data->arg;
2529 named_arg.named = true;
2530 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2531 named_arg);
2532 in_regs = tem != NULL;
2533 }
2534 }
2535
2536 /* If this parameter was passed both in registers and in the stack, use
2537 the copy on the stack. */
2538 if (targetm.calls.must_pass_in_stack (data->arg))
2539 entry_parm = 0;
2540
2541 if (entry_parm)
2542 {
2543 int partial;
2544
2545 partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2546 data->partial = partial;
2547
2548 /* The caller might already have allocated stack space for the
2549 register parameters. */
2550 if (partial != 0 && all->reg_parm_stack_space == 0)
2551 {
2552 /* Part of this argument is passed in registers and part
2553 is passed on the stack. Ask the prologue code to extend
2554 the stack part so that we can recreate the full value.
2555
2556 PRETEND_BYTES is the size of the registers we need to store.
2557 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2558 stack space that the prologue should allocate.
2559
2560 Internally, gcc assumes that the argument pointer is aligned
2561 to STACK_BOUNDARY bits. This is used both for alignment
2562 optimizations (see init_emit) and to locate arguments that are
2563 aligned to more than PARM_BOUNDARY bits. We must preserve this
2564 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2565 a stack boundary. */
2566
2567 /* We assume at most one partial arg, and it must be the first
2568 argument on the stack. */
2569 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2570
2571 pretend_bytes = partial;
2572 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
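	  /* Illustrative numbers: if 8 bytes of this argument arrived in
	     registers and STACK_BYTES is 16, CEIL_ROUND (8, 16) makes
	     pretend_args_size 16.  */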
2573
2574 /* We want to align relative to the actual stack pointer, so
2575 don't include this in the stack size until later. */
2576 all->extra_pretend_bytes = all->pretend_args_size;
2577 }
2578 }
2579
2580 locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2581 all->reg_parm_stack_space,
2582 entry_parm ? data->partial : 0, current_function_decl,
2583 &all->stack_args_size, &data->locate);
2584
2585 /* Update parm_stack_boundary if this parameter is passed in the
2586 stack. */
2587 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2588 crtl->parm_stack_boundary = data->locate.boundary;
2589
2590 /* Adjust offsets to include the pretend args. */
2591 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2592 data->locate.slot_offset.constant += pretend_bytes;
2593 data->locate.offset.constant += pretend_bytes;
2594
2595 data->entry_parm = entry_parm;
2596 }
2597
2598 /* A subroutine of assign_parms. If there is actually space on the stack
2599 for this parm, count it in stack_args_size and return true. */
2600
2601 static bool
2602 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2603 struct assign_parm_data_one *data)
2604 {
2605 /* Trivially true if we've no incoming register. */
2606 if (data->entry_parm == NULL)
2607 ;
2608 /* Also true if we're partially in registers and partially not,
2609 since we've arranged to drop the entire argument on the stack. */
2610 else if (data->partial != 0)
2611 ;
2612 /* Also true if the target says that it's passed in both registers
2613 and on the stack. */
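  /* By convention such a PARALLEL starts with an EXPR_LIST whose register
     operand is NULL_RTX, e.g. (expr_list (nil) (const_int 0)), followed
     by the registers carrying the rest of the value; that is what the
     test below looks for.  */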
2614 else if (GET_CODE (data->entry_parm) == PARALLEL
2615 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2616 ;
2617 /* Also true if the target says that there's stack allocated for
2618 all register parameters. */
2619 else if (all->reg_parm_stack_space > 0)
2620 ;
2621 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2622 else
2623 return false;
2624
2625 all->stack_args_size.constant += data->locate.size.constant;
2626 if (data->locate.size.var)
2627 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2628
2629 return true;
2630 }
2631
2632 /* A subroutine of assign_parms. Given that this parameter is allocated
2633 stack space by the ABI, find it. */
2634
2635 static void
2636 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2637 {
2638 rtx offset_rtx, stack_parm;
2639 unsigned int align, boundary;
2640
2641 /* If we're passing this arg using a reg, make its stack home the
2642 aligned stack slot. */
2643 if (data->entry_parm)
2644 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2645 else
2646 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2647
2648 stack_parm = crtl->args.internal_arg_pointer;
2649 if (offset_rtx != const0_rtx)
2650 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2651 stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2652
2653 if (!data->arg.pass_by_reference)
2654 {
2655 set_mem_attributes (stack_parm, parm, 1);
2656 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2657 while promoted mode's size is needed. */
2658 if (data->arg.mode != BLKmode
2659 && data->arg.mode != DECL_MODE (parm))
2660 {
2661 set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2662 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2663 {
2664 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2665 data->arg.mode);
2666 if (maybe_ne (offset, 0))
2667 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2668 }
2669 }
2670 }
2671
2672 boundary = data->locate.boundary;
2673 align = BITS_PER_UNIT;
2674
2675 /* If we're padding upward, we know that the alignment of the slot
2676 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2677 intentionally forcing upward padding. Otherwise we have to come
2678 up with a guess at the alignment based on OFFSET_RTX. */
2679 poly_int64 offset;
2680 if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2681 align = boundary;
2682 else if (data->locate.where_pad == PAD_UPWARD)
2683 {
2684 align = boundary;
2685 /* If the argument offset is actually more aligned than the nominal
2686 stack slot boundary, take advantage of that excess alignment.
2687 Don't make any assumptions if STACK_POINTER_OFFSET is in use. */
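	  /* E.g. a constant offset of 24 bytes gives known_alignment (24)
	     == 8, i.e. 64 bits of provable alignment (illustrative
	     numbers only).  */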
2688 if (poly_int_rtx_p (offset_rtx, &offset)
2689 && known_eq (STACK_POINTER_OFFSET, 0))
2690 {
2691 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2692 if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2693 offset_align = STACK_BOUNDARY;
2694 align = MAX (align, offset_align);
2695 }
2696 }
2697 else if (poly_int_rtx_p (offset_rtx, &offset))
2698 {
2699 align = least_bit_hwi (boundary);
2700 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2701 if (offset_align != 0)
2702 align = MIN (align, offset_align);
2703 }
2704 set_mem_align (stack_parm, align);
2705
2706 if (data->entry_parm)
2707 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2708
2709 data->stack_parm = stack_parm;
2710 }
2711
2712 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2713 always valid and contiguous. */
2714
2715 static void
2716 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2717 {
2718 rtx entry_parm = data->entry_parm;
2719 rtx stack_parm = data->stack_parm;
2720
2721 /* If this parm was passed part in regs and part in memory, pretend it
2722 arrived entirely in memory by pushing the register-part onto the stack.
2723 In the special case of a DImode or DFmode that is split, we could put
2724 it together in a pseudoreg directly, but for now that's not worth
2725 bothering with. */
2726 if (data->partial != 0)
2727 {
2728 /* Handle calls that pass values in multiple non-contiguous
2729 locations. The Irix 6 ABI has examples of this. */
2730 if (GET_CODE (entry_parm) == PARALLEL)
2731 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2732 data->arg.type, int_size_in_bytes (data->arg.type));
2733 else
2734 {
2735 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2736 move_block_from_reg (REGNO (entry_parm),
2737 validize_mem (copy_rtx (stack_parm)),
2738 data->partial / UNITS_PER_WORD);
2739 }
2740
2741 entry_parm = stack_parm;
2742 }
2743
2744 /* If we didn't decide this parm came in a register, by default it came
2745 on the stack. */
2746 else if (entry_parm == NULL)
2747 entry_parm = stack_parm;
2748
2749 /* When an argument is passed in multiple locations, we can't make use
2750 of this information, but we can save some copying if the whole argument
2751 is passed in a single register. */
2752 else if (GET_CODE (entry_parm) == PARALLEL
2753 && data->nominal_mode != BLKmode
2754 && data->passed_mode != BLKmode)
2755 {
2756 size_t i, len = XVECLEN (entry_parm, 0);
2757
2758 for (i = 0; i < len; i++)
2759 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2760 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2761 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2762 == data->passed_mode)
2763 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2764 {
2765 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2766 break;
2767 }
2768 }
2769
2770 data->entry_parm = entry_parm;
2771 }
2772
2773 /* A subroutine of assign_parms. Reconstitute any values which were
2774 passed in multiple registers and would fit in a single register. */
2775
2776 static void
2777 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2778 {
2779 rtx entry_parm = data->entry_parm;
2780
2781 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2782 This can be done with register operations rather than on the
2783 stack, even if we will store the reconstituted parameter on the
2784 stack later. */
2785 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2786 {
2787 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2788 emit_group_store (parmreg, entry_parm, data->arg.type,
2789 GET_MODE_SIZE (GET_MODE (entry_parm)));
2790 entry_parm = parmreg;
2791 }
2792
2793 data->entry_parm = entry_parm;
2794 }
2795
2796 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2797 always valid and properly aligned. */
2798
2799 static void
2800 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2801 {
2802 rtx stack_parm = data->stack_parm;
2803
2804 /* If we can't trust the parm stack slot to be aligned enough for its
2805 ultimate type, don't use that slot after entry. We'll make another
2806 stack slot, if we need one. */
2807 if (stack_parm
2808 && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2809 && ((optab_handler (movmisalign_optab, data->nominal_mode)
2810 != CODE_FOR_nothing)
2811 || targetm.slow_unaligned_access (data->nominal_mode,
2812 MEM_ALIGN (stack_parm))))
2813 || (data->nominal_type
2814 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2815 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2816 stack_parm = NULL;
2817
2818 /* If parm was passed in memory, and we need to convert it on entry,
2819 don't store it back in that same slot. */
2820 else if (data->entry_parm == stack_parm
2821 && data->nominal_mode != BLKmode
2822 && data->nominal_mode != data->passed_mode)
2823 stack_parm = NULL;
2824
2825 /* If stack protection is in effect for this function, don't leave any
2826 pointers in their passed stack slots. */
2827 else if (crtl->stack_protect_guard
2828 && (flag_stack_protect == 2
2829 || data->arg.pass_by_reference
2830 || POINTER_TYPE_P (data->nominal_type)))
2831 stack_parm = NULL;
2832
2833 data->stack_parm = stack_parm;
2834 }
2835
2836 /* A subroutine of assign_parms. Return true if the current parameter
2837 should be stored as a BLKmode in the current frame. */
2838
2839 static bool
2840 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2841 {
2842 if (data->nominal_mode == BLKmode)
2843 return true;
2844 if (GET_MODE (data->entry_parm) == BLKmode)
2845 return true;
2846
2847 #ifdef BLOCK_REG_PADDING
2848 /* Only assign_parm_setup_block knows how to deal with register arguments
2849 that are padded at the least significant end. */
2850 if (REG_P (data->entry_parm)
2851 && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2852 && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2853 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2854 return true;
2855 #endif
2856
2857 return false;
2858 }
2859
2860 /* A subroutine of assign_parms. Arrange for the parameter to be
2861 present and valid in DATA->STACK_PARM. */
2862
2863 static void
2864 assign_parm_setup_block (struct assign_parm_data_all *all,
2865 tree parm, struct assign_parm_data_one *data)
2866 {
2867 rtx entry_parm = data->entry_parm;
2868 rtx stack_parm = data->stack_parm;
2869 rtx target_reg = NULL_RTX;
2870 bool in_conversion_seq = false;
2871 HOST_WIDE_INT size;
2872 HOST_WIDE_INT size_stored;
2873
2874 if (GET_CODE (entry_parm) == PARALLEL)
2875 entry_parm = emit_group_move_into_temps (entry_parm);
2876
2877 /* If we want the parameter in a pseudo, don't use a stack slot. */
2878 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2879 {
2880 tree def = ssa_default_def (cfun, parm);
2881 gcc_assert (def);
2882 machine_mode mode = promote_ssa_mode (def, NULL);
2883 rtx reg = gen_reg_rtx (mode);
2884 if (GET_CODE (reg) != CONCAT)
2885 stack_parm = reg;
2886 else
2887 {
2888 target_reg = reg;
2889 /* Avoid allocating a stack slot, if there isn't one
2890 preallocated by the ABI. It might seem like we should
2891 always prefer a pseudo, but converting between
2892 floating-point and integer modes goes through the stack
2893 on various machines, so it's better to use the reserved
2894 stack slot than to risk wasting it and allocating more
2895 for the conversion. */
2896 if (stack_parm == NULL_RTX)
2897 {
2898 int save = generating_concat_p;
2899 generating_concat_p = 0;
2900 stack_parm = gen_reg_rtx (mode);
2901 generating_concat_p = save;
2902 }
2903 }
2904 data->stack_parm = NULL;
2905 }
2906
2907 size = int_size_in_bytes (data->arg.type);
2908 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
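  /* E.g. with UNITS_PER_WORD == 8, a 10-byte argument is stored in
     CEIL_ROUND (10, 8) == 16 bytes, i.e. two full words (illustrative
     sizes).  */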
2909 if (stack_parm == 0)
2910 {
2911 HOST_WIDE_INT parm_align
2912 = (STRICT_ALIGNMENT
2913 ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2914
2915 SET_DECL_ALIGN (parm, parm_align);
2916 if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2917 {
2918 rtx allocsize = gen_int_mode (size_stored, Pmode);
2919 get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2920 stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2921 MAX_SUPPORTED_STACK_ALIGNMENT);
2922 rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2923 DECL_ALIGN (parm));
2924 mark_reg_pointer (addr, DECL_ALIGN (parm));
2925 stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2926 MEM_NOTRAP_P (stack_parm) = 1;
2927 }
2928 else
2929 stack_parm = assign_stack_local (BLKmode, size_stored,
2930 DECL_ALIGN (parm));
2931 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2932 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2933 set_mem_attributes (stack_parm, parm, 1);
2934 }
2935
2936 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2937 calls that pass values in multiple non-contiguous locations. */
2938 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2939 {
2940 rtx mem;
2941
2942 /* Note that we will be storing an integral number of words.
2943 So we have to be careful to ensure that we allocate an
2944 integral number of words. We do this above when we call
2945 assign_stack_local if space was not allocated in the argument
2946 list. If it was, this will not work if PARM_BOUNDARY is not
2947 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2948 if it becomes a problem. The exception is when BLKmode arrives
2949 with arguments not conforming to word_mode. */
2950
2951 if (data->stack_parm == 0)
2952 ;
2953 else if (GET_CODE (entry_parm) == PARALLEL)
2954 ;
2955 else
2956 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2957
2958 mem = validize_mem (copy_rtx (stack_parm));
2959
2960 /* Handle values in multiple non-contiguous locations. */
2961 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2962 emit_group_store (mem, entry_parm, data->arg.type, size);
2963 else if (GET_CODE (entry_parm) == PARALLEL)
2964 {
2965 push_to_sequence2 (all->first_conversion_insn,
2966 all->last_conversion_insn);
2967 emit_group_store (mem, entry_parm, data->arg.type, size);
2968 all->first_conversion_insn = get_insns ();
2969 all->last_conversion_insn = get_last_insn ();
2970 end_sequence ();
2971 in_conversion_seq = true;
2972 }
2973
2974 else if (size == 0)
2975 ;
2976
2977 /* If SIZE is that of a mode no bigger than a word, just use
2978 that mode's store operation. */
2979 else if (size <= UNITS_PER_WORD)
2980 {
2981 unsigned int bits = size * BITS_PER_UNIT;
2982 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
2983
2984 if (mode != BLKmode
2985 #ifdef BLOCK_REG_PADDING
2986 && (size == UNITS_PER_WORD
2987 || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
2988 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2989 #endif
2990 )
2991 {
2992 rtx reg;
2993
2994 /* We are really truncating a word_mode value containing
2995 SIZE bytes into a value of mode MODE. If such an
2996 operation requires no actual instructions, we can refer
2997 to the value directly in mode MODE, otherwise we must
2998 start with the register in word_mode and explicitly
2999 convert it. */
3000 if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
3001 BITS_PER_WORD))
3002 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3003 else
3004 {
3005 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3006 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3007 }
3008 emit_move_insn (change_address (mem, mode, 0), reg);
3009 }
3010
3011 #ifdef BLOCK_REG_PADDING
3012 /* Storing the register in memory as a full word, as
3013 move_block_from_reg below would do, and then using the
3014 MEM in a smaller mode, has the effect of shifting right
3015 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3016 shifting must be explicit. */
3017 else if (!MEM_P (mem))
3018 {
3019 rtx x;
3020
3021 /* If the assert below fails, we should have taken the
3022 mode != BLKmode path above, unless we have downward
3023 padding of smaller-than-word arguments on a machine
3024 with little-endian bytes, which would likely require
3025 additional changes to work correctly. */
3026 gcc_checking_assert (BYTES_BIG_ENDIAN
3027 && (BLOCK_REG_PADDING (mode,
3028 data->arg.type, 1)
3029 == PAD_UPWARD));
3030
3031 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3032
3033 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3034 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3035 NULL_RTX, 1);
3036 x = force_reg (word_mode, x);
3037 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3038
3039 emit_move_insn (mem, x);
3040 }
3041 #endif
3042
3043 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3044 machine must be aligned to the left before storing
3045 to memory. Note that the previous test doesn't
3046 handle all cases (e.g. SIZE == 3). */
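	  /* E.g. with UNITS_PER_WORD == 8 and SIZE == 3, the value is
	     shifted left by (8 - 3) * BITS_PER_UNIT == 40 bits so its
	     three significant bytes land at the most significant end of
	     the word (illustrative sizes).  */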
3047 else if (size != UNITS_PER_WORD
3048 #ifdef BLOCK_REG_PADDING
3049 && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3050 == PAD_DOWNWARD)
3051 #else
3052 && BYTES_BIG_ENDIAN
3053 #endif
3054 )
3055 {
3056 rtx tem, x;
3057 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3058 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3059
3060 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3061 tem = change_address (mem, word_mode, 0);
3062 emit_move_insn (tem, x);
3063 }
3064 else
3065 move_block_from_reg (REGNO (entry_parm), mem,
3066 size_stored / UNITS_PER_WORD);
3067 }
3068 else if (!MEM_P (mem))
3069 {
3070 gcc_checking_assert (size > UNITS_PER_WORD);
3071 #ifdef BLOCK_REG_PADDING
3072 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3073 data->arg.type, 0)
3074 == PAD_UPWARD);
3075 #endif
3076 emit_move_insn (mem, entry_parm);
3077 }
3078 else
3079 move_block_from_reg (REGNO (entry_parm), mem,
3080 size_stored / UNITS_PER_WORD);
3081 }
3082 else if (data->stack_parm == 0)
3083 {
3084 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3085 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3086 BLOCK_OP_NORMAL);
3087 all->first_conversion_insn = get_insns ();
3088 all->last_conversion_insn = get_last_insn ();
3089 end_sequence ();
3090 in_conversion_seq = true;
3091 }
3092
3093 if (target_reg)
3094 {
3095 if (!in_conversion_seq)
3096 emit_move_insn (target_reg, stack_parm);
3097 else
3098 {
3099 push_to_sequence2 (all->first_conversion_insn,
3100 all->last_conversion_insn);
3101 emit_move_insn (target_reg, stack_parm);
3102 all->first_conversion_insn = get_insns ();
3103 all->last_conversion_insn = get_last_insn ();
3104 end_sequence ();
3105 }
3106 stack_parm = target_reg;
3107 }
3108
3109 data->stack_parm = stack_parm;
3110 set_parm_rtl (parm, stack_parm);
3111 }
3112
3113 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3114 parameter. Get it there. Perform all ABI specified conversions. */
3115
3116 static void
3117 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3118 struct assign_parm_data_one *data)
3119 {
3120 rtx parmreg, validated_mem;
3121 rtx equiv_stack_parm;
3122 machine_mode promoted_nominal_mode;
3123 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3124 bool did_conversion = false;
3125 bool need_conversion, moved;
3126 enum insn_code icode;
3127 rtx rtl;
3128
3129 /* Store the parm in a pseudoregister during the function, but we may
3130 need to do it in a wider mode. Using 2 here makes the result
3131 consistent with promote_decl_mode and thus expand_expr_real_1. */
3132 promoted_nominal_mode
3133 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3134 TREE_TYPE (current_function_decl), 2);
3135
3136 parmreg = gen_reg_rtx (promoted_nominal_mode);
3137 if (!DECL_ARTIFICIAL (parm))
3138 mark_user_reg (parmreg);
3139
3140 /* If this was an item that we received a pointer to,
3141 set rtl appropriately. */
3142 if (data->arg.pass_by_reference)
3143 {
3144 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3145 set_mem_attributes (rtl, parm, 1);
3146 }
3147 else
3148 rtl = parmreg;
3149
3150 assign_parm_remove_parallels (data);
3151
3152 /* Copy the value into the register, thus bridging between
3153 assign_parm_find_data_types and expand_expr_real_1. */
3154
3155 equiv_stack_parm = data->stack_parm;
3156 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3157
3158 need_conversion = (data->nominal_mode != data->passed_mode
3159 || promoted_nominal_mode != data->arg.mode);
3160 moved = false;
3161
3162 if (need_conversion
3163 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3164 && data->nominal_mode == data->passed_mode
3165 && data->nominal_mode == GET_MODE (data->entry_parm))
3166 {
3167 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3168 mode, by the caller. We now have to convert it to
3169 NOMINAL_MODE, if different. However, PARMREG may be in
3170 a different mode than NOMINAL_MODE if it is being stored
3171 promoted.
3172
3173 If ENTRY_PARM is a hard register, it might be in a register
3174 not valid for operating in its mode (e.g., an odd-numbered
3175 register for a DFmode). In that case, moves are the only
3176 thing valid, so we can't do a convert from there. This
3177 occurs when the calling sequence allows such misaligned
3178 usages.
3179
3180 In addition, the conversion may involve a call, which could
3181 clobber parameters which haven't been copied to pseudo
3182 registers yet.
3183
3184 First, we try to emit an insn which performs the necessary
3185 conversion. We verify that this insn does not clobber any
3186 hard registers. */
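	  /* A typical case (illustrative): passed_mode is SImode while
	     promoted_nominal_mode is DImode, so a single sign- or
	     zero-extend insn does the conversion, and the generated
	     sequence is only used if it sets no hard registers.  */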
3187
3188 rtx op0, op1;
3189
3190 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3191 unsignedp);
3192
3193 op0 = parmreg;
3194 op1 = validated_mem;
3195 if (icode != CODE_FOR_nothing
3196 && insn_operand_matches (icode, 0, op0)
3197 && insn_operand_matches (icode, 1, op1))
3198 {
3199 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3200 rtx_insn *insn, *insns;
3201 rtx t = op1;
3202 HARD_REG_SET hardregs;
3203
3204 start_sequence ();
3205 /* If op1 is a hard register that is likely spilled, first
3206 force it into a pseudo, otherwise combiner might extend
3207 its lifetime too much. */
3208 if (GET_CODE (t) == SUBREG)
3209 t = SUBREG_REG (t);
3210 if (REG_P (t)
3211 && HARD_REGISTER_P (t)
3212 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3213 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3214 {
3215 t = gen_reg_rtx (GET_MODE (op1));
3216 emit_move_insn (t, op1);
3217 }
3218 else
3219 t = op1;
3220 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3221 data->passed_mode, unsignedp);
3222 emit_insn (pat);
3223 insns = get_insns ();
3224
3225 moved = true;
3226 CLEAR_HARD_REG_SET (hardregs);
3227 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3228 {
3229 if (INSN_P (insn))
3230 note_stores (insn, record_hard_reg_sets, &hardregs);
3231 if (!hard_reg_set_empty_p (hardregs))
3232 moved = false;
3233 }
3234
3235 end_sequence ();
3236
3237 if (moved)
3238 {
3239 emit_insn (insns);
3240 if (equiv_stack_parm != NULL_RTX)
3241 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3242 equiv_stack_parm);
3243 }
3244 }
3245 }
3246
3247 if (moved)
3248 /* Nothing to do. */
3249 ;
3250 else if (need_conversion)
3251 {
3252 /* We did not have an insn to convert directly, or the sequence
3253 generated appeared unsafe. We must first copy the parm to a
3254 pseudo reg, and save the conversion until after all
3255 parameters have been moved. */
3256
3257 int save_tree_used;
3258 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3259
3260 emit_move_insn (tempreg, validated_mem);
3261
3262 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3263 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3264
3265 if (partial_subreg_p (tempreg)
3266 && GET_MODE (tempreg) == data->nominal_mode
3267 && REG_P (SUBREG_REG (tempreg))
3268 && data->nominal_mode == data->passed_mode
3269 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3270 {
3271 /* The argument is already sign/zero extended, so note it
3272 into the subreg. */
3273 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3274 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3275 }
3276
3277 /* TREE_USED gets set erroneously during expand_assignment. */
3278 save_tree_used = TREE_USED (parm);
3279 SET_DECL_RTL (parm, rtl);
3280 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3281 SET_DECL_RTL (parm, NULL_RTX);
3282 TREE_USED (parm) = save_tree_used;
3283 all->first_conversion_insn = get_insns ();
3284 all->last_conversion_insn = get_last_insn ();
3285 end_sequence ();
3286
3287 did_conversion = true;
3288 }
3289 else if (MEM_P (data->entry_parm)
3290 && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3291 > MEM_ALIGN (data->entry_parm)
3292 && (((icode = optab_handler (movmisalign_optab,
3293 promoted_nominal_mode))
3294 != CODE_FOR_nothing)
3295 || targetm.slow_unaligned_access (promoted_nominal_mode,
3296 MEM_ALIGN (data->entry_parm))))
3297 {
3298 if (icode != CODE_FOR_nothing)
3299 emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3300 else
3301 rtl = parmreg = extract_bit_field (validated_mem,
3302 GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3303 unsignedp, parmreg,
3304 promoted_nominal_mode, VOIDmode, false, NULL);
3305 }
3306 else
3307 emit_move_insn (parmreg, validated_mem);
3308
3309 /* If we were passed a pointer but the actual value can safely live
3310 in a register, retrieve it and use it directly. */
3311 if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3312 {
3313 /* We can't use nominal_mode, because it will have been set to
3314 Pmode above. We must use the actual mode of the parm. */
3315 if (use_register_for_decl (parm))
3316 {
3317 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3318 mark_user_reg (parmreg);
3319 }
3320 else
3321 {
3322 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3323 TYPE_MODE (TREE_TYPE (parm)),
3324 TYPE_ALIGN (TREE_TYPE (parm)));
3325 parmreg
3326 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3327 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3328 align);
3329 set_mem_attributes (parmreg, parm, 1);
3330 }
3331
3332 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3333 the debug info in case it is not legitimate. */
3334 if (GET_MODE (parmreg) != GET_MODE (rtl))
3335 {
3336 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3337 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3338
3339 push_to_sequence2 (all->first_conversion_insn,
3340 all->last_conversion_insn);
3341 emit_move_insn (tempreg, rtl);
3342 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3343 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3344 tempreg);
3345 all->first_conversion_insn = get_insns ();
3346 all->last_conversion_insn = get_last_insn ();
3347 end_sequence ();
3348
3349 did_conversion = true;
3350 }
3351 else
3352 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3353
3354 rtl = parmreg;
3355
3356 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3357 now the parm. */
3358 data->stack_parm = NULL;
3359 }
3360
3361 set_parm_rtl (parm, rtl);
3362
3363 /* Mark the register as eliminable if we did no conversion and it was
3364 copied from memory at a fixed offset, and the arg pointer was not
3365 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3366 offset formed an invalid address, such memory-equivalences as we
3367 make here would screw up life analysis for it. */
3368 if (data->nominal_mode == data->passed_mode
3369 && !did_conversion
3370 && data->stack_parm != 0
3371 && MEM_P (data->stack_parm)
3372 && data->locate.offset.var == 0
3373 && reg_mentioned_p (virtual_incoming_args_rtx,
3374 XEXP (data->stack_parm, 0)))
3375 {
3376 rtx_insn *linsn = get_last_insn ();
3377 rtx_insn *sinsn;
3378 rtx set;
3379
3380 /* Mark complex types separately. */
3381 if (GET_CODE (parmreg) == CONCAT)
3382 {
3383 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3384 int regnor = REGNO (XEXP (parmreg, 0));
3385 int regnoi = REGNO (XEXP (parmreg, 1));
3386 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3387 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3388 GET_MODE_SIZE (submode));
3389
3390 /* Scan backwards for the set of the real and
3391 imaginary parts. */
3392 for (sinsn = linsn; sinsn != 0;
3393 sinsn = prev_nonnote_insn (sinsn))
3394 {
3395 set = single_set (sinsn);
3396 if (set == 0)
3397 continue;
3398
3399 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3400 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3401 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3402 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3403 }
3404 }
3405 else
3406 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3407 }
3408
3409 /* For pointer data type, suggest pointer register. */
3410 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3411 mark_reg_pointer (parmreg,
3412 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3413 }
3414
3415 /* A subroutine of assign_parms. Allocate stack space to hold the current
3416 parameter. Get it there. Perform all ABI specified conversions. */
3417
3418 static void
3419 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3420 struct assign_parm_data_one *data)
3421 {
3422 /* Value must be stored in the stack slot STACK_PARM during function
3423 execution. */
3424 bool to_conversion = false;
3425
3426 assign_parm_remove_parallels (data);
3427
3428 if (data->arg.mode != data->nominal_mode)
3429 {
3430 /* Conversion is required. */
3431 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3432
3433 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3434
3435 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3436 to_conversion = true;
3437
3438 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3439 TYPE_UNSIGNED (TREE_TYPE (parm)));
3440
3441 if (data->stack_parm)
3442 {
3443 poly_int64 offset
3444 = subreg_lowpart_offset (data->nominal_mode,
3445 GET_MODE (data->stack_parm));
3446 /* ??? This may need a big-endian conversion on sparc64. */
3447 data->stack_parm
3448 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3449 if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3450 set_mem_offset (data->stack_parm,
3451 MEM_OFFSET (data->stack_parm) + offset);
3452 }
3453 }
3454
3455 if (data->entry_parm != data->stack_parm)
3456 {
3457 rtx src, dest;
3458
3459 if (data->stack_parm == 0)
3460 {
3461 int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3462 GET_MODE (data->entry_parm),
3463 TYPE_ALIGN (data->arg.type));
3464 if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3465 && ((optab_handler (movmisalign_optab,
3466 GET_MODE (data->entry_parm))
3467 != CODE_FOR_nothing)
3468 || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3469 align)))
3470 align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3471 data->stack_parm
3472 = assign_stack_local (GET_MODE (data->entry_parm),
3473 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3474 align);
3475 align = MEM_ALIGN (data->stack_parm);
3476 set_mem_attributes (data->stack_parm, parm, 1);
3477 set_mem_align (data->stack_parm, align);
3478 }
3479
3480 dest = validize_mem (copy_rtx (data->stack_parm));
3481 src = validize_mem (copy_rtx (data->entry_parm));
3482
3483 if (MEM_P (src))
3484 {
3485 /* Use a block move to handle potentially misaligned entry_parm. */
3486 if (!to_conversion)
3487 push_to_sequence2 (all->first_conversion_insn,
3488 all->last_conversion_insn);
3489 to_conversion = true;
3490
3491 emit_block_move (dest, src,
3492 GEN_INT (int_size_in_bytes (data->arg.type)),
3493 BLOCK_OP_NORMAL);
3494 }
3495 else
3496 {
3497 if (!REG_P (src))
3498 src = force_reg (GET_MODE (src), src);
3499 emit_move_insn (dest, src);
3500 }
3501 }
3502
3503 if (to_conversion)
3504 {
3505 all->first_conversion_insn = get_insns ();
3506 all->last_conversion_insn = get_last_insn ();
3507 end_sequence ();
3508 }
3509
3510 set_parm_rtl (parm, data->stack_parm);
3511 }
3512
3513 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3514 undo the frobbing that we did in assign_parms_augmented_arg_list. */
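
/* Illustrative sketch (hypothetical ABI, not any particular target): a
"_Complex double z" parameter split by the ABI arrives as two DFmode
values; the augmented argument list holds one entry per part, and this
routine reassembles their DECL_RTL into a single (concat:DC real imag)
for the original PARM_DECL, or copies the parts into a stack slot when
the parameter is addressable. */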
3515
3516 static void
3517 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3518 vec<tree> fnargs)
3519 {
3520 tree parm;
3521 tree orig_fnargs = all->orig_fnargs;
3522 unsigned i = 0;
3523
3524 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3525 {
3526 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3527 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3528 {
3529 rtx tmp, real, imag;
3530 scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3531
3532 real = DECL_RTL (fnargs[i]);
3533 imag = DECL_RTL (fnargs[i + 1]);
3534 if (inner != GET_MODE (real))
3535 {
3536 real = gen_lowpart_SUBREG (inner, real);
3537 imag = gen_lowpart_SUBREG (inner, imag);
3538 }
3539
3540 if (TREE_ADDRESSABLE (parm))
3541 {
3542 rtx rmem, imem;
3543 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3544 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3545 DECL_MODE (parm),
3546 TYPE_ALIGN (TREE_TYPE (parm)));
3547
3548 /* split_complex_arg put the real and imag parts in
3549 pseudos. Move them to memory. */
3550 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3551 set_mem_attributes (tmp, parm, 1);
3552 rmem = adjust_address_nv (tmp, inner, 0);
3553 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3554 push_to_sequence2 (all->first_conversion_insn,
3555 all->last_conversion_insn);
3556 emit_move_insn (rmem, real);
3557 emit_move_insn (imem, imag);
3558 all->first_conversion_insn = get_insns ();
3559 all->last_conversion_insn = get_last_insn ();
3560 end_sequence ();
3561 }
3562 else
3563 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3564 set_parm_rtl (parm, tmp);
3565
3566 real = DECL_INCOMING_RTL (fnargs[i]);
3567 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3568 if (inner != GET_MODE (real))
3569 {
3570 real = gen_lowpart_SUBREG (inner, real);
3571 imag = gen_lowpart_SUBREG (inner, imag);
3572 }
3573 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3574 set_decl_incoming_rtl (parm, tmp, false);
3575 i++;
3576 }
3577 }
3578 }
3579
3580 /* Assign RTL expressions to the function's parameters. This may involve
3581 copying them into registers and using those registers as the DECL_RTL. */
3582
3583 static void
3584 assign_parms (tree fndecl)
3585 {
3586 struct assign_parm_data_all all;
3587 tree parm;
3588 vec<tree> fnargs;
3589 unsigned i;
3590
3591 crtl->args.internal_arg_pointer
3592 = targetm.calls.internal_arg_pointer ();
3593
3594 assign_parms_initialize_all (&all);
3595 fnargs = assign_parms_augmented_arg_list (&all);
3596
3597 FOR_EACH_VEC_ELT (fnargs, i, parm)
3598 {
3599 struct assign_parm_data_one data;
3600
3601 /* Extract the type of PARM; adjust it according to ABI. */
3602 assign_parm_find_data_types (&all, parm, &data);
3603
3604 /* Early out for errors and void parameters. */
3605 if (data.passed_mode == VOIDmode)
3606 {
3607 SET_DECL_RTL (parm, const0_rtx);
3608 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3609 continue;
3610 }
3611
3612 /* Estimate stack alignment from parameter alignment. */
3613 if (SUPPORTS_STACK_ALIGNMENT)
3614 {
3615 unsigned int align
3616 = targetm.calls.function_arg_boundary (data.arg.mode,
3617 data.arg.type);
3618 align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3619 if (TYPE_ALIGN (data.nominal_type) > align)
3620 align = MINIMUM_ALIGNMENT (data.nominal_type,
3621 TYPE_MODE (data.nominal_type),
3622 TYPE_ALIGN (data.nominal_type));
3623 if (crtl->stack_alignment_estimated < align)
3624 {
3625 gcc_assert (!crtl->stack_realign_processed);
3626 crtl->stack_alignment_estimated = align;
3627 }
3628 }
3629
3630 /* Find out where the parameter arrives in this function. */
3631 assign_parm_find_entry_rtl (&all, &data);
3632
3633 /* Find out where stack space for this parameter might be. */
3634 if (assign_parm_is_stack_parm (&all, &data))
3635 {
3636 assign_parm_find_stack_rtl (parm, &data);
3637 assign_parm_adjust_entry_rtl (&data);
3638 }
3639 /* Record permanently how this parm was passed. */
3640 if (data.arg.pass_by_reference)
3641 {
3642 rtx incoming_rtl
3643 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3644 data.entry_parm);
3645 set_decl_incoming_rtl (parm, incoming_rtl, true);
3646 }
3647 else
3648 set_decl_incoming_rtl (parm, data.entry_parm, false);
3649
3650 assign_parm_adjust_stack_rtl (&data);
3651
3652 if (assign_parm_setup_block_p (&data))
3653 assign_parm_setup_block (&all, parm, &data);
3654 else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3655 assign_parm_setup_reg (&all, parm, &data);
3656 else
3657 assign_parm_setup_stack (&all, parm, &data);
3658
3659 if (cfun->stdarg && !DECL_CHAIN (parm))
3660 assign_parms_setup_varargs (&all, &data, false);
3661
3662 /* Update info on where next arg arrives in registers. */
3663 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3664 }
3665
3666 if (targetm.calls.split_complex_arg)
3667 assign_parms_unsplit_complex (&all, fnargs);
3668
3669 fnargs.release ();
3670
3671 /* Output all parameter conversion instructions (possibly including calls)
3672 now that all parameters have been copied out of hard registers. */
3673 emit_insn (all.first_conversion_insn);
3674
3675 /* Estimate reload stack alignment from scalar return mode. */
3676 if (SUPPORTS_STACK_ALIGNMENT)
3677 {
3678 if (DECL_RESULT (fndecl))
3679 {
3680 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3681 machine_mode mode = TYPE_MODE (type);
3682
3683 if (mode != BLKmode
3684 && mode != VOIDmode
3685 && !AGGREGATE_TYPE_P (type))
3686 {
3687 unsigned int align = GET_MODE_ALIGNMENT (mode);
3688 if (crtl->stack_alignment_estimated < align)
3689 {
3690 gcc_assert (!crtl->stack_realign_processed);
3691 crtl->stack_alignment_estimated = align;
3692 }
3693 }
3694 }
3695 }
3696
3697 /* If we are receiving a struct value address as the first argument, set up
3698 the RTL for the function result. As this might require code to convert
3699 the transmitted address to Pmode, we do this here to ensure that possible
3700 preliminary conversions of the address have been emitted already. */
3701 if (all.function_result_decl)
3702 {
3703 tree result = DECL_RESULT (current_function_decl);
3704 rtx addr = DECL_RTL (all.function_result_decl);
3705 rtx x;
3706
3707 if (DECL_BY_REFERENCE (result))
3708 {
3709 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3710 x = addr;
3711 }
3712 else
3713 {
3714 SET_DECL_VALUE_EXPR (result,
3715 build1 (INDIRECT_REF, TREE_TYPE (result),
3716 all.function_result_decl));
3717 addr = convert_memory_address (Pmode, addr);
3718 x = gen_rtx_MEM (DECL_MODE (result), addr);
3719 set_mem_attributes (x, result, 1);
3720 }
3721
3722 DECL_HAS_VALUE_EXPR_P (result) = 1;
3723
3724 set_parm_rtl (result, x);
3725 }
3726
3727 /* We have aligned all the args, so add space for the pretend args. */
3728 crtl->args.pretend_args_size = all.pretend_args_size;
3729 all.stack_args_size.constant += all.extra_pretend_bytes;
3730 crtl->args.size = all.stack_args_size.constant;
3731
3732 /* Adjust function incoming argument size for alignment and
3733 minimum length. */
3734
3735 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3736 crtl->args.size = aligned_upper_bound (crtl->args.size,
3737 PARM_BOUNDARY / BITS_PER_UNIT);
3738
3739 if (ARGS_GROW_DOWNWARD)
3740 {
3741 crtl->args.arg_offset_rtx
3742 = (all.stack_args_size.var == 0
3743 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3744 : expand_expr (size_diffop (all.stack_args_size.var,
3745 size_int (-all.stack_args_size.constant)),
3746 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3747 }
3748 else
3749 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3750
3751 /* See how many bytes, if any, of its args a function should try to pop
3752 on return. */
3753
3754 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3755 TREE_TYPE (fndecl),
3756 crtl->args.size);
3757
3758 /* For a stdarg.h function, save info about
3759 regs and stack space used by the named args. */
3760
3761 crtl->args.info = all.args_so_far_v;
3762
3763 /* Set the rtx used for the function return value. Put this in its
3764 own variable so any optimizers that need this information don't have
3765 to include tree.h. Do this here so it gets done when an inlined
3766 function gets output. */
3767
3768 crtl->return_rtx
3769 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3770 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3771
3772 /* If scalar return value was computed in a pseudo-reg, or was a named
3773 return value that got dumped to the stack, copy that to the hard
3774 return register. */
3775 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3776 {
3777 tree decl_result = DECL_RESULT (fndecl);
3778 rtx decl_rtl = DECL_RTL (decl_result);
3779
3780 if (REG_P (decl_rtl)
3781 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3782 : DECL_REGISTER (decl_result))
3783 {
3784 rtx real_decl_rtl;
3785
3786 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3787 fndecl, true);
3788 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3789 /* The delay slot scheduler assumes that crtl->return_rtx
3790 holds the hard register containing the return value, not a
3791 temporary pseudo. */
3792 crtl->return_rtx = real_decl_rtl;
3793 }
3794 }
3795 }
3796
3797 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3798 For all seen types, gimplify their sizes. */
3799
3800 static tree
3801 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3802 {
3803 tree t = *tp;
3804
3805 *walk_subtrees = 0;
3806 if (TYPE_P (t))
3807 {
3808 if (POINTER_TYPE_P (t))
3809 *walk_subtrees = 1;
3810 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3811 && !TYPE_SIZES_GIMPLIFIED (t))
3812 {
3813 gimplify_type_sizes (t, (gimple_seq *) data);
3814 *walk_subtrees = 1;
3815 }
3816 }
3817
3818 return NULL;
3819 }
3820
3821 /* Gimplify the parameter list for current_function_decl. This involves
3822 evaluating SAVE_EXPRs of variable sized parameters and generating code
3823 to implement callee-copies reference parameters. Returns a sequence of
3824 statements to add to the beginning of the function. */
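
/* A rough sketch of the two jobs done per parameter. First, any
SAVE_EXPRs hiding in a variable-sized parameter type (or in DECL_SIZE)
are gimplified into the returned sequence. Second, for a parameter
passed by invisible reference with callee-copied semantics, a local copy
LOCAL is created (on the stack via a built alloca call when the size is
not constant), "LOCAL = PARM" is emitted, and PARM's DECL_VALUE_EXPR is
redirected to LOCAL so later code sees the copy. */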
3825
3826 gimple_seq
3827 gimplify_parameters (gimple_seq *cleanup)
3828 {
3829 struct assign_parm_data_all all;
3830 tree parm;
3831 gimple_seq stmts = NULL;
3832 vec<tree> fnargs;
3833 unsigned i;
3834
3835 assign_parms_initialize_all (&all);
3836 fnargs = assign_parms_augmented_arg_list (&all);
3837
3838 FOR_EACH_VEC_ELT (fnargs, i, parm)
3839 {
3840 struct assign_parm_data_one data;
3841
3842 /* Extract the type of PARM; adjust it according to ABI. */
3843 assign_parm_find_data_types (&all, parm, &data);
3844
3845 /* Early out for errors and void parameters. */
3846 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3847 continue;
3848
3849 /* Update info on where next arg arrives in registers. */
3850 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3851
3852 /* ??? Once upon a time variable_size stuffed parameter list
3853 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3854 turned out to be less than manageable in the gimple world.
3855 Now we have to hunt them down ourselves. */
3856 walk_tree_without_duplicates (&data.arg.type,
3857 gimplify_parm_type, &stmts);
3858
3859 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3860 {
3861 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3862 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3863 }
3864
3865 if (data.arg.pass_by_reference)
3866 {
3867 tree type = TREE_TYPE (data.arg.type);
3868 function_arg_info orig_arg (type, data.arg.named);
3869 if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3870 {
3871 tree local, t;
3872
3873 /* For constant-sized objects, this is trivial; for
3874 variable-sized objects, we have to play games. */
3875 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3876 && !(flag_stack_check == GENERIC_STACK_CHECK
3877 && compare_tree_int (DECL_SIZE_UNIT (parm),
3878 STACK_CHECK_MAX_VAR_SIZE) > 0))
3879 {
3880 local = create_tmp_var (type, get_name (parm));
3881 DECL_IGNORED_P (local) = 0;
3882 /* If PARM was addressable, move that flag over
3883 to the local copy, as its address will be taken,
3884 not the PARM's. Keep the parm's address-taken flag set
3885 as we'll query that flag during gimplification. */
3886 if (TREE_ADDRESSABLE (parm))
3887 TREE_ADDRESSABLE (local) = 1;
3888 else if (TREE_CODE (type) == COMPLEX_TYPE
3889 || TREE_CODE (type) == VECTOR_TYPE)
3890 DECL_GIMPLE_REG_P (local) = 1;
3891
3892 if (!is_gimple_reg (local)
3893 && flag_stack_reuse != SR_NONE)
3894 {
3895 tree clobber = build_constructor (type, NULL);
3896 gimple *clobber_stmt;
3897 TREE_THIS_VOLATILE (clobber) = 1;
3898 clobber_stmt = gimple_build_assign (local, clobber);
3899 gimple_seq_add_stmt (cleanup, clobber_stmt);
3900 }
3901 }
3902 else
3903 {
3904 tree ptr_type, addr;
3905
3906 ptr_type = build_pointer_type (type);
3907 addr = create_tmp_reg (ptr_type, get_name (parm));
3908 DECL_IGNORED_P (addr) = 0;
3909 local = build_fold_indirect_ref (addr);
3910
3911 t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3912 DECL_ALIGN (parm),
3913 max_int_size_in_bytes (type));
3914 /* The call has been built for a variable-sized object. */
3915 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3916 t = fold_convert (ptr_type, t);
3917 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3918 gimplify_and_add (t, &stmts);
3919 }
3920
3921 gimplify_assign (local, parm, &stmts);
3922
3923 SET_DECL_VALUE_EXPR (parm, local);
3924 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3925 }
3926 }
3927 }
3928
3929 fnargs.release ();
3930
3931 return stmts;
3932 }
3933 \f
3934 /* Compute the size and offset from the start of the stacked arguments for a
3935 parm passed in mode PASSED_MODE and with type TYPE.
3936
3937 INITIAL_OFFSET_PTR points to the current offset into the stacked
3938 arguments.
3939
3940 The starting offset and size for this parm are returned in
3941 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3942 nonzero, the offset is that of the stack slot, which is returned in
3943 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3944 padding required from the initial offset ptr to the stack slot.
3945
3946 IN_REGS is nonzero if the argument will be passed in registers. It will
3947 never be set if REG_PARM_STACK_SPACE is not defined.
3948
3949 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3950 for arguments which are passed in registers.
3951
3952 FNDECL is the function in which the argument was defined.
3953
3954 There are two types of rounding that are done. The first, controlled by
3955 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3956 argument list to be aligned to the specific boundary (in bits). This
3957 rounding affects the initial and starting offsets, but not the argument
3958 size.
3959
3960 The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3961 optionally rounds the size of the parm to PARM_BOUNDARY. The
3962 initial offset is not affected by this rounding, while the size always
3963 is and the starting offset may be. */
3964
3965 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3966 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3967 callers pass in the total size of args so far as
3968 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3969
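/* Worked example (hypothetical 32-bit target, STACK_POINTER_OFFSET of 0,
PARM_BOUNDARY of 32): for a DImode argument whose
TARGET_FUNCTION_ARG_BOUNDARY is 64, an incoming offset of 4 bytes is
padded up to 8 before the slot is assigned (the 4 bytes of padding go
into LOCATE->ALIGNMENT_PAD), LOCATE->SLOT_OFFSET becomes 8, and the
8-byte size is already a multiple of the boundary so it is unchanged. */
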
3970 void
3971 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3972 int reg_parm_stack_space, int partial,
3973 tree fndecl ATTRIBUTE_UNUSED,
3974 struct args_size *initial_offset_ptr,
3975 struct locate_and_pad_arg_data *locate)
3976 {
3977 tree sizetree;
3978 pad_direction where_pad;
3979 unsigned int boundary, round_boundary;
3980 int part_size_in_regs;
3981
3982 /* If we have found a stack parm before we reach the end of the
3983 area reserved for registers, skip that area. */
3984 if (! in_regs)
3985 {
3986 if (reg_parm_stack_space > 0)
3987 {
3988 if (initial_offset_ptr->var
3989 || !ordered_p (initial_offset_ptr->constant,
3990 reg_parm_stack_space))
3991 {
3992 initial_offset_ptr->var
3993 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3994 ssize_int (reg_parm_stack_space));
3995 initial_offset_ptr->constant = 0;
3996 }
3997 else
3998 initial_offset_ptr->constant
3999 = ordered_max (initial_offset_ptr->constant,
4000 reg_parm_stack_space);
4001 }
4002 }
4003
4004 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4005
4006 sizetree = (type
4007 ? arg_size_in_bytes (type)
4008 : size_int (GET_MODE_SIZE (passed_mode)));
4009 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4010 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4011 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4012 type);
4013 locate->where_pad = where_pad;
4014
4015 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4016 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4017 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4018
4019 locate->boundary = boundary;
4020
4021 if (SUPPORTS_STACK_ALIGNMENT)
4022 {
4023 /* stack_alignment_estimated can't change after stack has been
4024 realigned. */
4025 if (crtl->stack_alignment_estimated < boundary)
4026 {
4027 if (!crtl->stack_realign_processed)
4028 crtl->stack_alignment_estimated = boundary;
4029 else
4030 {
4031 /* If stack is realigned and stack alignment value
4032 hasn't been finalized, it is OK not to increase
4033 stack_alignment_estimated. The bigger alignment
4034 requirement is recorded in stack_alignment_needed
4035 below. */
4036 gcc_assert (!crtl->stack_realign_finalized
4037 && crtl->stack_realign_needed);
4038 }
4039 }
4040 }
4041
4042 if (ARGS_GROW_DOWNWARD)
4043 {
4044 locate->slot_offset.constant = -initial_offset_ptr->constant;
4045 if (initial_offset_ptr->var)
4046 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4047 initial_offset_ptr->var);
4048
4049 {
4050 tree s2 = sizetree;
4051 if (where_pad != PAD_NONE
4052 && (!tree_fits_uhwi_p (sizetree)
4053 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4054 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4055 SUB_PARM_SIZE (locate->slot_offset, s2);
4056 }
4057
4058 locate->slot_offset.constant += part_size_in_regs;
4059
4060 if (!in_regs || reg_parm_stack_space > 0)
4061 pad_to_arg_alignment (&locate->slot_offset, boundary,
4062 &locate->alignment_pad);
4063
4064 locate->size.constant = (-initial_offset_ptr->constant
4065 - locate->slot_offset.constant);
4066 if (initial_offset_ptr->var)
4067 locate->size.var = size_binop (MINUS_EXPR,
4068 size_binop (MINUS_EXPR,
4069 ssize_int (0),
4070 initial_offset_ptr->var),
4071 locate->slot_offset.var);
4072
4073 /* Pad_below needs the pre-rounded size to know how much to pad
4074 below. */
4075 locate->offset = locate->slot_offset;
4076 if (where_pad == PAD_DOWNWARD)
4077 pad_below (&locate->offset, passed_mode, sizetree);
4078
4079 }
4080 else
4081 {
4082 if (!in_regs || reg_parm_stack_space > 0)
4083 pad_to_arg_alignment (initial_offset_ptr, boundary,
4084 &locate->alignment_pad);
4085 locate->slot_offset = *initial_offset_ptr;
4086
4087 #ifdef PUSH_ROUNDING
4088 if (passed_mode != BLKmode)
4089 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4090 #endif
4091
4092 /* Pad_below needs the pre-rounded size to know how much to pad below
4093 so this must be done before rounding up. */
4094 locate->offset = locate->slot_offset;
4095 if (where_pad == PAD_DOWNWARD)
4096 pad_below (&locate->offset, passed_mode, sizetree);
4097
4098 if (where_pad != PAD_NONE
4099 && (!tree_fits_uhwi_p (sizetree)
4100 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4101 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4102
4103 ADD_PARM_SIZE (locate->size, sizetree);
4104
4105 locate->size.constant -= part_size_in_regs;
4106 }
4107
4108 locate->offset.constant
4109 += targetm.calls.function_arg_offset (passed_mode, type);
4110 }
4111
4112 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4113 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
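
/* For instance, with BOUNDARY of 64 (8 bytes), STACK_POINTER_OFFSET of 0
and a known constant offset of 12, the offset is rounded up to 16
(rounded down instead when ARGS_GROW_DOWNWARD); when BOUNDARY exceeds
PARM_BOUNDARY the 4 bytes of padding are also recorded in ALIGNMENT_PAD.
Offsets containing a variable term are rounded with round_up/round_down
on trees instead. */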
4114
4115 static void
4116 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4117 struct args_size *alignment_pad)
4118 {
4119 tree save_var = NULL_TREE;
4120 poly_int64 save_constant = 0;
4121 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4122 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4123
4124 #ifdef SPARC_STACK_BOUNDARY_HACK
4125 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4126 the real alignment of %sp. However, when it does this, the
4127 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4128 if (SPARC_STACK_BOUNDARY_HACK)
4129 sp_offset = 0;
4130 #endif
4131
4132 if (boundary > PARM_BOUNDARY)
4133 {
4134 save_var = offset_ptr->var;
4135 save_constant = offset_ptr->constant;
4136 }
4137
4138 alignment_pad->var = NULL_TREE;
4139 alignment_pad->constant = 0;
4140
4141 if (boundary > BITS_PER_UNIT)
4142 {
4143 int misalign;
4144 if (offset_ptr->var
4145 || !known_misalignment (offset_ptr->constant + sp_offset,
4146 boundary_in_bytes, &misalign))
4147 {
4148 tree sp_offset_tree = ssize_int (sp_offset);
4149 tree offset = size_binop (PLUS_EXPR,
4150 ARGS_SIZE_TREE (*offset_ptr),
4151 sp_offset_tree);
4152 tree rounded;
4153 if (ARGS_GROW_DOWNWARD)
4154 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4155 else
4156 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4157
4158 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4159 /* ARGS_SIZE_TREE includes constant term. */
4160 offset_ptr->constant = 0;
4161 if (boundary > PARM_BOUNDARY)
4162 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4163 save_var);
4164 }
4165 else
4166 {
4167 if (ARGS_GROW_DOWNWARD)
4168 offset_ptr->constant -= misalign;
4169 else
4170 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4171
4172 if (boundary > PARM_BOUNDARY)
4173 alignment_pad->constant = offset_ptr->constant - save_constant;
4174 }
4175 }
4176 }
4177
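/* Adjust *OFFSET_PTR to account for the padding that a downward-padded
argument of mode PASSED_MODE (or, for BLKmode, of size SIZETREE) leaves
below the value itself: add the difference between the size rounded up
to PARM_BOUNDARY and the actual size. For example, with a PARM_BOUNDARY
of 32 bits, a 1-byte argument advances the offset by 3 bytes. */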
4178 static void
4179 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4180 {
4181 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4182 int misalign;
4183 if (passed_mode != BLKmode
4184 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4185 offset_ptr->constant += -misalign & (align - 1);
4186 else
4187 {
4188 if (TREE_CODE (sizetree) != INTEGER_CST
4189 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4190 {
4191 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4192 tree s2 = round_up (sizetree, align);
4193 /* Add it in. */
4194 ADD_PARM_SIZE (*offset_ptr, s2);
4195 SUB_PARM_SIZE (*offset_ptr, sizetree);
4196 }
4197 }
4198 }
4199 \f
4200
4201 /* True if register REGNO was alive at a place where `setjmp' was
4202 called and was set more than once or is an argument. Such regs may
4203 be clobbered by `longjmp'. */
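
/* At the source level the situation being detected is roughly:

     int n = 0;
     if (setjmp (env) == 0)
       {
         n = compute ();
         do_something ();
       }
     use (n);

   where DO_SOMETHING may longjmp back to the setjmp, and N is held in a
   register and modified after the setjmp call, so its value at USE is
   unpredictable after such a longjmp. (Illustrative only; compute,
   do_something and use are hypothetical.) */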
4204
4205 static bool
4206 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4207 {
4208 /* There appear to be cases where some local vars never reach the
4209 backend but have bogus regnos. */
4210 if (regno >= max_reg_num ())
4211 return false;
4212
4213 return ((REG_N_SETS (regno) > 1
4214 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4215 regno))
4216 && REGNO_REG_SET_P (setjmp_crosses, regno));
4217 }
4218
4219 /* Walk the tree of blocks describing the binding levels within a
4220 function and warn about variables that might be killed by setjmp or
4221 vfork. This is done after flow analysis and before register
4222 allocation, since register allocation will replace the pseudo-regs
4223 with hard regs. */
4224
4225 static void
4226 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4227 {
4228 tree decl, sub;
4229
4230 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4231 {
4232 if (VAR_P (decl)
4233 && DECL_RTL_SET_P (decl)
4234 && REG_P (DECL_RTL (decl))
4235 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4236 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4237 " %<longjmp%> or %<vfork%>", decl);
4238 }
4239
4240 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4241 setjmp_vars_warning (setjmp_crosses, sub);
4242 }
4243
4244 /* Do the appropriate part of setjmp_vars_warning
4245 but for arguments instead of local variables. */
4246
4247 static void
4248 setjmp_args_warning (bitmap setjmp_crosses)
4249 {
4250 tree decl;
4251 for (decl = DECL_ARGUMENTS (current_function_decl);
4252 decl; decl = DECL_CHAIN (decl))
4253 if (DECL_RTL (decl) != 0
4254 && REG_P (DECL_RTL (decl))
4255 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4256 warning (OPT_Wclobbered,
4257 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4258 decl);
4259 }
4260
4261 /* Generate warning messages for variables live across setjmp. */
4262
4263 void
4264 generate_setjmp_warnings (void)
4265 {
4266 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4267
4268 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4269 || bitmap_empty_p (setjmp_crosses))
4270 return;
4271
4272 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4273 setjmp_args_warning (setjmp_crosses);
4274 }
4275
4276 \f
4277 /* Reverse the order of elements in the fragment chain T of blocks,
4278 and return the new head of the chain (old last element).
4279 In addition to that clear BLOCK_SAME_RANGE flags when needed
4280 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4281 its super fragment origin. */
4282
4283 static tree
4284 block_fragments_nreverse (tree t)
4285 {
4286 tree prev = 0, block, next, prev_super = 0;
4287 tree super = BLOCK_SUPERCONTEXT (t);
4288 if (BLOCK_FRAGMENT_ORIGIN (super))
4289 super = BLOCK_FRAGMENT_ORIGIN (super);
4290 for (block = t; block; block = next)
4291 {
4292 next = BLOCK_FRAGMENT_CHAIN (block);
4293 BLOCK_FRAGMENT_CHAIN (block) = prev;
4294 if ((prev && !BLOCK_SAME_RANGE (prev))
4295 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4296 != prev_super))
4297 BLOCK_SAME_RANGE (block) = 0;
4298 prev_super = BLOCK_SUPERCONTEXT (block);
4299 BLOCK_SUPERCONTEXT (block) = super;
4300 prev = block;
4301 }
4302 t = BLOCK_FRAGMENT_ORIGIN (t);
4303 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4304 != prev_super)
4305 BLOCK_SAME_RANGE (t) = 0;
4306 BLOCK_SUPERCONTEXT (t) = super;
4307 return prev;
4308 }
4309
4310 /* Reverse the order of elements in the chain T of blocks,
4311 and return the new head of the chain (old last element).
4312 Also do the same on subblocks and reverse the order of elements
4313 in BLOCK_FRAGMENT_CHAIN as well. */
4314
4315 static tree
4316 blocks_nreverse_all (tree t)
4317 {
4318 tree prev = 0, block, next;
4319 for (block = t; block; block = next)
4320 {
4321 next = BLOCK_CHAIN (block);
4322 BLOCK_CHAIN (block) = prev;
4323 if (BLOCK_FRAGMENT_CHAIN (block)
4324 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4325 {
4326 BLOCK_FRAGMENT_CHAIN (block)
4327 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4328 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4329 BLOCK_SAME_RANGE (block) = 0;
4330 }
4331 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4332 prev = block;
4333 }
4334 return prev;
4335 }
4336
4337
4338 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4339 and create duplicate blocks. */
4340 /* ??? Need an option to either create block fragments or to create
4341 abstract origin duplicates of a source block. It really depends
4342 on what optimization has been performed. */
4343
4344 void
4345 reorder_blocks (void)
4346 {
4347 tree block = DECL_INITIAL (current_function_decl);
4348
4349 if (block == NULL_TREE)
4350 return;
4351
4352 auto_vec<tree, 10> block_stack;
4353
4354 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4355 clear_block_marks (block);
4356
4357 /* Prune the old trees away, so that they don't get in the way. */
4358 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4359 BLOCK_CHAIN (block) = NULL_TREE;
4360
4361 /* Recreate the block tree from the note nesting. */
4362 reorder_blocks_1 (get_insns (), block, &block_stack);
4363 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4364 }
4365
4366 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4367
4368 void
4369 clear_block_marks (tree block)
4370 {
4371 while (block)
4372 {
4373 TREE_ASM_WRITTEN (block) = 0;
4374 clear_block_marks (BLOCK_SUBBLOCKS (block));
4375 block = BLOCK_CHAIN (block);
4376 }
4377 }
4378
4379 static void
4380 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4381 vec<tree> *p_block_stack)
4382 {
4383 rtx_insn *insn;
4384 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4385
4386 for (insn = insns; insn; insn = NEXT_INSN (insn))
4387 {
4388 if (NOTE_P (insn))
4389 {
4390 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4391 {
4392 tree block = NOTE_BLOCK (insn);
4393 tree origin;
4394
4395 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4396 origin = block;
4397
4398 if (prev_end)
4399 BLOCK_SAME_RANGE (prev_end) = 0;
4400 prev_end = NULL_TREE;
4401
4402 /* If we have seen this block before, that means it now
4403 spans multiple address regions. Create a new fragment. */
4404 if (TREE_ASM_WRITTEN (block))
4405 {
4406 tree new_block = copy_node (block);
4407
4408 BLOCK_SAME_RANGE (new_block) = 0;
4409 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4410 BLOCK_FRAGMENT_CHAIN (new_block)
4411 = BLOCK_FRAGMENT_CHAIN (origin);
4412 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4413
4414 NOTE_BLOCK (insn) = new_block;
4415 block = new_block;
4416 }
4417
4418 if (prev_beg == current_block && prev_beg)
4419 BLOCK_SAME_RANGE (block) = 1;
4420
4421 prev_beg = origin;
4422
4423 BLOCK_SUBBLOCKS (block) = 0;
4424 TREE_ASM_WRITTEN (block) = 1;
4425 /* When there's only one block for the entire function,
4426 current_block == block and we mustn't do this; it
4427 would cause infinite recursion. */
4428 if (block != current_block)
4429 {
4430 tree super;
4431 if (block != origin)
4432 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4433 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4434 (origin))
4435 == current_block);
4436 if (p_block_stack->is_empty ())
4437 super = current_block;
4438 else
4439 {
4440 super = p_block_stack->last ();
4441 gcc_assert (super == current_block
4442 || BLOCK_FRAGMENT_ORIGIN (super)
4443 == current_block);
4444 }
4445 BLOCK_SUPERCONTEXT (block) = super;
4446 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4447 BLOCK_SUBBLOCKS (current_block) = block;
4448 current_block = origin;
4449 }
4450 p_block_stack->safe_push (block);
4451 }
4452 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4453 {
4454 NOTE_BLOCK (insn) = p_block_stack->pop ();
4455 current_block = BLOCK_SUPERCONTEXT (current_block);
4456 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4457 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4458 prev_beg = NULL_TREE;
4459 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4460 ? NOTE_BLOCK (insn) : NULL_TREE;
4461 }
4462 }
4463 else
4464 {
4465 prev_beg = NULL_TREE;
4466 if (prev_end)
4467 BLOCK_SAME_RANGE (prev_end) = 0;
4468 prev_end = NULL_TREE;
4469 }
4470 }
4471 }
4472
4473 /* Reverse the order of elements in the chain T of blocks,
4474 and return the new head of the chain (old last element). */
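
/* E.g. a chain B1 -> B2 -> B3 (linked through BLOCK_CHAIN) becomes
B3 -> B2 -> B1, with B3 returned; the links are rewritten in place. */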
4475
4476 tree
4477 blocks_nreverse (tree t)
4478 {
4479 tree prev = 0, block, next;
4480 for (block = t; block; block = next)
4481 {
4482 next = BLOCK_CHAIN (block);
4483 BLOCK_CHAIN (block) = prev;
4484 prev = block;
4485 }
4486 return prev;
4487 }
4488
4489 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4490 by modifying the last node in chain 1 to point to chain 2. */
4491
4492 tree
4493 block_chainon (tree op1, tree op2)
4494 {
4495 tree t1;
4496
4497 if (!op1)
4498 return op2;
4499 if (!op2)
4500 return op1;
4501
4502 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4503 continue;
4504 BLOCK_CHAIN (t1) = op2;
4505
4506 #ifdef ENABLE_TREE_CHECKING
4507 {
4508 tree t2;
4509 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4510 gcc_assert (t2 != t1);
4511 }
4512 #endif
4513
4514 return op1;
4515 }
4516
4517 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4518 non-NULL, list them all into VECTOR, in a depth-first preorder
4519 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4520 blocks. */
4521
4522 static int
4523 all_blocks (tree block, tree *vector)
4524 {
4525 int n_blocks = 0;
4526
4527 while (block)
4528 {
4529 TREE_ASM_WRITTEN (block) = 0;
4530
4531 /* Record this block. */
4532 if (vector)
4533 vector[n_blocks] = block;
4534
4535 ++n_blocks;
4536
4537 /* Record the subblocks, and their subblocks... */
4538 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4539 vector ? vector + n_blocks : 0);
4540 block = BLOCK_CHAIN (block);
4541 }
4542
4543 return n_blocks;
4544 }
4545
4546 /* Return a vector containing all the blocks rooted at BLOCK. The
4547 number of elements in the vector is stored in N_BLOCKS_P. The
4548 vector is dynamically allocated; it is the caller's responsibility
4549 to call `free' on the pointer returned. */
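
/* Typical use (see number_blocks below):

     int n;
     tree *vec = get_block_vector (block, &n);
     ... look at vec[0] .. vec[n - 1] ...
     free (vec);  */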
4550
4551 static tree *
4552 get_block_vector (tree block, int *n_blocks_p)
4553 {
4554 tree *block_vector;
4555
4556 *n_blocks_p = all_blocks (block, NULL);
4557 block_vector = XNEWVEC (tree, *n_blocks_p);
4558 all_blocks (block, block_vector);
4559
4560 return block_vector;
4561 }
4562
4563 static GTY(()) int next_block_index = 2;
4564
4565 /* Set BLOCK_NUMBER for all the blocks in FN. */
4566
4567 void
4568 number_blocks (tree fn)
4569 {
4570 int i;
4571 int n_blocks;
4572 tree *block_vector;
4573
4574 /* For XCOFF debugging output, we start numbering the blocks
4575 from 1 within each function, rather than keeping a running
4576 count. */
4577 #if defined (XCOFF_DEBUGGING_INFO)
4578 if (write_symbols == XCOFF_DEBUG)
4579 next_block_index = 1;
4580 #endif
4581
4582 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4583
4584 /* The top-level BLOCK isn't numbered at all. */
4585 for (i = 1; i < n_blocks; ++i)
4586 /* We number the blocks from two. */
4587 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4588
4589 free (block_vector);
4590
4591 return;
4592 }
4593
4594 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4595
4596 DEBUG_FUNCTION tree
4597 debug_find_var_in_block_tree (tree var, tree block)
4598 {
4599 tree t;
4600
4601 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4602 if (t == var)
4603 return block;
4604
4605 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4606 {
4607 tree ret = debug_find_var_in_block_tree (var, t);
4608 if (ret)
4609 return ret;
4610 }
4611
4612 return NULL_TREE;
4613 }
4614 \f
4615 /* Keep track of whether we're in a dummy function context. If we are,
4616 we don't want to invoke the set_current_function hook, because we'll
4617 get into trouble if the hook calls target_reinit () recursively or
4618 when the initial initialization is not yet complete. */
4619
4620 static bool in_dummy_function;
4621
4622 /* Invoke the target hook when setting cfun. Update the optimization options
4623 if the function uses different options than the default. */
4624
4625 static void
4626 invoke_set_current_function_hook (tree fndecl)
4627 {
4628 if (!in_dummy_function)
4629 {
4630 tree opts = ((fndecl)
4631 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4632 : optimization_default_node);
4633
4634 if (!opts)
4635 opts = optimization_default_node;
4636
4637 /* Change optimization options if needed. */
4638 if (optimization_current_node != opts)
4639 {
4640 optimization_current_node = opts;
4641 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4642 }
4643
4644 targetm.set_current_function (fndecl);
4645 this_fn_optabs = this_target_optabs;
4646
4647 /* Initialize global alignment variables now that the options are set up. */
4648 parse_alignment_opts ();
4649
4650 if (opts != optimization_default_node)
4651 {
4652 init_tree_optimization_optabs (opts);
4653 if (TREE_OPTIMIZATION_OPTABS (opts))
4654 this_fn_optabs = (struct target_optabs *)
4655 TREE_OPTIMIZATION_OPTABS (opts);
4656 }
4657 }
4658 }
4659
4660 /* cfun should never be set directly; use this function. */
4661
4662 void
4663 set_cfun (struct function *new_cfun, bool force)
4664 {
4665 if (cfun != new_cfun || force)
4666 {
4667 cfun = new_cfun;
4668 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4669 redirect_edge_var_map_empty ();
4670 }
4671 }
4672
4673 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4674
4675 static vec<function *> cfun_stack;
4676
4677 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4678 current_function_decl accordingly. */
4679
4680 void
4681 push_cfun (struct function *new_cfun)
4682 {
4683 gcc_assert ((!cfun && !current_function_decl)
4684 || (cfun && current_function_decl == cfun->decl));
4685 cfun_stack.safe_push (cfun);
4686 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4687 set_cfun (new_cfun);
4688 }
4689
4690 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4691
4692 void
4693 pop_cfun (void)
4694 {
4695 struct function *new_cfun = cfun_stack.pop ();
4696 /* When in_dummy_function, we do have a cfun but current_function_decl is
4697 NULL. We also allow pushing NULL cfun and subsequently changing
4698 current_function_decl to something else and have both restored by
4699 pop_cfun. */
4700 gcc_checking_assert (in_dummy_function
4701 || !cfun
4702 || current_function_decl == cfun->decl);
4703 set_cfun (new_cfun);
4704 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4705 }
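
/* Typical use of the pair above when code needs to operate on another
function temporarily (a sketch; FNDECL stands for whatever FUNCTION_DECL
is being switched to):

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... examine or modify cfun / current_function_decl ...
     pop_cfun ();  */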
4706
4707 /* Return the current value of funcdef_no and increment it. */
4708 int
4709 get_next_funcdef_no (void)
4710 {
4711 return funcdef_no++;
4712 }
4713
4714 /* Return the current value of funcdef_no. */
4715 int
4716 get_last_funcdef_no (void)
4717 {
4718 return funcdef_no;
4719 }
4720
4721 /* Allocate a function structure for FNDECL and set its contents
4722 to the defaults. Set cfun to the newly-allocated object.
4723 Some of the helper functions invoked during initialization assume
4724 that cfun has already been set. Therefore, assign the new object
4725 directly into cfun and invoke the back end hook explicitly at the
4726 very end, rather than initializing a temporary and calling set_cfun
4727 on it.
4728
4729 ABSTRACT_P is true if this is a function that will never be seen by
4730 the middle-end. Such functions are front-end concepts (like C++
4731 function templates) that do not correspond directly to functions
4732 placed in object files. */
4733
4734 void
4735 allocate_struct_function (tree fndecl, bool abstract_p)
4736 {
4737 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4738
4739 cfun = ggc_cleared_alloc<function> ();
4740
4741 init_eh_for_function ();
4742
4743 if (init_machine_status)
4744 cfun->machine = (*init_machine_status) ();
4745
4746 #ifdef OVERRIDE_ABI_FORMAT
4747 OVERRIDE_ABI_FORMAT (fndecl);
4748 #endif
4749
4750 if (fndecl != NULL_TREE)
4751 {
4752 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4753 cfun->decl = fndecl;
4754 current_function_funcdef_no = get_next_funcdef_no ();
4755 }
4756
4757 invoke_set_current_function_hook (fndecl);
4758
4759 if (fndecl != NULL_TREE)
4760 {
4761 tree result = DECL_RESULT (fndecl);
4762
4763 if (!abstract_p)
4764 {
4765 /* Now that we have activated any function-specific attributes
4766 that might affect layout, particularly vector modes, relayout
4767 each of the parameters and the result. */
4768 relayout_decl (result);
4769 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4770 parm = DECL_CHAIN (parm))
4771 relayout_decl (parm);
4772
4773 /* Similarly relayout the function decl. */
4774 targetm.target_option.relayout_function (fndecl);
4775 }
4776
4777 if (!abstract_p && aggregate_value_p (result, fndecl))
4778 {
4779 #ifdef PCC_STATIC_STRUCT_RETURN
4780 cfun->returns_pcc_struct = 1;
4781 #endif
4782 cfun->returns_struct = 1;
4783 }
4784
4785 cfun->stdarg = stdarg_p (fntype);
4786
4787 /* Assume all registers in stdarg functions need to be saved. */
4788 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4789 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4790
4791 /* ??? This could be set on a per-function basis by the front-end
4792 but is this worth the hassle? */
4793 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4794 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4795
4796 if (!profile_flag && !flag_instrument_function_entry_exit)
4797 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4798 }
4799
4800 /* Don't enable begin stmt markers if var-tracking at assignments is
4801 disabled. The markers make little sense without the variable
4802 binding annotations among them. */
4803 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4804 && MAY_HAVE_DEBUG_MARKER_STMTS;
4805 }
4806
4807 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4808 instead of just setting it. */
4809
4810 void
4811 push_struct_function (tree fndecl)
4812 {
4813 /* When in_dummy_function we might be in the middle of a pop_cfun and
4814 current_function_decl and cfun may not match. */
4815 gcc_assert (in_dummy_function
4816 || (!cfun && !current_function_decl)
4817 || (cfun && current_function_decl == cfun->decl));
4818 cfun_stack.safe_push (cfun);
4819 current_function_decl = fndecl;
4820 allocate_struct_function (fndecl, false);
4821 }
4822
4823 /* Reset crtl and other non-struct-function variables to defaults as
4824 appropriate for emitting rtl at the start of a function. */
4825
4826 static void
4827 prepare_function_start (void)
4828 {
4829 gcc_assert (!get_last_insn ());
4830 init_temp_slots ();
4831 init_emit ();
4832 init_varasm_status ();
4833 init_expr ();
4834 default_rtl_profile ();
4835
4836 if (flag_stack_usage_info)
4837 {
4838 cfun->su = ggc_cleared_alloc<stack_usage> ();
4839 cfun->su->static_stack_size = -1;
4840 }
4841
4842 cse_not_expected = ! optimize;
4843
4844 /* Caller save not needed yet. */
4845 caller_save_needed = 0;
4846
4847 /* We haven't done register allocation yet. */
4848 reg_renumber = 0;
4849
4850 /* Indicate that we have not instantiated virtual registers yet. */
4851 virtuals_instantiated = 0;
4852
4853 /* Indicate that we want CONCATs now. */
4854 generating_concat_p = 1;
4855
4856 /* Indicate we have no need of a frame pointer yet. */
4857 frame_pointer_needed = 0;
4858 }
4859
4860 void
4861 push_dummy_function (bool with_decl)
4862 {
4863 tree fn_decl, fn_type, fn_result_decl;
4864
4865 gcc_assert (!in_dummy_function);
4866 in_dummy_function = true;
4867
4868 if (with_decl)
4869 {
4870 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4871 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4872 fn_type);
4873 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4874 NULL_TREE, void_type_node);
4875 DECL_RESULT (fn_decl) = fn_result_decl;
4876 }
4877 else
4878 fn_decl = NULL_TREE;
4879
4880 push_struct_function (fn_decl);
4881 }
4882
4883 /* Initialize the rtl expansion mechanism so that we can do simple things
4884 like generate sequences. This is used to provide a context during global
4885 initialization of some passes. You must call expand_dummy_function_end
4886 to exit this context. */
4887
4888 void
4889 init_dummy_function_start (void)
4890 {
4891 push_dummy_function (false);
4892 prepare_function_start ();
4893 }
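
/* For example, initialization code that wants to probe whether certain
insns can be generated before any real function exists may do, as a
sketch:

     init_dummy_function_start ();
     start_sequence ();
     ... emit and inspect trial insns ...
     end_sequence ();
     expand_dummy_function_end ();  */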
4894
4895 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4896 and initialize static variables for generating RTL for the statements
4897 of the function. */
4898
4899 void
4900 init_function_start (tree subr)
4901 {
4902 /* Initialize backend, if needed. */
4903 initialize_rtl ();
4904
4905 prepare_function_start ();
4906 decide_function_section (subr);
4907
4908 /* Warn if this value is an aggregate type,
4909 regardless of which calling convention we are using for it. */
4910 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4911 warning (OPT_Waggregate_return, "function returns an aggregate");
4912 }
4913
4914 /* Expand code to verify the stack_protect_guard. This is invoked at
4915 the end of a function to be protected. */
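
/* The emitted code is conceptually equivalent to

     if (<guard copy in this frame> != <guard value>)
       __stack_chk_fail ();

   except that the comparison is expanded through the target's
   stack_protect_test / stack_protect_combined_test patterns when they
   exist, so that neither value is needlessly exposed in a register.
   (__stack_chk_fail is the usual default failure routine; the
   targetm.stack_protect_fail hook may substitute another.) */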
4916
4917 void
4918 stack_protect_epilogue (void)
4919 {
4920 tree guard_decl = crtl->stack_protect_guard_decl;
4921 rtx_code_label *label = gen_label_rtx ();
4922 rtx x, y;
4923 rtx_insn *seq = NULL;
4924
4925 x = expand_normal (crtl->stack_protect_guard);
4926
4927 if (targetm.have_stack_protect_combined_test () && guard_decl)
4928 {
4929 gcc_assert (DECL_P (guard_decl));
4930 y = DECL_RTL (guard_decl);
4931 /* Allow the target to compute the address of Y and compare it with X
4932 without leaking Y into a register. This combined address + compare
4933 pattern allows the target to prevent spilling of any intermediate
4934 results by splitting it after the register allocator. */
4935 seq = targetm.gen_stack_protect_combined_test (x, y, label);
4936 }
4937 else
4938 {
4939 if (guard_decl)
4940 y = expand_normal (guard_decl);
4941 else
4942 y = const0_rtx;
4943
4944 /* Allow the target to compare Y with X without leaking either into
4945 a register. */
4946 if (targetm.have_stack_protect_test ())
4947 seq = targetm.gen_stack_protect_test (x, y, label);
4948 }
4949
4950 if (seq)
4951 emit_insn (seq);
4952 else
4953 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4954
4955 /* The noreturn predictor has been moved to the tree level. The rtl-level
4956 predictors estimate this branch about 20%, which isn't enough to get
4957 things moved out of line. Since this is the only extant case of adding
4958 a noreturn function at the rtl level, it doesn't seem worth doing anything
4959 except adding the prediction by hand. */
4960 rtx_insn *tmp = get_last_insn ();
4961 if (JUMP_P (tmp))
4962 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4963
4964 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4965 free_temp_slots ();
4966 emit_label (label);
4967 }
4968 \f
4969 /* Start the RTL for a new function, and set variables used for
4970 emitting RTL.
4971 SUBR is the FUNCTION_DECL node.
4972 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4973 the function's parameters, which must be run at any return statement. */
4974
4975 void
4976 expand_function_start (tree subr)
4977 {
4978 /* Make sure volatile mem refs aren't considered
4979 valid operands of arithmetic insns. */
4980 init_recog_no_volatile ();
4981
4982 crtl->profile
4983 = (profile_flag
4984 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4985
4986 crtl->limit_stack
4987 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4988
4989 /* Make the label for return statements to jump to. Do not special
4990 case machines with special return instructions -- they will be
4991 handled later during jump, ifcvt, or epilogue creation. */
4992 return_label = gen_label_rtx ();
4993
4994 /* Initialize rtx used to return the value. */
4995 /* Do this before assign_parms so that we copy the struct value address
4996 before any library calls that assign parms might generate. */
4997
4998 /* Decide whether to return the value in memory or in a register. */
4999 tree res = DECL_RESULT (subr);
5000 if (aggregate_value_p (res, subr))
5001 {
5002 /* Returning something that won't go in a register. */
5003 rtx value_address = 0;
5004
5005 #ifdef PCC_STATIC_STRUCT_RETURN
5006 if (cfun->returns_pcc_struct)
5007 {
5008 int size = int_size_in_bytes (TREE_TYPE (res));
5009 value_address = assemble_static_space (size);
5010 }
5011 else
5012 #endif
5013 {
5014 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5015 /* Expect to be passed the address of a place to store the value.
5016 If it is passed as an argument, assign_parms will take care of
5017 it. */
5018 if (sv)
5019 {
5020 value_address = gen_reg_rtx (Pmode);
5021 emit_move_insn (value_address, sv);
5022 }
5023 }
5024 if (value_address)
5025 {
5026 rtx x = value_address;
5027 if (!DECL_BY_REFERENCE (res))
5028 {
5029 x = gen_rtx_MEM (DECL_MODE (res), x);
5030 set_mem_attributes (x, res, 1);
5031 }
5032 set_parm_rtl (res, x);
5033 }
5034 }
5035 else if (DECL_MODE (res) == VOIDmode)
5036 /* If return mode is void, this decl rtl should not be used. */
5037 set_parm_rtl (res, NULL_RTX);
5038 else
5039 {
5040 /* Compute the return values into a pseudo reg, which we will copy
5041 into the true return register after the cleanups are done. */
5042 tree return_type = TREE_TYPE (res);
5043
5044 /* If we may coalesce this result, make sure it has the expected mode
5045 in case it was promoted. But we need not bother about BLKmode. */
5046 machine_mode promoted_mode
5047 = flag_tree_coalesce_vars && is_gimple_reg (res)
5048 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5049 : BLKmode;
5050
5051 if (promoted_mode != BLKmode)
5052 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5053 else if (TYPE_MODE (return_type) != BLKmode
5054 && targetm.calls.return_in_msb (return_type))
5055 /* expand_function_end will insert the appropriate padding in
5056 this case. Use the return value's natural (unpadded) mode
5057 within the function proper. */
5058 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5059 else
5060 {
5061 /* In order to figure out what mode to use for the pseudo, we
5062 figure out what the mode of the eventual return register will
5063 actually be, and use that. */
5064 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5065
5066 /* Structures that are returned in registers are not
5067 aggregate_value_p, so we may see a PARALLEL or a REG. */
5068 if (REG_P (hard_reg))
5069 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5070 else
5071 {
5072 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5073 set_parm_rtl (res, gen_group_rtx (hard_reg));
5074 }
5075 }
5076
5077 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5078 result to the real return register(s). */
5079 DECL_REGISTER (res) = 1;
5080 }
5081
5082 /* Initialize rtx for parameters and local variables.
5083 In some cases this requires emitting insns. */
5084 assign_parms (subr);
5085
5086 /* If function gets a static chain arg, store it. */
5087 if (cfun->static_chain_decl)
5088 {
5089 tree parm = cfun->static_chain_decl;
5090 rtx local, chain;
5091 rtx_insn *insn;
5092 int unsignedp;
5093
5094 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5095 chain = targetm.calls.static_chain (current_function_decl, true);
5096
5097 set_decl_incoming_rtl (parm, chain, false);
5098 set_parm_rtl (parm, local);
5099 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5100
5101 if (GET_MODE (local) != GET_MODE (chain))
5102 {
5103 convert_move (local, chain, unsignedp);
5104 insn = get_last_insn ();
5105 }
5106 else
5107 insn = emit_move_insn (local, chain);
5108
5109 /* Mark the register as eliminable, similar to parameters. */
5110 if (MEM_P (chain)
5111 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5112 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5113
5114 /* If we aren't optimizing, save the static chain onto the stack. */
5115 if (!optimize)
5116 {
5117 tree saved_static_chain_decl
5118 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5119 DECL_NAME (parm), TREE_TYPE (parm));
5120 rtx saved_static_chain_rtx
5121 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5122 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5123 emit_move_insn (saved_static_chain_rtx, chain);
5124 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5125 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5126 }
5127 }
5128
5129 /* The following was moved from init_function_start.
5130 The move was supposed to make sdb output more accurate. */
5131 /* Indicate the beginning of the function body,
5132 as opposed to parm setup. */
5133 emit_note (NOTE_INSN_FUNCTION_BEG);
5134
5135 gcc_assert (NOTE_P (get_last_insn ()));
5136
5137 parm_birth_insn = get_last_insn ();
5138
5139 /* If the function receives a non-local goto, then store the
5140 bits we need to restore the frame pointer. */
5141 if (cfun->nonlocal_goto_save_area)
5142 {
5143 tree t_save;
5144 rtx r_save;
5145
5146 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5147 gcc_assert (DECL_RTL_SET_P (var));
5148
5149 t_save = build4 (ARRAY_REF,
5150 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5151 cfun->nonlocal_goto_save_area,
5152 integer_zero_node, NULL_TREE, NULL_TREE);
5153 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5154 gcc_assert (GET_MODE (r_save) == Pmode);
5155
5156 emit_move_insn (r_save, hard_frame_pointer_rtx);
5157 update_nonlocal_goto_save_area ();
5158 }
5159
5160 if (crtl->profile)
5161 {
5162 #ifdef PROFILE_HOOK
5163 PROFILE_HOOK (current_function_funcdef_no);
5164 #endif
5165 }
5166
5167 /* If we are doing generic stack checking, the probe should go here. */
5168 if (flag_stack_check == GENERIC_STACK_CHECK)
5169 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5170 }
5171 \f
5172 void
5173 pop_dummy_function (void)
5174 {
5175 pop_cfun ();
5176 in_dummy_function = false;
5177 }
5178
5179 /* Undo the effects of init_dummy_function_start. */
5180 void
5181 expand_dummy_function_end (void)
5182 {
5183 gcc_assert (in_dummy_function);
5184
5185 /* End any sequences that failed to be closed due to syntax errors. */
5186 while (in_sequence_p ())
5187 end_sequence ();
5188
5189 /* Outside function body, can't compute type's actual size
5190 until next function's body starts. */
5191
5192 free_after_parsing (cfun);
5193 free_after_compilation (cfun);
5194 pop_dummy_function ();
5195 }
5196
5197 /* Helper for diddle_return_value. */
5198
5199 void
5200 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5201 {
5202 if (! outgoing)
5203 return;
5204
5205 if (REG_P (outgoing))
5206 (*doit) (outgoing, arg);
5207 else if (GET_CODE (outgoing) == PARALLEL)
5208 {
5209 int i;
5210
5211 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5212 {
5213 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5214
5215 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5216 (*doit) (x, arg);
5217 }
5218 }
5219 }
5220
5221 /* Call DOIT for each hard register used as a return value from
5222 the current function. */
5223
5224 void
5225 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5226 {
5227 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5228 }
5229
5230 static void
5231 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5232 {
5233 emit_clobber (reg);
5234 }
5235
5236 void
5237 clobber_return_register (void)
5238 {
5239 diddle_return_value (do_clobber_return_reg, NULL);
5240
5241 /* In case a pseudo is used for the return value, clobber it too. */
5242 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5243 {
5244 tree decl_result = DECL_RESULT (current_function_decl);
5245 rtx decl_rtl = DECL_RTL (decl_result);
5246 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5247 {
5248 do_clobber_return_reg (decl_rtl, NULL);
5249 }
5250 }
5251 }
5252
5253 static void
5254 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5255 {
5256 emit_use (reg);
5257 }
5258
5259 static void
5260 use_return_register (void)
5261 {
5262 diddle_return_value (do_use_return_reg, NULL);
5263 }
5264
5265 /* Generate RTL for the end of the current function. */
5266
5267 void
5268 expand_function_end (void)
5269 {
5270 /* If arg_pointer_save_area was referenced only from a nested
5271 function, we will not have initialized it yet. Do that now. */
5272 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5273 get_arg_pointer_save_area ();
5274
5275 /* If we are doing generic stack checking and this function makes calls,
5276 do a stack probe at the start of the function to ensure we have enough
5277 space for another stack frame. */
5278 if (flag_stack_check == GENERIC_STACK_CHECK)
5279 {
5280 rtx_insn *insn, *seq;
5281
5282 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5283 if (CALL_P (insn))
5284 {
5285 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5286 start_sequence ();
5287 if (STACK_CHECK_MOVING_SP)
5288 anti_adjust_stack_and_probe (max_frame_size, true);
5289 else
5290 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5291 seq = get_insns ();
5292 end_sequence ();
5293 set_insn_locations (seq, prologue_location);
5294 emit_insn_before (seq, stack_check_probe_note);
5295 break;
5296 }
5297 }
5298
5299 /* End any sequences that failed to be closed due to syntax errors. */
5300 while (in_sequence_p ())
5301 end_sequence ();
5302
5303 clear_pending_stack_adjust ();
5304 do_pending_stack_adjust ();
5305
5306 /* Output a linenumber for the end of the function.
5307 SDB depended on this. */
5308 set_curr_insn_location (input_location);
5309
5310 /* Before the return label (if any), clobber the return
5311 registers so that they are not propagated live to the rest of
5312 the function. This can only happen with functions that drop
5313 through; if there had been a return statement, there would
5314 have either been a return rtx, or a jump to the return label.
5315
5316 We delay the actual code generation until after the
5317 current_function_value_rtx is computed. */
5318 rtx_insn *clobber_after = get_last_insn ();
5319
5320 /* Output the label for the actual return from the function. */
5321 emit_label (return_label);
5322
5323 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5324 {
5325 /* Let except.c know where it should emit the call to unregister
5326 the function context for sjlj exceptions. */
5327 if (flag_exceptions)
5328 sjlj_emit_function_exit_after (get_last_insn ());
5329 }
5330
5331 /* If this is an implementation of throw, do what's necessary to
5332 communicate between __builtin_eh_return and the epilogue. */
5333 expand_eh_return ();
5334
5335 /* If stack protection is enabled for this function, check the guard. */
5336 if (crtl->stack_protect_guard
5337 && targetm.stack_protect_runtime_enabled_p ()
5338 && naked_return_label == NULL_RTX)
5339 stack_protect_epilogue ();
5340
5341 /* If scalar return value was computed in a pseudo-reg, or was a named
5342 return value that got dumped to the stack, copy that to the hard
5343 return register. */
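  /* A minimal sketch of the common case (hypothetical example):

         double
         half (double x)
         {
           return x * 0.5;
         }

     Here the result is first computed into a pseudo bound to DECL_RESULT
     and only at this point copied into the hard register (or PARALLEL)
     that the ABI designates via crtl->return_rtx.  */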
5344 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5345 {
5346 tree decl_result = DECL_RESULT (current_function_decl);
5347 rtx decl_rtl = DECL_RTL (decl_result);
5348
5349 if (REG_P (decl_rtl)
5350 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5351 : DECL_REGISTER (decl_result))
5352 {
5353 rtx real_decl_rtl = crtl->return_rtx;
5354 complex_mode cmode;
5355
5356 /* This should be set in assign_parms. */
5357 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5358
5359 /* If this is a BLKmode structure being returned in registers,
5360 then use the mode computed in expand_return. Note that if
5361 decl_rtl is memory, then its mode may have been changed,
5362 but that crtl->return_rtx has not. */
5363 if (GET_MODE (real_decl_rtl) == BLKmode)
5364 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5365
5366 /* If a non-BLKmode return value should be padded at the least
5367 significant end of the register, shift it left by the appropriate
5368 amount. BLKmode results are handled using the group load/store
5369 machinery. */
5370 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5371 && REG_P (real_decl_rtl)
5372 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5373 {
5374 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5375 REGNO (real_decl_rtl)),
5376 decl_rtl);
5377 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5378 }
5379 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5380 {
5381 /* If expand_function_start has created a PARALLEL for decl_rtl,
5382 move the result to the real return registers. Otherwise, do
5383 a group load from decl_rtl for a named return. */
5384 if (GET_CODE (decl_rtl) == PARALLEL)
5385 emit_group_move (real_decl_rtl, decl_rtl);
5386 else
5387 emit_group_load (real_decl_rtl, decl_rtl,
5388 TREE_TYPE (decl_result),
5389 int_size_in_bytes (TREE_TYPE (decl_result)));
5390 }
5391 /* In the case of complex integer modes smaller than a word, we'll
5392 need to generate some non-trivial bitfield insertions. Do that
5393 on a pseudo and not the hard register. */
5394 else if (GET_CODE (decl_rtl) == CONCAT
5395 && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5396 && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5397 {
5398 int old_generating_concat_p;
5399 rtx tmp;
5400
5401 old_generating_concat_p = generating_concat_p;
5402 generating_concat_p = 0;
5403 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5404 generating_concat_p = old_generating_concat_p;
5405
5406 emit_move_insn (tmp, decl_rtl);
5407 emit_move_insn (real_decl_rtl, tmp);
5408 }
5409 /* If a named return value dumped the result decl to memory, then
5410 we may need to re-do the PROMOTE_MODE signed/unsigned
5411 extension. */
5412 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5413 {
5414 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5415 promote_function_mode (TREE_TYPE (decl_result),
5416 GET_MODE (decl_rtl), &unsignedp,
5417 TREE_TYPE (current_function_decl), 1);
5418
5419 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5420 }
5421 else
5422 emit_move_insn (real_decl_rtl, decl_rtl);
5423 }
5424 }
5425
5426 /* If returning a structure, arrange to return the address of the value
5427 in a place where debuggers expect to find it.
5428
5429 If returning a structure PCC style,
5430 the caller also depends on this value.
5431 And cfun->returns_pcc_struct is not necessarily set. */
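  /* A sketch of the situation handled here (hypothetical example):

         struct big { int a[64]; };

         struct big
         make_big (void)
         {
           struct big b = { { 0 } };
           return b;
         }

     `make_big' receives a hidden pointer to the caller-allocated return
     slot; the code below copies that address into the register reported
     by targetm.calls.function_value, so that debuggers (and PCC-style
     callers) can locate the result.  */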
5432 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5433 && !targetm.calls.omit_struct_return_reg)
5434 {
5435 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5436 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5437 rtx outgoing;
5438
5439 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5440 type = TREE_TYPE (type);
5441 else
5442 value_address = XEXP (value_address, 0);
5443
5444 outgoing = targetm.calls.function_value (build_pointer_type (type),
5445 current_function_decl, true);
5446
5447 /* Mark this as a function return value so integrate will delete the
5448 assignment and USE below when inlining this function. */
5449 REG_FUNCTION_VALUE_P (outgoing) = 1;
5450
5451 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5452 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5453 value_address = convert_memory_address (mode, value_address);
5454
5455 emit_move_insn (outgoing, value_address);
5456
5457 /* Show the return register used to hold the result (in this case the
5458 address of the result). */
5459 crtl->return_rtx = outgoing;
5460 }
5461
5462 /* Emit the actual code to clobber the return register. Don't emit
5463 it if clobber_after is a barrier; in that case the previous basic
5464 block certainly doesn't fall thru into the exit block. */
5465 if (!BARRIER_P (clobber_after))
5466 {
5467 start_sequence ();
5468 clobber_return_register ();
5469 rtx_insn *seq = get_insns ();
5470 end_sequence ();
5471
5472 emit_insn_after (seq, clobber_after);
5473 }
5474
5475 /* Output the label for the naked return from the function. */
5476 if (naked_return_label)
5477 emit_label (naked_return_label);
5478
5479 /* @@@ This is a kludge. We want to ensure that instructions that
5480 may trap are not moved into the epilogue by scheduling, because
5481 we don't always emit unwind information for the epilogue. */
5482 if (cfun->can_throw_non_call_exceptions
5483 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5484 emit_insn (gen_blockage ());
5485
5486 /* If stack protection is enabled for this function, check the guard. */
5487 if (crtl->stack_protect_guard
5488 && targetm.stack_protect_runtime_enabled_p ()
5489 && naked_return_label)
5490 stack_protect_epilogue ();
5491
5492 /* If we had calls to alloca, and this machine needs
5493 an accurate stack pointer to exit the function,
5494 insert some code to save and restore the stack pointer. */
5495 if (! EXIT_IGNORE_STACK
5496 && cfun->calls_alloca)
5497 {
5498 rtx tem = 0;
5499
5500 start_sequence ();
5501 emit_stack_save (SAVE_FUNCTION, &tem);
5502 rtx_insn *seq = get_insns ();
5503 end_sequence ();
5504 emit_insn_before (seq, parm_birth_insn);
5505
5506 emit_stack_restore (SAVE_FUNCTION, tem);
5507 }
5508
5509 /* ??? This should no longer be necessary now that the old 'stupid'
5510 register allocator is gone, but some parts of the compiler
5511 (e.g. reload_combine and the SH mach_dep_reorg) still try to compute
5512 their own lifetime info instead of using the general framework. */
5513 use_return_register ();
5514 }
5515
5516 rtx
5517 get_arg_pointer_save_area (void)
5518 {
5519 rtx ret = arg_pointer_save_area;
5520
5521 if (! ret)
5522 {
5523 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5524 arg_pointer_save_area = ret;
5525 }
5526
5527 if (! crtl->arg_pointer_save_area_init)
5528 {
5529 /* Save the arg pointer at the beginning of the function. The
5530 generated stack slot may not be a valid memory address, so we
5531 have to check it and fix it if necessary. */
5532 start_sequence ();
5533 emit_move_insn (validize_mem (copy_rtx (ret)),
5534 crtl->args.internal_arg_pointer);
5535 rtx_insn *seq = get_insns ();
5536 end_sequence ();
5537
5538 push_topmost_sequence ();
5539 emit_insn_after (seq, entry_of_function ());
5540 pop_topmost_sequence ();
5541
5542 crtl->arg_pointer_save_area_init = true;
5543 }
5544
5545 return ret;
5546 }
5547 \f
5548
5549 /* If debugging dumps are requested, dump information about how the
5550 target handled -fstack-clash-protection for the prologue.
5551
5552 PROBES describes what if any probes were emitted.
5553
5554 RESIDUALS indicates if the prologue had any residual allocation
5555 (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
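/* A typical dump (illustrative only; the exact lines depend on the target
   and the function) as it might appear under -fdump-rtl-pro_and_epilogue:

       Stack clash inline probes in prologue.
       Stack clash residual allocation in prologue.
       Stack clash no frame pointer needed.
       Stack clash not noreturn prologue.  */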
5556
5557 void
5558 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5559 {
5560 if (!dump_file)
5561 return;
5562
5563 switch (probes)
5564 {
5565 case NO_PROBE_NO_FRAME:
5566 fprintf (dump_file,
5567 "Stack clash no probe no stack adjustment in prologue.\n");
5568 break;
5569 case NO_PROBE_SMALL_FRAME:
5570 fprintf (dump_file,
5571 "Stack clash no probe small stack adjustment in prologue.\n");
5572 break;
5573 case PROBE_INLINE:
5574 fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5575 break;
5576 case PROBE_LOOP:
5577 fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5578 break;
5579 }
5580
5581 if (residuals)
5582 fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5583 else
5584 fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5585
5586 if (frame_pointer_needed)
5587 fprintf (dump_file, "Stack clash frame pointer needed.\n");
5588 else
5589 fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5590
5591 if (TREE_THIS_VOLATILE (cfun->decl))
5592 fprintf (dump_file,
5593 "Stack clash noreturn prologue, assuming no implicit"
5594 " probes in caller.\n");
5595 else
5596 fprintf (dump_file,
5597 "Stack clash not noreturn prologue.\n");
5598 }
5599
5600 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5601 for the first time. */
5602
5603 static void
5604 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5605 {
5606 rtx_insn *tmp;
5607 hash_table<insn_cache_hasher> *hash = *hashp;
5608
5609 if (hash == NULL)
5610 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5611
5612 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5613 {
5614 rtx *slot = hash->find_slot (tmp, INSERT);
5615 gcc_assert (*slot == NULL);
5616 *slot = tmp;
5617 }
5618 }
5619
5620 /* INSN has been duplicated or replaced by COPY, perhaps as a result of
5621 duplicating a basic block, splitting, or running peepholes. If INSN
5622 is a prologue or epilogue insn, record COPY as well. */
5623
5624 void
5625 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5626 {
5627 hash_table<insn_cache_hasher> *hash;
5628 rtx *slot;
5629
5630 hash = epilogue_insn_hash;
5631 if (!hash || !hash->find (insn))
5632 {
5633 hash = prologue_insn_hash;
5634 if (!hash || !hash->find (insn))
5635 return;
5636 }
5637
5638 slot = hash->find_slot (copy, INSERT);
5639 gcc_assert (*slot == NULL);
5640 *slot = copy;
5641 }
5642
5643 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5644 we can be running after reorg, SEQUENCE rtl is possible. */
5645
5646 static bool
5647 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5648 {
5649 if (hash == NULL)
5650 return false;
5651
5652 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5653 {
5654 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5655 int i;
5656 for (i = seq->len () - 1; i >= 0; i--)
5657 if (hash->find (seq->element (i)))
5658 return true;
5659 return false;
5660 }
5661
5662 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5663 }
5664
5665 int
5666 prologue_contains (const rtx_insn *insn)
5667 {
5668 return contains (insn, prologue_insn_hash);
5669 }
5670
5671 int
5672 epilogue_contains (const rtx_insn *insn)
5673 {
5674 return contains (insn, epilogue_insn_hash);
5675 }
5676
5677 int
5678 prologue_epilogue_contains (const rtx_insn *insn)
5679 {
5680 if (contains (insn, prologue_insn_hash))
5681 return 1;
5682 if (contains (insn, epilogue_insn_hash))
5683 return 1;
5684 return 0;
5685 }
5686
5687 void
5688 record_prologue_seq (rtx_insn *seq)
5689 {
5690 record_insns (seq, NULL, &prologue_insn_hash);
5691 }
5692
5693 void
5694 record_epilogue_seq (rtx_insn *seq)
5695 {
5696 record_insns (seq, NULL, &epilogue_insn_hash);
5697 }
5698
5699 /* Set JUMP_LABEL for a return insn. */
5700
5701 void
5702 set_return_jump_label (rtx_insn *returnjump)
5703 {
5704 rtx pat = PATTERN (returnjump);
5705 if (GET_CODE (pat) == PARALLEL)
5706 pat = XVECEXP (pat, 0, 0);
5707 if (ANY_RETURN_P (pat))
5708 JUMP_LABEL (returnjump) = pat;
5709 else
5710 JUMP_LABEL (returnjump) = ret_rtx;
5711 }
5712
5713 /* Return a sequence to be used as the split prologue for the current
5714 function, or NULL. */
5715
5716 static rtx_insn *
5717 make_split_prologue_seq (void)
5718 {
5719 if (!flag_split_stack
5720 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5721 return NULL;
5722
5723 start_sequence ();
5724 emit_insn (targetm.gen_split_stack_prologue ());
5725 rtx_insn *seq = get_insns ();
5726 end_sequence ();
5727
5728 record_insns (seq, NULL, &prologue_insn_hash);
5729 set_insn_locations (seq, prologue_location);
5730
5731 return seq;
5732 }
5733
5734 /* Return a sequence to be used as the prologue for the current function,
5735 or NULL. */
5736
5737 static rtx_insn *
5738 make_prologue_seq (void)
5739 {
5740 if (!targetm.have_prologue ())
5741 return NULL;
5742
5743 start_sequence ();
5744 rtx_insn *seq = targetm.gen_prologue ();
5745 emit_insn (seq);
5746
5747 /* Insert an explicit USE for the frame pointer
5748 if profiling is on and the frame pointer is required. */
5749 if (crtl->profile && frame_pointer_needed)
5750 emit_use (hard_frame_pointer_rtx);
5751
5752 /* Retain a map of the prologue insns. */
5753 record_insns (seq, NULL, &prologue_insn_hash);
5754 emit_note (NOTE_INSN_PROLOGUE_END);
5755
5756 /* Ensure that instructions are not moved into the prologue when
5757 profiling is on. The call to the profiling routine can be
5758 emitted within the live range of a call-clobbered register. */
5759 if (!targetm.profile_before_prologue () && crtl->profile)
5760 emit_insn (gen_blockage ());
5761
5762 seq = get_insns ();
5763 end_sequence ();
5764 set_insn_locations (seq, prologue_location);
5765
5766 return seq;
5767 }
5768
5769 /* Return a sequence to be used as the epilogue for the current function,
5770 or NULL. */
5771
5772 static rtx_insn *
5773 make_epilogue_seq (void)
5774 {
5775 if (!targetm.have_epilogue ())
5776 return NULL;
5777
5778 start_sequence ();
5779 emit_note (NOTE_INSN_EPILOGUE_BEG);
5780 rtx_insn *seq = targetm.gen_epilogue ();
5781 if (seq)
5782 emit_jump_insn (seq);
5783
5784 /* Retain a map of the epilogue insns. */
5785 record_insns (seq, NULL, &epilogue_insn_hash);
5786 set_insn_locations (seq, epilogue_location);
5787
5788 seq = get_insns ();
5789 rtx_insn *returnjump = get_last_insn ();
5790 end_sequence ();
5791
5792 if (JUMP_P (returnjump))
5793 set_return_jump_label (returnjump);
5794
5795 return seq;
5796 }
5797
5798
5799 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5800 this into place with notes indicating where the prologue ends and where
5801 the epilogue begins. Update the basic block information when possible.
5802
5803 Notes on epilogue placement:
5804 There are several kinds of edges to the exit block:
5805 * a single fallthru edge from LAST_BB
5806 * possibly, edges from blocks containing sibcalls
5807 * possibly, fake edges from infinite loops
5808
5809 The epilogue is always emitted on the fallthru edge from the last basic
5810 block in the function, LAST_BB, into the exit block.
5811
5812 If LAST_BB is empty except for a label, it is the target of every
5813 other basic block in the function that ends in a return. If a
5814 target has a return or simple_return pattern (possibly with
5815 conditional variants), these basic blocks can be changed so that a
5816 return insn is emitted into them, and their target is adjusted to
5817 the real exit block.
5818
5819 Notes on shrink wrapping: We implement a fairly conservative
5820 version of shrink-wrapping rather than the textbook one. We only
5821 generate a single prologue and a single epilogue. This is
5822 sufficient to catch a number of interesting cases involving early
5823 exits.
5824
5825 First, we identify the blocks that require the prologue to occur before
5826 them. These are the ones that modify a call-saved register, or reference
5827 any of the stack or frame pointer registers. To simplify things, we then
5828 mark everything reachable from these blocks as also requiring a prologue.
5829 This takes care of loops automatically, and avoids the need to examine
5830 whether MEMs reference the frame, since it is sufficient to check for
5831 occurrences of the stack or frame pointer.
5832
5833 We then compute the set of blocks for which the need for a prologue
5834 is anticipatable (borrowing terminology from the shrink-wrapping
5835 description in Muchnick's book). These are the blocks which either
5836 require a prologue themselves, or those that have only successors
5837 where the prologue is anticipatable. The prologue needs to be
5838 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5839 is not. For the moment, we ensure that only one such edge exists.
5840
5841 The epilogue is placed as described above, but we make a
5842 distinction between inserting return and simple_return patterns
5843 when modifying other blocks that end in a return. Blocks that end
5844 in a sibcall omit the sibcall_epilogue if the block is not in
5845 ANTIC. */
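/* A conservative shrink-wrapping example (hypothetical; whether it actually
   triggers is target dependent):

       extern int heavy_work (int *);

       int
       f (int *p)
       {
         if (!p)
           return -1;
         return heavy_work (p);
       }

   The early-return path neither modifies a call-saved register nor
   references the stack or frame pointer, whereas the call to heavy_work
   forces the usual frame setup.  The prologue can therefore be sunk onto
   the edge leading to the block containing the call, and the early path
   returns via a simple_return with no epilogue.  */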
5846
5847 void
5848 thread_prologue_and_epilogue_insns (void)
5849 {
5850 df_analyze ();
5851
5852 /* Can't deal with multiple successors of the entry block at the
5853 moment. Function should always have at least one entry
5854 point. */
5855 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5856
5857 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5858 edge orig_entry_edge = entry_edge;
5859
5860 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
5861 rtx_insn *prologue_seq = make_prologue_seq ();
5862 rtx_insn *epilogue_seq = make_epilogue_seq ();
5863
5864 /* Try to perform a kind of shrink-wrapping, making sure the
5865 prologue/epilogue is emitted only around those parts of the
5866 function that require it. */
5867 try_shrink_wrapping (&entry_edge, prologue_seq);
5868
5869 /* If the target can handle splitting the prologue/epilogue into separate
5870 components, try to shrink-wrap these components separately. */
5871 try_shrink_wrapping_separate (entry_edge->dest);
5872
5873 /* If that did anything for any component, we now need to generate the
5874 "main" prologue again. Because some targets require some of these
5875 to be called in a specific order (i386 requires the split prologue
5876 to be first, for example), we create all three sequences again here.
5877 If this does not work for some target, that target should not enable
5878 separate shrink-wrapping. */
5879 if (crtl->shrink_wrapped_separate)
5880 {
5881 split_prologue_seq = make_split_prologue_seq ();
5882 prologue_seq = make_prologue_seq ();
5883 epilogue_seq = make_epilogue_seq ();
5884 }
5885
5886 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5887
5888 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5889 this marker for the splits of EH_RETURN patterns, and nothing else
5890 uses the flag in the meantime. */
5891 epilogue_completed = 1;
5892
5893 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5894 some targets, these get split to a special version of the epilogue
5895 code. In order to be able to properly annotate these with unwind
5896 info, try to split them now. If we get a valid split, drop an
5897 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5898 edge e;
5899 edge_iterator ei;
5900 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5901 {
5902 rtx_insn *prev, *last, *trial;
5903
5904 if (e->flags & EDGE_FALLTHRU)
5905 continue;
5906 last = BB_END (e->src);
5907 if (!eh_returnjump_p (last))
5908 continue;
5909
5910 prev = PREV_INSN (last);
5911 trial = try_split (PATTERN (last), last, 1);
5912 if (trial == last)
5913 continue;
5914
5915 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5916 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5917 }
5918
5919 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5920
5921 if (exit_fallthru_edge)
5922 {
5923 if (epilogue_seq)
5924 {
5925 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
5926 commit_edge_insertions ();
5927
5928 /* The epilogue insns we inserted may cause the exit edge to no longer
5929 be fallthru. */
5930 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5931 {
5932 if (((e->flags & EDGE_FALLTHRU) != 0)
5933 && returnjump_p (BB_END (e->src)))
5934 e->flags &= ~EDGE_FALLTHRU;
5935 }
5936 }
5937 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
5938 {
5939 /* We have a fall-through edge to the exit block, the source is not
5940 at the end of the function, and there will be an assembler epilogue
5941 at the end of the function.
5942 We can't use force_nonfallthru here, because that would try to
5943 use return. Inserting a jump 'by hand' is extremely messy, so
5944 we take advantage of cfg_layout_finalize using
5945 fixup_fallthru_exit_predecessor. */
5946 cfg_layout_initialize (0);
5947 basic_block cur_bb;
5948 FOR_EACH_BB_FN (cur_bb, cfun)
5949 if (cur_bb->index >= NUM_FIXED_BLOCKS
5950 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5951 cur_bb->aux = cur_bb->next_bb;
5952 cfg_layout_finalize ();
5953 }
5954 }
5955
5956 /* Insert the prologue. */
5957
5958 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5959
5960 if (split_prologue_seq || prologue_seq)
5961 {
5962 rtx_insn *split_prologue_insn = split_prologue_seq;
5963 if (split_prologue_seq)
5964 {
5965 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
5966 split_prologue_insn = NEXT_INSN (split_prologue_insn);
5967 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5968 }
5969
5970 rtx_insn *prologue_insn = prologue_seq;
5971 if (prologue_seq)
5972 {
5973 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
5974 prologue_insn = NEXT_INSN (prologue_insn);
5975 insert_insn_on_edge (prologue_seq, entry_edge);
5976 }
5977
5978 commit_edge_insertions ();
5979
5980 /* Look for basic blocks within the prologue insns. */
5981 if (split_prologue_insn
5982 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
5983 split_prologue_insn = NULL;
5984 if (prologue_insn
5985 && BLOCK_FOR_INSN (prologue_insn) == NULL)
5986 prologue_insn = NULL;
5987 if (split_prologue_insn || prologue_insn)
5988 {
5989 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
5990 bitmap_clear (blocks);
5991 if (split_prologue_insn)
5992 bitmap_set_bit (blocks,
5993 BLOCK_FOR_INSN (split_prologue_insn)->index);
5994 if (prologue_insn)
5995 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
5996 find_many_sub_basic_blocks (blocks);
5997 }
5998 }
5999
6000 default_rtl_profile ();
6001
6002 /* Emit sibling epilogues before any sibling call sites. */
6003 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6004 (e = ei_safe_edge (ei));
6005 ei_next (&ei))
6006 {
6007 /* Skip those already handled, i.e. the ones that run without a prologue. */
6008 if (e->flags & EDGE_IGNORE)
6009 {
6010 e->flags &= ~EDGE_IGNORE;
6011 continue;
6012 }
6013
6014 rtx_insn *insn = BB_END (e->src);
6015
6016 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6017 continue;
6018
6019 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6020 {
6021 start_sequence ();
6022 emit_note (NOTE_INSN_EPILOGUE_BEG);
6023 emit_insn (ep_seq);
6024 rtx_insn *seq = get_insns ();
6025 end_sequence ();
6026
6027 /* Retain a map of the epilogue insns. Used in life analysis to
6028 avoid getting rid of sibcall epilogue insns. Do this before we
6029 actually emit the sequence. */
6030 record_insns (seq, NULL, &epilogue_insn_hash);
6031 set_insn_locations (seq, epilogue_location);
6032
6033 emit_insn_before (seq, insn);
6034 }
6035 }
6036
6037 if (epilogue_seq)
6038 {
6039 rtx_insn *insn, *next;
6040
6041 /* Move any NOTE_INSN_FUNCTION_BEG notes that ended up inside the
6042 epilogue sequence to just before it, as those notes can be
6043 relevant for debug info generation. There is no need, however,
6044 to be picky about whether such a note actually exists in the
6045 sequence. */
6046 for (insn = epilogue_seq; insn; insn = next)
6047 {
6048 next = NEXT_INSN (insn);
6049 if (NOTE_P (insn)
6050 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6051 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6052 }
6053 }
6054
6055 /* Threading the prologue and epilogue changes the artificial refs
6056 in the entry and exit blocks. */
6057 epilogue_completed = 1;
6058 df_update_entry_exit_and_calls ();
6059 }
6060
6061 /* Reposition the prologue-end and epilogue-begin notes after
6062 instruction scheduling. */
6063
6064 void
6065 reposition_prologue_and_epilogue_notes (void)
6066 {
6067 if (!targetm.have_prologue ()
6068 && !targetm.have_epilogue ()
6069 && !targetm.have_sibcall_epilogue ())
6070 return;
6071
6072 /* Since the hash table is created on demand, the fact that it is
6073 non-null is a signal that it is non-empty. */
6074 if (prologue_insn_hash != NULL)
6075 {
6076 size_t len = prologue_insn_hash->elements ();
6077 rtx_insn *insn, *last = NULL, *note = NULL;
6078
6079 /* Scan from the beginning until we reach the last prologue insn. */
6080 /* ??? While we do have the CFG intact, there are two problems:
6081 (1) The prologue can contain loops (typically probing the stack),
6082 which means that the end of the prologue isn't in the first bb.
6083 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6084 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6085 {
6086 if (NOTE_P (insn))
6087 {
6088 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6089 note = insn;
6090 }
6091 else if (contains (insn, prologue_insn_hash))
6092 {
6093 last = insn;
6094 if (--len == 0)
6095 break;
6096 }
6097 }
6098
6099 if (last)
6100 {
6101 if (note == NULL)
6102 {
6103 /* Scan forward looking for the PROLOGUE_END note. It should
6104 be right at the beginning of the block, possibly with other
6105 insn notes that got moved there. */
6106 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6107 {
6108 if (NOTE_P (note)
6109 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6110 break;
6111 }
6112 }
6113
6114 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6115 if (LABEL_P (last))
6116 last = NEXT_INSN (last);
6117 reorder_insns (note, note, last);
6118 }
6119 }
6120
6121 if (epilogue_insn_hash != NULL)
6122 {
6123 edge_iterator ei;
6124 edge e;
6125
6126 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6127 {
6128 rtx_insn *insn, *first = NULL, *note = NULL;
6129 basic_block bb = e->src;
6130
6131 /* Scan from the beginning until we reach the first epilogue insn. */
6132 FOR_BB_INSNS (bb, insn)
6133 {
6134 if (NOTE_P (insn))
6135 {
6136 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6137 {
6138 note = insn;
6139 if (first != NULL)
6140 break;
6141 }
6142 }
6143 else if (first == NULL && contains (insn, epilogue_insn_hash))
6144 {
6145 first = insn;
6146 if (note != NULL)
6147 break;
6148 }
6149 }
6150
6151 if (note)
6152 {
6153 /* If the function has a single basic block, and no real
6154 epilogue insns (e.g. sibcall with no cleanup), the
6155 epilogue note can get scheduled before the prologue
6156 note. If we have frame related prologue insns, having
6157 them scanned during the epilogue will result in a crash.
6158 In this case re-order the epilogue note to just before
6159 the last insn in the block. */
6160 if (first == NULL)
6161 first = BB_END (bb);
6162
6163 if (PREV_INSN (first) != note)
6164 reorder_insns (note, note, PREV_INSN (first));
6165 }
6166 }
6167 }
6168 }
6169
6170 /* Returns the name of the function declared by FNDECL. */
6171 const char *
6172 fndecl_name (tree fndecl)
6173 {
6174 if (fndecl == NULL)
6175 return "(nofn)";
6176 return lang_hooks.decl_printable_name (fndecl, 1);
6177 }
6178
6179 /* Returns the name of function FN. */
6180 const char *
6181 function_name (struct function *fn)
6182 {
6183 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6184 return fndecl_name (fndecl);
6185 }
6186
6187 /* Returns the name of the current function. */
6188 const char *
6189 current_function_name (void)
6190 {
6191 return function_name (cfun);
6192 }
6193 \f
6194
6195 static unsigned int
6196 rest_of_handle_check_leaf_regs (void)
6197 {
6198 #ifdef LEAF_REGISTERS
6199 crtl->uses_only_leaf_regs
6200 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6201 #endif
6202 return 0;
6203 }
6204
6205 /* Insert a TYPE into the used types hash table of CFUN. */
6206
6207 static void
6208 used_types_insert_helper (tree type, struct function *func)
6209 {
6210 if (type != NULL && func != NULL)
6211 {
6212 if (func->used_types_hash == NULL)
6213 func->used_types_hash = hash_set<tree>::create_ggc (37);
6214
6215 func->used_types_hash->add (type);
6216 }
6217 }
6218
6219 /* Given a type, insert it into the used types hash table of cfun. */
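/* For instance (a hypothetical declaration), given

       struct S *tab[16];

   the loop below walks ARRAY_TYPE -> POINTER_TYPE -> RECORD_TYPE and ends
   up recording the main variant of `struct S', so that debug info for the
   struct can be emitted even if the type is not otherwise referenced.  */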
6220 void
6221 used_types_insert (tree t)
6222 {
6223 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6224 if (TYPE_NAME (t))
6225 break;
6226 else
6227 t = TREE_TYPE (t);
6228 if (TREE_CODE (t) == ERROR_MARK)
6229 return;
6230 if (TYPE_NAME (t) == NULL_TREE
6231 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6232 t = TYPE_MAIN_VARIANT (t);
6233 if (debug_info_level > DINFO_LEVEL_NONE)
6234 {
6235 if (cfun)
6236 used_types_insert_helper (t, cfun);
6237 else
6238 {
6239 /* So this might be a type referenced by a global variable.
6240 Record that type so that we can later decide to emit its
6241 debug information. */
6242 vec_safe_push (types_used_by_cur_var_decl, t);
6243 }
6244 }
6245 }
6246
6247 /* Helper to hash a struct types_used_by_vars_entry. */
6248
6249 static hashval_t
6250 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6251 {
6252 gcc_assert (entry && entry->var_decl && entry->type);
6253
6254 return iterative_hash_object (entry->type,
6255 iterative_hash_object (entry->var_decl, 0));
6256 }
6257
6258 /* Hash function of the types_used_by_vars_entry hash table. */
6259
6260 hashval_t
6261 used_type_hasher::hash (types_used_by_vars_entry *entry)
6262 {
6263 return hash_types_used_by_vars_entry (entry);
6264 }
6265
6266 /* Equality function of the types_used_by_vars_entry hash table. */
6267
6268 bool
6269 used_type_hasher::equal (types_used_by_vars_entry *e1,
6270 types_used_by_vars_entry *e2)
6271 {
6272 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6273 }
6274
6275 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6276
6277 void
6278 types_used_by_var_decl_insert (tree type, tree var_decl)
6279 {
6280 if (type != NULL && var_decl != NULL)
6281 {
6282 types_used_by_vars_entry **slot;
6283 struct types_used_by_vars_entry e;
6284 e.var_decl = var_decl;
6285 e.type = type;
6286 if (types_used_by_vars_hash == NULL)
6287 types_used_by_vars_hash
6288 = hash_table<used_type_hasher>::create_ggc (37);
6289
6290 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6291 if (*slot == NULL)
6292 {
6293 struct types_used_by_vars_entry *entry;
6294 entry = ggc_alloc<types_used_by_vars_entry> ();
6295 entry->type = type;
6296 entry->var_decl = var_decl;
6297 *slot = entry;
6298 }
6299 }
6300 }
6301
6302 namespace {
6303
6304 const pass_data pass_data_leaf_regs =
6305 {
6306 RTL_PASS, /* type */
6307 "*leaf_regs", /* name */
6308 OPTGROUP_NONE, /* optinfo_flags */
6309 TV_NONE, /* tv_id */
6310 0, /* properties_required */
6311 0, /* properties_provided */
6312 0, /* properties_destroyed */
6313 0, /* todo_flags_start */
6314 0, /* todo_flags_finish */
6315 };
6316
6317 class pass_leaf_regs : public rtl_opt_pass
6318 {
6319 public:
6320 pass_leaf_regs (gcc::context *ctxt)
6321 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6322 {}
6323
6324 /* opt_pass methods: */
6325 virtual unsigned int execute (function *)
6326 {
6327 return rest_of_handle_check_leaf_regs ();
6328 }
6329
6330 }; // class pass_leaf_regs
6331
6332 } // anon namespace
6333
6334 rtl_opt_pass *
6335 make_pass_leaf_regs (gcc::context *ctxt)
6336 {
6337 return new pass_leaf_regs (ctxt);
6338 }
6339
6340 static unsigned int
6341 rest_of_handle_thread_prologue_and_epilogue (void)
6342 {
6343 /* prepare_shrink_wrap is sensitive to the block structure of the control
6344 flow graph, so clean it up first. */
6345 if (optimize)
6346 cleanup_cfg (0);
6347
6348 /* On some machines, the prologue and epilogue code, or parts thereof,
6349 can be represented as RTL. Doing so lets us schedule insns between
6350 it and the rest of the code and also allows delayed branch
6351 scheduling to operate in the epilogue. */
6352 thread_prologue_and_epilogue_insns ();
6353
6354 /* Some non-cold blocks may now be only reachable from cold blocks.
6355 Fix that up. */
6356 fixup_partitions ();
6357
6358 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6359 see PR57320. */
6360 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6361
6362 /* The stack usage info is finalized during prologue expansion. */
6363 if (flag_stack_usage_info)
6364 output_stack_usage ();
6365
6366 return 0;
6367 }
6368
6369 namespace {
6370
6371 const pass_data pass_data_thread_prologue_and_epilogue =
6372 {
6373 RTL_PASS, /* type */
6374 "pro_and_epilogue", /* name */
6375 OPTGROUP_NONE, /* optinfo_flags */
6376 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6377 0, /* properties_required */
6378 0, /* properties_provided */
6379 0, /* properties_destroyed */
6380 0, /* todo_flags_start */
6381 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6382 };
6383
6384 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6385 {
6386 public:
6387 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6388 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6389 {}
6390
6391 /* opt_pass methods: */
6392 virtual unsigned int execute (function *)
6393 {
6394 return rest_of_handle_thread_prologue_and_epilogue ();
6395 }
6396
6397 }; // class pass_thread_prologue_and_epilogue
6398
6399 } // anon namespace
6400
6401 rtl_opt_pass *
6402 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6403 {
6404 return new pass_thread_prologue_and_epilogue (ctxt);
6405 }
6406 \f
6407
6408 /* If CONSTRAINT is a matching constraint, then return its number.
6409 Otherwise, return -1. */
6410
6411 static int
6412 matching_constraint_num (const char *constraint)
6413 {
6414 if (*constraint == '%')
6415 constraint++;
6416
6417 if (IN_RANGE (*constraint, '0', '9'))
6418 return strtoul (constraint, NULL, 10);
6419
6420 return -1;
6421 }
6422
6423 /* This mini-pass fixes fall-out from SSA in asm statements that have
6424 in-out constraints. Say you start with
6425
6426 orig = inout;
6427 asm ("": "+mr" (inout));
6428 use (orig);
6429
6430 which is transformed very early to use explicit output and match operands:
6431
6432 orig = inout;
6433 asm ("": "=mr" (inout) : "0" (inout));
6434 use (orig);
6435
6436 Or, after SSA and copyprop,
6437
6438 asm ("": "=mr" (inout_2) : "0" (inout_1));
6439 use (inout_1);
6440
6441 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6442 they represent two separate values, so they will get different pseudo
6443 registers during expansion. Then, since the two operands need to match
6444 per the constraints, but use different pseudo registers, reload can
6445 only register a reload for these operands. But reloads can only be
6446 satisfied by hardregs, not by memory, so we need a register for this
6447 reload, just because we are presented with non-matching operands.
6448 So, even though we allow memory for this operand, no memory can be
6449 used for it, just because the two operands don't match. This can
6450 cause reload failures on register-starved targets.
6451
6452 So it's a symptom of reload not being able to use memory for reloads
6453 or, alternatively, a symptom of both operands not coming into
6454 reload as matching (in which case the pseudo could go to memory just
6455 fine, as the alternative allows it, and no reload would be necessary).
6456 We fix the latter problem here, by transforming
6457
6458 asm ("": "=mr" (inout_2) : "0" (inout_1));
6459
6460 back to
6461
6462 inout_2 = inout_1;
6463 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
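/* A complete (hypothetical) function exhibiting the pattern above:

       unsigned int
       bump (unsigned int inout)
       {
         unsigned int orig = inout;
         asm ("" : "+mr" (inout));
         return orig + inout;
       }

   After SSA and copy propagation the "+mr" operand becomes the split
   "=mr" / "0" pair shown above, which is the situation this pass
   repairs.  */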
6464
6465 static void
6466 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6467 {
6468 int i;
6469 bool changed = false;
6470 rtx op = SET_SRC (p_sets[0]);
6471 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6472 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6473 bool *output_matched = XALLOCAVEC (bool, noutputs);
6474
6475 memset (output_matched, 0, noutputs * sizeof (bool));
6476 for (i = 0; i < ninputs; i++)
6477 {
6478 rtx input, output;
6479 rtx_insn *insns;
6480 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6481 int match, j;
6482
6483 match = matching_constraint_num (constraint);
6484 if (match < 0)
6485 continue;
6486
6487 gcc_assert (match < noutputs);
6488 output = SET_DEST (p_sets[match]);
6489 input = RTVEC_ELT (inputs, i);
6490 /* Only do the transformation for pseudos. */
6491 if (! REG_P (output)
6492 || rtx_equal_p (output, input)
6493 || !(REG_P (input) || SUBREG_P (input)
6494 || MEM_P (input) || CONSTANT_P (input))
6495 || !general_operand (input, GET_MODE (output)))
6496 continue;
6497
6498 /* We can't do anything if the output is also used as input,
6499 as we're going to overwrite it. */
6500 for (j = 0; j < ninputs; j++)
6501 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6502 break;
6503 if (j != ninputs)
6504 continue;
6505
6506 /* Avoid changing the same input several times. For
6507 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6508 only change it once (to out1), rather than changing it
6509 first to out1 and afterwards to out2. */
6510 if (i > 0)
6511 {
6512 for (j = 0; j < noutputs; j++)
6513 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6514 break;
6515 if (j != noutputs)
6516 continue;
6517 }
6518 output_matched[match] = true;
6519
6520 start_sequence ();
6521 emit_move_insn (output, copy_rtx (input));
6522 insns = get_insns ();
6523 end_sequence ();
6524 emit_insn_before (insns, insn);
6525
6526 constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
6527 bool early_clobber_p = strchr (constraint, '&') != NULL;
6528
6529 /* Now replace all mentions of the input with output. We can't
6530 just replace the occurrence in inputs[i], as the register might
6531 also be used in some other input (or even in an address of an
6532 output), which would mean possibly increasing the number of
6533 inputs by one (namely 'output' in addition), which might pose
6534 a too complicated problem for reload to solve. E.g. this situation:
6535
6536 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6537
6538 Here 'input' is used in two occurrences as input (once for the
6539 input operand, once for the address in the second output operand).
6540 If we would replace only the occurrence of the input operand (to
6541 make the matching) we would be left with this:
6542
6543 output = input
6544 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6545
6546 Now we suddenly have two different input values (containing the same
6547 value, but different pseudos) where we formerly had only one.
6548 With more complicated asms this might lead to reload failures
6549 which wouldn't have happened without this pass. So, iterate over
6550 all operands and replace all occurrences of the register used.
6551
6552 However, if one or more of the 'input' uses have a non-matching
6553 constraint and the matched output operand is an early clobber
6554 operand, then do not replace the input operand, since by definition
6555 it conflicts with the output operand and cannot share the same
6556 register. See PR89313 for details. */
6557
6558 for (j = 0; j < noutputs; j++)
6559 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6560 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6561 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6562 input, output);
6563 for (j = 0; j < ninputs; j++)
6564 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6565 {
6566 if (!early_clobber_p
6567 || match == matching_constraint_num
6568 (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6569 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6570 input, output);
6571 }
6572
6573 changed = true;
6574 }
6575
6576 if (changed)
6577 df_insn_rescan (insn);
6578 }
6579
6580 /* Add the decl D to the local_decls list of FUN. */
6581
6582 void
6583 add_local_decl (struct function *fun, tree d)
6584 {
6585 gcc_assert (VAR_P (d));
6586 vec_safe_push (fun->local_decls, d);
6587 }
6588
6589 namespace {
6590
6591 const pass_data pass_data_match_asm_constraints =
6592 {
6593 RTL_PASS, /* type */
6594 "asmcons", /* name */
6595 OPTGROUP_NONE, /* optinfo_flags */
6596 TV_NONE, /* tv_id */
6597 0, /* properties_required */
6598 0, /* properties_provided */
6599 0, /* properties_destroyed */
6600 0, /* todo_flags_start */
6601 0, /* todo_flags_finish */
6602 };
6603
6604 class pass_match_asm_constraints : public rtl_opt_pass
6605 {
6606 public:
6607 pass_match_asm_constraints (gcc::context *ctxt)
6608 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6609 {}
6610
6611 /* opt_pass methods: */
6612 virtual unsigned int execute (function *);
6613
6614 }; // class pass_match_asm_constraints
6615
6616 unsigned
6617 pass_match_asm_constraints::execute (function *fun)
6618 {
6619 basic_block bb;
6620 rtx_insn *insn;
6621 rtx pat, *p_sets;
6622 int noutputs;
6623
6624 if (!crtl->has_asm_statement)
6625 return 0;
6626
6627 df_set_flags (DF_DEFER_INSN_RESCAN);
6628 FOR_EACH_BB_FN (bb, fun)
6629 {
6630 FOR_BB_INSNS (bb, insn)
6631 {
6632 if (!INSN_P (insn))
6633 continue;
6634
6635 pat = PATTERN (insn);
6636 if (GET_CODE (pat) == PARALLEL)
6637 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6638 else if (GET_CODE (pat) == SET)
6639 p_sets = &PATTERN (insn), noutputs = 1;
6640 else
6641 continue;
6642
6643 if (GET_CODE (*p_sets) == SET
6644 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6645 match_asm_constraints_1 (insn, p_sets, noutputs);
6646 }
6647 }
6648
6649 return TODO_df_finish;
6650 }
6651
6652 } // anon namespace
6653
6654 rtl_opt_pass *
6655 make_pass_match_asm_constraints (gcc::context *ctxt)
6656 {
6657 return new pass_match_asm_constraints (ctxt);
6658 }
6659
6660
6661 #include "gt-function.h"